From 2c5fb24855662e45d0f09a05cd0ce327bc75ecba Mon Sep 17 00:00:00 2001 From: Michael Yang Date: Mon, 2 Dec 2024 11:45:23 -0800 Subject: [PATCH] wip: next ollama runner implement llama and mllama model architectures in go using ggml (through cgo) --- .gitattributes | 9 + cache/cache.go | 63 + fs/ggml/ggml.go | 305 + fs/ggml/gguf.go | 661 + fs/ggml/type.go | 180 + {util => fs/util}/bufioutil/buffer_seeker.go | 0 .../util}/bufioutil/buffer_seeker_test.go | 0 go.mod | 4 +- go.sum | 4 + llm/ggml.go | 2 +- ml/backend.go | 191 + ml/backend/backend.go | 5 + ml/backend/ggml/backend.go | 470 + ml/backend/ggml/backend_cpu.go | 8 + ml/backend/ggml/backend_darwin.go | 13 + ml/backend/ggml/backend_debug.go | 6 + ml/backend/ggml/backend_linux.go | 10 + ml/backend/ggml/backend_windows.go | 10 + ml/backend/ggml/ggml-aarch64.c | 3235 +++ ml/backend/ggml/ggml-aarch64.h | 65 + ml/backend/ggml/ggml-alloc.c | 1068 + ml/backend/ggml/ggml-alloc.h | 102 + ml/backend/ggml/ggml-backend-impl.h | 180 + ml/backend/ggml/ggml-backend.c | 2325 ++ ml/backend/ggml/ggml-backend.h | 267 + ml/backend/ggml/ggml-blas.h | 49 + ml/backend/ggml/ggml-common.h | 1879 ++ ml/backend/ggml/ggml-cpu-impl.h | 640 + ml/backend/ggml/ggml-cuda.h | 75 + ml/backend/ggml/ggml-debug.c | 110 + ml/backend/ggml/ggml-debug.h | 3 + ml/backend/ggml/ggml-impl.h | 212 + ml/backend/ggml/ggml-metal-embed.metal | 8325 ++++++ ml/backend/ggml/ggml-metal.h | 88 + ml/backend/ggml/ggml-metal.metal | 6445 +++++ ml/backend/ggml/ggml-metal_darwin_arm64.m | 3669 +++ ml/backend/ggml/ggml-metal_darwin_arm64.s | 6 + ml/backend/ggml/ggml-quants.c | 15778 +++++++++++ ml/backend/ggml/ggml-quants.h | 173 + ml/backend/ggml/ggml.c | 23355 ++++++++++++++++ ml/backend/ggml/ggml.h | 2595 ++ ml/backend/ggml/sgemm.h | 14 + ml/nn/convolution.go | 11 + ml/nn/embedding.go | 11 + ml/nn/linear.go | 17 + ml/nn/normalization.go | 33 + model/cmd/main.go | 154 + model/llama/model.go | 147 + model/llama/process_text.go | 25 + model/mllama/model.go | 90 + 
model/mllama/model_text.go | 225 + model/mllama/model_vision.go | 234 + model/mllama/process_image.go | 240 + model/mllama/process_text.go | 25 + model/mllama/process_text_test.go | 82 + model/mllama/testdata/model.bin | 1 + model/mllama/testdata/theirs.json | 1 + model/model.go | 228 + model/process_text.go | 311 + model/testdata/inputs.json | 588 + sample/greedy.go | 13 + sample/sample.go | 74 + 62 files changed, 75107 insertions(+), 2 deletions(-) create mode 100644 cache/cache.go create mode 100644 fs/ggml/ggml.go create mode 100644 fs/ggml/gguf.go create mode 100644 fs/ggml/type.go rename {util => fs/util}/bufioutil/buffer_seeker.go (100%) rename {util => fs/util}/bufioutil/buffer_seeker_test.go (100%) create mode 100644 ml/backend.go create mode 100644 ml/backend/backend.go create mode 100644 ml/backend/ggml/backend.go create mode 100644 ml/backend/ggml/backend_cpu.go create mode 100644 ml/backend/ggml/backend_darwin.go create mode 100644 ml/backend/ggml/backend_debug.go create mode 100644 ml/backend/ggml/backend_linux.go create mode 100644 ml/backend/ggml/backend_windows.go create mode 100644 ml/backend/ggml/ggml-aarch64.c create mode 100644 ml/backend/ggml/ggml-aarch64.h create mode 100644 ml/backend/ggml/ggml-alloc.c create mode 100644 ml/backend/ggml/ggml-alloc.h create mode 100644 ml/backend/ggml/ggml-backend-impl.h create mode 100644 ml/backend/ggml/ggml-backend.c create mode 100644 ml/backend/ggml/ggml-backend.h create mode 100644 ml/backend/ggml/ggml-blas.h create mode 100644 ml/backend/ggml/ggml-common.h create mode 100644 ml/backend/ggml/ggml-cpu-impl.h create mode 100644 ml/backend/ggml/ggml-cuda.h create mode 100644 ml/backend/ggml/ggml-debug.c create mode 100644 ml/backend/ggml/ggml-debug.h create mode 100644 ml/backend/ggml/ggml-impl.h create mode 100644 ml/backend/ggml/ggml-metal-embed.metal create mode 100644 ml/backend/ggml/ggml-metal.h create mode 100644 ml/backend/ggml/ggml-metal.metal create mode 100644 
ml/backend/ggml/ggml-metal_darwin_arm64.m create mode 100644 ml/backend/ggml/ggml-metal_darwin_arm64.s create mode 100644 ml/backend/ggml/ggml-quants.c create mode 100644 ml/backend/ggml/ggml-quants.h create mode 100644 ml/backend/ggml/ggml.c create mode 100644 ml/backend/ggml/ggml.h create mode 100644 ml/backend/ggml/sgemm.h create mode 100644 ml/nn/convolution.go create mode 100644 ml/nn/embedding.go create mode 100644 ml/nn/linear.go create mode 100644 ml/nn/normalization.go create mode 100644 model/cmd/main.go create mode 100644 model/llama/model.go create mode 100644 model/llama/process_text.go create mode 100644 model/mllama/model.go create mode 100644 model/mllama/model_text.go create mode 100644 model/mllama/model_vision.go create mode 100644 model/mllama/process_image.go create mode 100644 model/mllama/process_text.go create mode 100644 model/mllama/process_text_test.go create mode 120000 model/mllama/testdata/model.bin create mode 100644 model/mllama/testdata/theirs.json create mode 100644 model/model.go create mode 100644 model/process_text.go create mode 100644 model/testdata/inputs.json create mode 100644 sample/greedy.go create mode 100644 sample/sample.go diff --git a/.gitattributes b/.gitattributes index 51635caa7..4bcd95b0b 100644 --- a/.gitattributes +++ b/.gitattributes @@ -7,5 +7,14 @@ llama/**/*.cuh linguist-vendored llama/**/*.m linguist-vendored llama/**/*.metal linguist-vendored +ml/backend/**/*.c linguist-vendored +ml/backend/**/*.h linguist-vendored +ml/backend/**/*.cpp linguist-vendored +ml/backend/**/*.hpp linguist-vendored +ml/backend/**/*.cu linguist-vendored +ml/backend/**/*.cuh linguist-vendored +ml/backend/**/*.m linguist-vendored +ml/backend/**/*.metal linguist-vendored + * text=auto *.go text eol=lf diff --git a/cache/cache.go b/cache/cache.go new file mode 100644 index 000000000..210aaf234 --- /dev/null +++ b/cache/cache.go @@ -0,0 +1,63 @@ +package cache + +import ( + "github.com/ollama/ollama/ml" +) + +type Options struct { + 
Position int +} + +type Cache interface { + Sub(i int) Cache + Put(ctx ml.Context, key, value ml.Tensor, opts Options) (ml.Tensor, ml.Tensor) +} + +type Simple struct { + DType ml.DType + Capacity int + + keys, values []ml.Tensor +} + +func (c *Simple) Sub(i int) Cache { + if i >= len(c.keys) { + c.keys = append(c.keys, make([]ml.Tensor, i-len(c.keys)+1)...) + c.values = append(c.values, make([]ml.Tensor, i-len(c.values)+1)...) + } + + return &Simple{ + keys: c.keys[i : i+1], + values: c.values[i : i+1], + Capacity: c.Capacity, + DType: c.DType, + } +} + +func (c *Simple) Put(ctx ml.Context, key, value ml.Tensor, opts Options) (ml.Tensor, ml.Tensor) { + if c.keys[0] == nil || c.values[0] == nil { + c.keys[0] = ctx.Zeros(c.DType, int(key.Dim(0)*key.Dim(1))*c.Capacity) + c.values[0] = ctx.Zeros(c.DType, int(value.Dim(0)*value.Dim(1))*c.Capacity) + } + + ctx.Forward(key.Copy(ctx, c.keys[0].View(ctx, int(key.Stride(2))*opts.Position, int(key.Dim(0)*key.Dim(1)*key.Dim(2))))) + ctx.Forward(value.Copy(ctx, c.values[0].View(ctx, int(value.Stride(2))*opts.Position, int(value.Dim(0)*value.Dim(1)*value.Dim(2))))) + + n := min(c.Capacity, int(key.Dim(2))+opts.Position) + + key = c.keys[0].View(ctx, 0, + int(key.Dim(0)), int(key.Stride(1)), + int(key.Dim(1)), int(key.Stride(2)), + n, + ) + + value = c.values[0].View(ctx, 0, + int(value.Dim(0)), int(value.Stride(1)), + int(value.Dim(1)), int(value.Stride(2)), + n, + ) + + // TODO shift context if necessary + + return key, value +} diff --git a/fs/ggml/ggml.go b/fs/ggml/ggml.go new file mode 100644 index 000000000..5bb799206 --- /dev/null +++ b/fs/ggml/ggml.go @@ -0,0 +1,305 @@ +package ggml + +import ( + "cmp" + "encoding/binary" + "errors" + "fmt" + "io" + "log/slog" + "strings" + + "github.com/ollama/ollama/fs/util/bufioutil" +) + +type GGML struct { + container + model +} + +type model interface { + KV() KV + Tensors() Tensors +} + +type KV map[string]any + +func (kv KV) Architecture() string { + return 
cmp.Or(kv.String("general.architecture"), "unknown") +} + +func (kv KV) FileType() fileType { + if t := kv.Uint("general.file_type"); t > 0 { + return fileType(t) + } + + return fileTypeUnknown +} + +func (kv KV) String(key string, defaultValue ...string) string { + return keyValue(kv, key, append(defaultValue, "")...) +} + +func (kv KV) Uint(key string, defaultValue ...uint32) uint32 { + return keyValue(kv, key, append(defaultValue, 0)...) +} + +func (kv KV) Float(key string, defaultValue ...float32) float32 { + return keyValue(kv, key, append(defaultValue, 0)...) +} + +func (kv KV) Strings(key string, defaultValue ...[]string) []string { + r := keyValue(kv, key, &array{}) + s := make([]string, r.size) + for i := range r.size { + s[i] = r.values[i].(string) + } + + return s +} + +func (kv KV) Uints(key string, defaultValue ...[]uint32) []uint32 { + r := keyValue(kv, key, &array{}) + s := make([]uint32, r.size) + for i := range r.size { + s[i] = uint32(r.values[i].(int32)) + } + + return s +} + +func keyValue[T string | uint32 | float32 | *array](kv KV, key string, defaultValue ...T) T { + if !strings.HasPrefix(key, "tokenizer.") && !strings.HasPrefix(key, "general.") { + key = kv.Architecture() + "." + key + } + + if val, ok := kv[key]; ok { + return val.(T) + } + + slog.Warn("key not found", "key", key, "default", defaultValue[0]) + return defaultValue[0] +} + +type Tensors struct { + Items []*Tensor + Offset uint64 +} + +func (ts Tensors) Layers() map[string]Layer { + layers := make(map[string]Layer) + for _, t := range ts.Items { + parts := strings.Split(t.Name, ".") + if parts[0] == "blk" { + // join first and second part, e.g. blk.%d + parts = append([]string{fmt.Sprintf("%s.%s", parts[0], parts[1])}, parts[2:]...) 
+ } + + if _, ok := layers[parts[0]]; !ok { + layers[parts[0]] = make(Layer) + } + + layers[parts[0]][strings.Join(parts[1:], ".")] = t + } + + return layers +} + +type Layer map[string]*Tensor + +func (l Layer) size() (size uint64) { + for _, t := range l { + size += t.Size() + } + + return size +} + +type Tensor struct { + Name string `json:"name"` + Kind uint32 `json:"kind"` + Offset uint64 `json:"-"` + + // Shape is the number of elements in each dimension + Shape []uint64 `json:"shape"` + + io.WriterTo `json:"-"` +} + +func (t Tensor) block() (n int) { + if _, err := fmt.Sscanf(t.Name, "blk.%d.", &n); err != nil { + return -1 + } + + return +} + +func (t Tensor) blockSize() uint64 { + switch t.Kind { + case 0, 1, 24, 25, 26, 27, 28, 30: // F32, F16, I8, I16, I32, I64, F64, BF16 + return 1 + case 2, 3, 4, 5, 6, 7, 8, 9, 20: // Q4_0, Q4_1, Q5_0, Q5_1, Q8_0, Q8_1, IQ4_NL + return 32 + default: // All others + return 256 + } +} + +func (t Tensor) typeSize() uint64 { + blockSize := t.blockSize() + + switch t.Kind { + case 0: // FP32 + return 4 + case 1: // FP16 + return 2 + case 2: // Q4_0 + return 2 + blockSize/2 + case 3: // Q4_1 + return 2 + 2 + blockSize/2 + case 6: // Q5_0 + return 2 + 4 + blockSize/2 + case 7: // Q5_1 + return 2 + 2 + 4 + blockSize/2 + case 8: // Q8_0 + return 2 + blockSize + case 9: // Q8_1 + return 4 + 4 + blockSize + case 10: // Q2_K + return blockSize/16 + blockSize/4 + 2 + 2 + case 11: // Q3_K + return blockSize/8 + blockSize/4 + 12 + 2 + case 12: // Q4_K + return 2 + 2 + 12 + blockSize/2 + case 13: // Q5_K + return 2 + 2 + 12 + blockSize/8 + blockSize/2 + case 14: // Q6_K + return blockSize/2 + blockSize/4 + blockSize/16 + 2 + case 15: // Q8_K + return 2 + blockSize + 2*blockSize/16 + case 16: // IQ2_XXS + return 2 + 2*blockSize/8 + case 17: // IQ2_XS + return 2 + 2*blockSize/8 + blockSize/32 + case 18: // IQ3_XXS + return 2 + blockSize/4 + blockSize/8 + case 19: // IQ1_S + return 2 + blockSize/8 + blockSize/16 + case 20: // IQ4_NL + 
return 2 + blockSize/2 + case 21: // IQ3_S + return 2 + blockSize/4 + blockSize/8 + blockSize/32 + 4 + case 22: // IQ2_S + return 2 + blockSize/4 + blockSize/16 + case 23: // IQ4_XS + return 2 + 2 + blockSize/2 + blockSize/64 + case 24: // I8 + return 1 + case 25: // I16 + return 2 + case 26: // I32 + return 4 + case 27: // I64 + return 8 + case 28: // F64 + return 8 + case 29: // IQ1_M + return blockSize/8 + blockSize/16 + blockSize/32 + default: + return 0 + } +} + +func (t Tensor) parameters() uint64 { + var count uint64 = 1 + for _, n := range t.Shape { + count *= n + } + return count +} + +func (t Tensor) Size() uint64 { + return t.parameters() * t.typeSize() / t.blockSize() +} + +type container interface { + Name() string + Decode(io.ReadSeeker) (model, error) +} + +const ( + // Magic constant for `ggml` files (unversioned). + FILE_MAGIC_GGML = 0x67676d6c + // Magic constant for `ggml` files (versioned, ggmf). + FILE_MAGIC_GGMF = 0x67676d66 + // Magic constant for `ggml` files (versioned, ggjt). + FILE_MAGIC_GGJT = 0x67676a74 + // Magic constant for `ggla` files (LoRA adapter). + FILE_MAGIC_GGLA = 0x67676C61 + // Magic constant for `gguf` files (versioned, gguf) + FILE_MAGIC_GGUF_LE = 0x46554747 + FILE_MAGIC_GGUF_BE = 0x47475546 +) + +var ErrUnsupportedFormat = errors.New("unsupported model format") + +func DetectGGMLType(b []byte) string { + switch binary.LittleEndian.Uint32(b[:4]) { + case FILE_MAGIC_GGML: + return "ggml" + case FILE_MAGIC_GGMF: + return "ggmf" + case FILE_MAGIC_GGJT: + return "ggjt" + case FILE_MAGIC_GGLA: + return "ggla" + case FILE_MAGIC_GGUF_LE, FILE_MAGIC_GGUF_BE: + return "gguf" + default: + return "" + } +} + +// DecodeGGML decodes a GGML model from the given reader. +// +// It collects array values for arrays with a size less than or equal to +// maxArraySize. If maxArraySize is 0, the default value of 1024 is used. If +// the maxArraySize is negative, all arrays are collected. 
+func DecodeGGML(rs io.ReadSeeker, maxArraySize int) (*GGML, int64, error) { + if maxArraySize == 0 { + maxArraySize = 1024 + } + + rs = bufioutil.NewBufferedSeeker(rs, 32<<10) + + var magic uint32 + if err := binary.Read(rs, binary.LittleEndian, &magic); err != nil { + return nil, 0, err + } + + var c container + switch magic { + case FILE_MAGIC_GGUF_LE: + c = &containerGGUF{ByteOrder: binary.LittleEndian, maxArraySize: maxArraySize} + case FILE_MAGIC_GGUF_BE: + c = &containerGGUF{ByteOrder: binary.BigEndian, maxArraySize: maxArraySize} + default: + return nil, 0, errors.New("invalid file magic") + } + + model, err := c.Decode(rs) + if err != nil { + return nil, 0, err + } + + offset, err := rs.Seek(0, io.SeekCurrent) + if err != nil { + return nil, 0, err + } + + // final model type + return &GGML{ + container: c, + model: model, + }, offset, nil +} diff --git a/fs/ggml/gguf.go b/fs/ggml/gguf.go new file mode 100644 index 000000000..f26b8a0c9 --- /dev/null +++ b/fs/ggml/gguf.go @@ -0,0 +1,661 @@ +package ggml + +import ( + "bytes" + "cmp" + "encoding/binary" + "encoding/json" + "fmt" + "io" + "log/slog" + "maps" + "slices" + "strings" +) + +type containerGGUF struct { + ByteOrder binary.ByteOrder + + Version uint32 + + V1 struct { + NumTensor uint32 + NumKV uint32 + } + + V2 struct { + NumTensor uint64 + NumKV uint64 + } + + V3 struct { + NumTensor uint64 + NumKV uint64 + } + + maxArraySize int +} + +func (c *containerGGUF) canCollectArray(size int) bool { + return c.maxArraySize < 0 || size <= c.maxArraySize +} + +func (c *containerGGUF) Name() string { + return "gguf" +} + +func (c *containerGGUF) Decode(rs io.ReadSeeker) (model, error) { + if err := binary.Read(rs, c.ByteOrder, &c.Version); err != nil { + return nil, err + } + + var err error + switch c.Version { + case 1: + err = binary.Read(rs, c.ByteOrder, &c.V1) + case 2: + err = binary.Read(rs, c.ByteOrder, &c.V2) + default: + err = binary.Read(rs, c.ByteOrder, &c.V3) + } + if err != nil { + return nil, 
err + } + + model := newGGUF(c) + if err := model.Decode(rs); err != nil { + return nil, err + } + + return model, nil +} + +const ( + ggufTypeUint8 uint32 = iota + ggufTypeInt8 + ggufTypeUint16 + ggufTypeInt16 + ggufTypeUint32 + ggufTypeInt32 + ggufTypeFloat32 + ggufTypeBool + ggufTypeString + ggufTypeArray + ggufTypeUint64 + ggufTypeInt64 + ggufTypeFloat64 +) + +type gguf struct { + *containerGGUF + + kv KV + tensors []*Tensor + + parameters uint64 + tensorOffset uint64 + + scratch [16 << 10]byte +} + +func newGGUF(container *containerGGUF) *gguf { + return &gguf{ + containerGGUF: container, + kv: make(KV), + } +} + +func (llm *gguf) KV() KV { + return llm.kv +} + +func (llm *gguf) Tensors() Tensors { + return Tensors{ + Items: llm.tensors, + Offset: llm.tensorOffset, + } +} + +func (llm *gguf) numTensor() uint64 { + switch llm.Version { + case 1: + return uint64(llm.V1.NumTensor) + case 2: + return llm.V2.NumTensor + default: + return llm.V3.NumTensor + } +} + +func (llm *gguf) numKV() uint64 { + switch llm.Version { + case 1: + return uint64(llm.V1.NumKV) + case 2: + return llm.V2.NumKV + default: + return llm.V3.NumKV + } +} + +func (llm *gguf) Decode(rs io.ReadSeeker) error { + // decode key-values + for i := 0; uint64(i) < llm.numKV(); i++ { + k, err := readGGUFString(llm, rs) + if err != nil { + return err + } + + t, err := readGGUF[uint32](llm, rs) + if err != nil { + return err + } + + var v any + switch t { + case ggufTypeUint8: + v, err = readGGUF[uint8](llm, rs) + case ggufTypeInt8: + v, err = readGGUF[int8](llm, rs) + case ggufTypeUint16: + v, err = readGGUF[uint16](llm, rs) + case ggufTypeInt16: + v, err = readGGUF[int16](llm, rs) + case ggufTypeUint32: + v, err = readGGUF[uint32](llm, rs) + case ggufTypeInt32: + v, err = readGGUF[int32](llm, rs) + case ggufTypeUint64: + v, err = readGGUF[uint64](llm, rs) + case ggufTypeInt64: + v, err = readGGUF[int64](llm, rs) + case ggufTypeFloat32: + v, err = readGGUF[float32](llm, rs) + case ggufTypeFloat64: + 
v, err = readGGUF[float64](llm, rs) + case ggufTypeBool: + v, err = readGGUF[bool](llm, rs) + case ggufTypeString: + v, err = readGGUFString(llm, rs) + case ggufTypeArray: + v, err = readGGUFArray(llm, rs) + default: + return fmt.Errorf("invalid type: %d", t) + } + + if err != nil { + return err + } + + llm.kv[k] = v + } + + // decode tensors + for range llm.numTensor() { + name, err := readGGUFString(llm, rs) + if err != nil { + return fmt.Errorf("failed to read tensor name: %w", err) + } + + // dims is the number of dimensions in the tensor + dims, err := readGGUF[uint32](llm, rs) + if err != nil { + return fmt.Errorf("failed to read tensor dimensions: %w", err) + } + + shape := make([]uint64, dims) + for i := 0; uint32(i) < dims; i++ { + shape[i], err = readGGUF[uint64](llm, rs) + if err != nil { + return fmt.Errorf("failed to read tensor shape: %w", err) + } + } + + kind, err := readGGUF[uint32](llm, rs) + if err != nil { + return fmt.Errorf("failed to read tensor kind: %w", err) + } + + offset, err := readGGUF[uint64](llm, rs) + if err != nil { + return fmt.Errorf("failed to read tensor offset: %w", err) + } + + tensor := Tensor{ + Name: name, + Kind: kind, + Offset: offset, + Shape: shape[:], + } + + llm.tensors = append(llm.tensors, &tensor) + llm.parameters += tensor.parameters() + } + + // patch KV with parameter count + llm.kv["general.parameter_count"] = llm.parameters + + alignment, ok := llm.kv["general.alignment"].(uint32) + if !ok { + alignment = 32 + } + + offset, err := rs.Seek(0, io.SeekCurrent) + if err != nil { + return err + } + + padding := ggufPadding(offset, int64(alignment)) + llm.tensorOffset = uint64(offset + padding) + + for _, tensor := range llm.tensors { + offset, err := rs.Seek(0, io.SeekCurrent) + if err != nil { + return fmt.Errorf("failed to get current offset: %w", err) + } + + padding := ggufPadding(offset, int64(alignment)) + if _, err := rs.Seek(padding, io.SeekCurrent); err != nil { + return fmt.Errorf("failed to seek to init 
padding: %w", err) + } + + if _, err := rs.Seek(int64(tensor.Size()), io.SeekCurrent); err != nil { + return fmt.Errorf("failed to seek to tensor: %w", err) + } + } + + return nil +} + +func readGGUF[T any](llm *gguf, r io.Reader) (T, error) { + var t T + err := binary.Read(r, llm.ByteOrder, &t) + return t, err +} + +func writeGGUF[V any](w io.Writer, t uint32, v V) error { + if err := binary.Write(w, binary.LittleEndian, t); err != nil { + return err + } + + return binary.Write(w, binary.LittleEndian, v) +} + +func readGGUFV1String(llm *gguf, r io.Reader) (string, error) { + var length uint64 + if err := binary.Read(r, llm.ByteOrder, &length); err != nil { + return "", err + } + + var b bytes.Buffer + if _, err := io.CopyN(&b, r, int64(length)); err != nil { + return "", err + } + + // gguf v1 strings are null-terminated + b.Truncate(b.Len() - 1) + + return b.String(), nil +} + +func discardGGUFString(llm *gguf, r io.Reader) error { + buf := llm.scratch[:8] + _, err := io.ReadFull(r, buf) + if err != nil { + return err + } + + size := int(llm.ByteOrder.Uint64(buf)) + for size > 0 { + n, err := r.Read(llm.scratch[:min(size, cap(llm.scratch))]) + if err != nil { + return err + } + size -= n + } + return nil +} + +func readGGUFString(llm *gguf, r io.Reader) (string, error) { + if llm.Version == 1 { + return readGGUFV1String(llm, r) + } + + buf := llm.scratch[:8] + _, err := io.ReadFull(r, buf) + if err != nil { + return "", err + } + + length := int(llm.ByteOrder.Uint64(buf)) + if length > len(llm.scratch) { + buf = make([]byte, length) + } else { + buf = llm.scratch[:length] + } + clear(buf) + + _, err = io.ReadFull(r, buf) + if err != nil { + return "", err + } + return string(buf), nil +} + +func writeGGUFString(w io.Writer, s string) error { + if err := binary.Write(w, binary.LittleEndian, ggufTypeString); err != nil { + return err + } + + if err := binary.Write(w, binary.LittleEndian, uint64(len(s))); err != nil { + return err + } + + _, err := io.Copy(w, 
strings.NewReader(s)) + return err +} + +type array struct { + size int + values []any +} + +func (a *array) MarshalJSON() ([]byte, error) { + return json.Marshal(a.values) +} + +func readGGUFV1Array(llm *gguf, r io.Reader) (*array, error) { + t, err := readGGUF[uint32](llm, r) + if err != nil { + return nil, err + } + + n, err := readGGUF[uint32](llm, r) + if err != nil { + return nil, err + } + + a := &array{size: int(n)} + if llm.canCollectArray(int(n)) { + a.values = make([]any, 0, int(n)) + } + + for i := range n { + var e any + switch t { + case ggufTypeUint8: + e, err = readGGUF[uint8](llm, r) + case ggufTypeInt8: + e, err = readGGUF[int8](llm, r) + case ggufTypeUint16: + e, err = readGGUF[uint16](llm, r) + case ggufTypeInt16: + e, err = readGGUF[int16](llm, r) + case ggufTypeUint32: + e, err = readGGUF[uint32](llm, r) + case ggufTypeInt32: + e, err = readGGUF[int32](llm, r) + case ggufTypeUint64: + e, err = readGGUF[uint64](llm, r) + case ggufTypeInt64: + e, err = readGGUF[int64](llm, r) + case ggufTypeFloat32: + e, err = readGGUF[float32](llm, r) + case ggufTypeFloat64: + e, err = readGGUF[float64](llm, r) + case ggufTypeBool: + e, err = readGGUF[bool](llm, r) + case ggufTypeString: + e, err = readGGUFV1String(llm, r) + default: + return nil, fmt.Errorf("invalid array type: %d", t) + } + if err != nil { + return nil, err + } + + if a.values != nil { + a.values[i] = e + } + } + + return a, nil +} + +func readGGUFArray(llm *gguf, r io.Reader) (*array, error) { + if llm.Version == 1 { + return readGGUFV1Array(llm, r) + } + + t, err := readGGUF[uint32](llm, r) + if err != nil { + return nil, err + } + + n, err := readGGUF[uint64](llm, r) + if err != nil { + return nil, err + } + + a := &array{size: int(n)} + if llm.canCollectArray(int(n)) { + a.values = make([]any, int(n)) + } + + for i := range n { + var e any + switch t { + case ggufTypeUint8: + e, err = readGGUF[uint8](llm, r) + case ggufTypeInt8: + e, err = readGGUF[int8](llm, r) + case ggufTypeUint16: + 
e, err = readGGUF[uint16](llm, r) + case ggufTypeInt16: + e, err = readGGUF[int16](llm, r) + case ggufTypeUint32: + e, err = readGGUF[uint32](llm, r) + case ggufTypeInt32: + e, err = readGGUF[int32](llm, r) + case ggufTypeUint64: + e, err = readGGUF[uint64](llm, r) + case ggufTypeInt64: + e, err = readGGUF[int64](llm, r) + case ggufTypeFloat32: + e, err = readGGUF[float32](llm, r) + case ggufTypeFloat64: + e, err = readGGUF[float64](llm, r) + case ggufTypeBool: + e, err = readGGUF[bool](llm, r) + case ggufTypeString: + if a.values != nil { + e, err = readGGUFString(llm, r) + } else { + err = discardGGUFString(llm, r) + } + default: + return nil, fmt.Errorf("invalid array type: %d", t) + } + if err != nil { + return nil, err + } + + if a.values != nil { + a.values[i] = e + } + } + + return a, nil +} + +// writeGGUFArray writes a slice s of type E to the write with a gguf type of t +func writeGGUFArray[S ~[]E, E any](w io.Writer, t uint32, s S) error { + if err := binary.Write(w, binary.LittleEndian, ggufTypeArray); err != nil { + return err + } + + if err := binary.Write(w, binary.LittleEndian, t); err != nil { + return err + } + + if err := binary.Write(w, binary.LittleEndian, uint64(len(s))); err != nil { + return err + } + + return binary.Write(w, binary.LittleEndian, s) +} + +func WriteGGUF(ws io.WriteSeeker, kv KV, ts []Tensor) error { + if err := binary.Write(ws, binary.LittleEndian, []byte("GGUF")); err != nil { + return err + } + + if err := binary.Write(ws, binary.LittleEndian, uint32(3)); err != nil { + return err + } + + if err := binary.Write(ws, binary.LittleEndian, uint64(len(ts))); err != nil { + return err + } + + if err := binary.Write(ws, binary.LittleEndian, uint64(len(kv))); err != nil { + return err + } + + keys := slices.Collect(maps.Keys(kv)) + slices.Sort(keys) + + for _, key := range keys { + if err := ggufWriteKV(ws, key, kv[key]); err != nil { + return err + } + } + + slices.SortStableFunc(ts, func(a, b Tensor) int { + if i, j := 
a.block(), b.block(); i < 0 && j > 0 { + return 1 + } else if i > 0 && j < 0 { + return -1 + } else { + return cmp.Compare(i, j) + } + }) + + var s uint64 + for _, t := range ts { + t.Offset = s + if err := ggufWriteTensorInfo(ws, t); err != nil { + return err + } + s += t.Size() + } + + var alignment int64 = 32 + for _, t := range ts { + if err := ggufWriteTensor(ws, t, alignment); err != nil { + return err + } + } + + return nil +} + +func ggufWriteKV(ws io.WriteSeeker, k string, v any) error { + slog.Debug(k, "type", fmt.Sprintf("%T", v)) + if err := binary.Write(ws, binary.LittleEndian, uint64(len(k))); err != nil { + return err + } + + if err := binary.Write(ws, binary.LittleEndian, []byte(k)); err != nil { + return err + } + + var err error + switch v := v.(type) { + case uint32: + err = writeGGUF(ws, ggufTypeUint32, v) + case float32: + err = writeGGUF(ws, ggufTypeFloat32, v) + case bool: + err = writeGGUF(ws, ggufTypeBool, v) + case string: + err = writeGGUFString(ws, v) + case []int32: + err = writeGGUFArray(ws, ggufTypeInt32, v) + case []uint32: + err = writeGGUFArray(ws, ggufTypeUint32, v) + case []float32: + err = writeGGUFArray(ws, ggufTypeFloat32, v) + case []string: + if err := binary.Write(ws, binary.LittleEndian, ggufTypeArray); err != nil { + return err + } + + if err := binary.Write(ws, binary.LittleEndian, ggufTypeString); err != nil { + return err + } + + if err := binary.Write(ws, binary.LittleEndian, uint64(len(v))); err != nil { + return err + } + + for _, e := range v { + if err := binary.Write(ws, binary.LittleEndian, uint64(len(e))); err != nil { + return err + } + + if err := binary.Write(ws, binary.LittleEndian, []byte(e)); err != nil { + return err + } + } + default: + return fmt.Errorf("improper type for '%s'", k) + } + + return err +} + +func ggufWriteTensorInfo(ws io.WriteSeeker, t Tensor) error { + slog.Debug(t.Name, "kind", t.Kind, "shape", t.Shape, "offset", t.Offset) + if err := binary.Write(ws, binary.LittleEndian, 
uint64(len(t.Name))); err != nil { + return err + } + + if err := binary.Write(ws, binary.LittleEndian, []byte(t.Name)); err != nil { + return err + } + + if err := binary.Write(ws, binary.LittleEndian, uint32(len(t.Shape))); err != nil { + return err + } + + for i := range len(t.Shape) { + if err := binary.Write(ws, binary.LittleEndian, t.Shape[len(t.Shape)-i-1]); err != nil { + return err + } + } + + if err := binary.Write(ws, binary.LittleEndian, t.Kind); err != nil { + return err + } + + return binary.Write(ws, binary.LittleEndian, t.Offset) +} + +func ggufWriteTensor(ws io.WriteSeeker, t Tensor, alignment int64) error { + offset, err := ws.Seek(0, io.SeekCurrent) + if err != nil { + return err + } + + if err := binary.Write(ws, binary.LittleEndian, bytes.Repeat([]byte{0}, int(ggufPadding(offset, alignment)))); err != nil { + return err + } + + _, err = t.WriteTo(ws) + return err +} + +func ggufPadding(offset, align int64) int64 { + return (align - offset%align) % align +} diff --git a/fs/ggml/type.go b/fs/ggml/type.go new file mode 100644 index 000000000..a24d8b34e --- /dev/null +++ b/fs/ggml/type.go @@ -0,0 +1,180 @@ +package ggml + +import "fmt" + +type fileType uint32 + +const ( + fileTypeF32 fileType = iota + fileTypeF16 + fileTypeQ4_0 + fileTypeQ4_1 + fileTypeQ4_1_F16 + fileTypeQ4_2 // unused + fileTypeQ4_3 // unused + fileTypeQ8_0 + fileTypeQ5_0 + fileTypeQ5_1 + fileTypeQ2_K + fileTypeQ3_K_S + fileTypeQ3_K_M + fileTypeQ3_K_L + fileTypeQ4_K_S + fileTypeQ4_K_M + fileTypeQ5_K_S + fileTypeQ5_K_M + fileTypeQ6_K + fileTypeIQ2_XXS + fileTypeIQ2_XS + fileTypeQ2_K_S + fileTypeIQ3_XS + fileTypeIQ3_XXS + fileTypeIQ1_S + fileTypeIQ4_NL + fileTypeIQ3_S + fileTypeIQ2_S + fileTypeIQ4_XS + fileTypeIQ2_M + fileTypeIQ1_M + fileTypeBF16 + + fileTypeUnknown +) + +func ParseFileType(s string) (fileType, error) { + switch s { + case "F32": + return fileTypeF32, nil + case "F16": + return fileTypeF16, nil + case "Q4_0": + return fileTypeQ4_0, nil + case "Q4_1": + return 
fileTypeQ4_1, nil + case "Q4_1_F16": + return fileTypeQ4_1_F16, nil + case "Q8_0": + return fileTypeQ8_0, nil + case "Q5_0": + return fileTypeQ5_0, nil + case "Q5_1": + return fileTypeQ5_1, nil + case "Q2_K": + return fileTypeQ2_K, nil + case "Q3_K_S": + return fileTypeQ3_K_S, nil + case "Q3_K_M": + return fileTypeQ3_K_M, nil + case "Q3_K_L": + return fileTypeQ3_K_L, nil + case "Q4_K_S": + return fileTypeQ4_K_S, nil + case "Q4_K_M": + return fileTypeQ4_K_M, nil + case "Q5_K_S": + return fileTypeQ5_K_S, nil + case "Q5_K_M": + return fileTypeQ5_K_M, nil + case "Q6_K": + return fileTypeQ6_K, nil + case "IQ2_XXS": + return fileTypeIQ2_XXS, nil + case "IQ2_XS": + return fileTypeIQ2_XS, nil + case "Q2_K_S": + return fileTypeQ2_K_S, nil + case "IQ3_XS": + return fileTypeIQ3_XS, nil + case "IQ3_XXS": + return fileTypeIQ3_XXS, nil + case "IQ1_S": + return fileTypeIQ1_S, nil + case "IQ4_NL": + return fileTypeIQ4_NL, nil + case "IQ3_S": + return fileTypeIQ3_S, nil + case "IQ2_S": + return fileTypeIQ2_S, nil + case "IQ4_XS": + return fileTypeIQ4_XS, nil + case "IQ2_M": + return fileTypeIQ2_M, nil + case "IQ1_M": + return fileTypeIQ1_M, nil + case "BF16": + return fileTypeBF16, nil + default: + return fileTypeUnknown, fmt.Errorf("unknown fileType: %s", s) + } +} + +func (t fileType) String() string { + switch t { + case fileTypeF32: + return "F32" + case fileTypeF16: + return "F16" + case fileTypeQ4_0: + return "Q4_0" + case fileTypeQ4_1: + return "Q4_1" + case fileTypeQ4_1_F16: + return "Q4_1_F16" + case fileTypeQ8_0: + return "Q8_0" + case fileTypeQ5_0: + return "Q5_0" + case fileTypeQ5_1: + return "Q5_1" + case fileTypeQ2_K: + return "Q2_K" + case fileTypeQ3_K_S: + return "Q3_K_S" + case fileTypeQ3_K_M: + return "Q3_K_M" + case fileTypeQ3_K_L: + return "Q3_K_L" + case fileTypeQ4_K_S: + return "Q4_K_S" + case fileTypeQ4_K_M: + return "Q4_K_M" + case fileTypeQ5_K_S: + return "Q5_K_S" + case fileTypeQ5_K_M: + return "Q5_K_M" + case fileTypeQ6_K: + return "Q6_K" + case 
fileTypeIQ2_XXS: + return "IQ2_XXS" + case fileTypeIQ2_XS: + return "IQ2_XS" + case fileTypeQ2_K_S: + return "Q2_K_S" + case fileTypeIQ3_XS: + return "IQ3_XS" + case fileTypeIQ3_XXS: + return "IQ3_XXS" + case fileTypeIQ1_S: + return "IQ1_S" + case fileTypeIQ4_NL: + return "IQ4_NL" + case fileTypeIQ3_S: + return "IQ3_S" + case fileTypeIQ2_S: + return "IQ2_S" + case fileTypeIQ4_XS: + return "IQ4_XS" + case fileTypeIQ2_M: + return "IQ2_M" + case fileTypeIQ1_M: + return "IQ1_M" + case fileTypeBF16: + return "BF16" + default: + return "unknown" + } +} + +func (t fileType) Value() uint32 { + return uint32(t) +} diff --git a/util/bufioutil/buffer_seeker.go b/fs/util/bufioutil/buffer_seeker.go similarity index 100% rename from util/bufioutil/buffer_seeker.go rename to fs/util/bufioutil/buffer_seeker.go diff --git a/util/bufioutil/buffer_seeker_test.go b/fs/util/bufioutil/buffer_seeker_test.go similarity index 100% rename from util/bufioutil/buffer_seeker_test.go rename to fs/util/bufioutil/buffer_seeker_test.go diff --git a/go.mod b/go.mod index 66a4f77e3..52c28f9b4 100644 --- a/go.mod +++ b/go.mod @@ -18,11 +18,14 @@ require ( require ( github.com/agnivade/levenshtein v1.1.1 github.com/d4l3k/go-bfloat16 v0.0.0-20211005043715-690c3bdd05f1 + github.com/dlclark/regexp2 v1.11.4 + github.com/emirpasic/gods/v2 v2.0.0-alpha github.com/google/go-cmp v0.6.0 github.com/mattn/go-runewidth v0.0.14 github.com/nlpodyssey/gopickle v0.3.0 github.com/pdevine/tensor v0.0.0-20240510204454-f88f4562727c golang.org/x/image v0.22.0 + gonum.org/v1/gonum v0.15.0 ) require ( @@ -42,7 +45,6 @@ require ( github.com/xtgo/set v1.0.0 // indirect go4.org/unsafe/assume-no-moving-gc v0.0.0-20231121144256-b99613f794b6 // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect - gonum.org/v1/gonum v0.15.0 // indirect gorgonia.org/vecf32 v0.9.0 // indirect gorgonia.org/vecf64 v0.9.0 // indirect ) diff --git a/go.sum b/go.sum index b3093ceb9..b94898df9 100644 --- a/go.sum +++ b/go.sum @@ 
-42,8 +42,12 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48 h1:fRzb/w+pyskVMQ+UbP35JkH8yB7MYb4q/qhBarqZE6g= github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= +github.com/dlclark/regexp2 v1.11.4 h1:rPYF9/LECdNymJufQKmri9gV604RvvABwgOA8un7yAo= +github.com/dlclark/regexp2 v1.11.4/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= +github.com/emirpasic/gods/v2 v2.0.0-alpha h1:dwFlh8pBg1VMOXWGipNMRt8v96dKAIvBehtCt6OtunU= +github.com/emirpasic/gods/v2 v2.0.0-alpha/go.mod h1:W0y4M2dtBB9U5z3YlghmpuUhiaZT2h6yoeE+C1sCp6A= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= diff --git a/llm/ggml.go b/llm/ggml.go index 2710f7b75..a4cf6f501 100644 --- a/llm/ggml.go +++ b/llm/ggml.go @@ -9,7 +9,7 @@ import ( "strings" "sync" - "github.com/ollama/ollama/util/bufioutil" + "github.com/ollama/ollama/fs/util/bufioutil" ) type GGML struct { diff --git a/ml/backend.go b/ml/backend.go new file mode 100644 index 000000000..b9efad8c2 --- /dev/null +++ b/ml/backend.go @@ -0,0 +1,191 @@ +package ml + +import ( + "bytes" + "encoding/binary" + "fmt" + "io" + "strings" +) + +type Config interface { + Architecture() string + String(string, ...string) string + Uint(string, ...uint32) uint32 + Float(string, ...float32) float32 + + Strings(string, ...[]string) []string + Uints(string, ...[]uint32) 
[]uint32 +} + +type Backend interface { + Config() Config + Get(name string) Tensor + NewContext() Context +} + +var backends = make(map[string]func(io.ReadSeeker) (Backend, error)) + +func RegisterBackend(name string, f func(io.ReadSeeker) (Backend, error)) { + if _, ok := backends[name]; ok { + panic("backend: backend already registered") + } + + backends[name] = f +} + +func NewBackend(r io.ReadSeeker) (Backend, error) { + if backend, ok := backends["ggml"]; ok { + return backend(r) + } + + return nil, fmt.Errorf("unsupported backend") +} + +type Context interface { + Zeros(dtype DType, shape ...int) Tensor + FromFloatSlice(s []float32, shape ...int) (Tensor, error) + FromIntSlice(s []int32, shape ...int) (Tensor, error) + + Forward(Tensor) + Compute(Tensor) Tensor + Close() error +} + +type Tensor interface { + Dim(n int) int64 + Stride(n int) int64 + + Shape() []int64 + DType() DType + + Bytes() []byte + Floats() []float32 + + Add(ctx Context, t2 Tensor) Tensor + Mul(ctx Context, t2 Tensor) Tensor + Mulmat(ctx Context, t2 Tensor) Tensor + + Softmax(ctx Context) Tensor + Norm(ctx Context, eps float32) Tensor + RMSNorm(ctx Context, eps float32) Tensor + Scale(ctx Context, s float64) Tensor + + Conv2D(ctx Context, weight Tensor, s0, s1, p0, p1, d0, d1 int) Tensor + Rope(ctx Context, positionIDs, ropeFactors Tensor, dim uint32, base, scale float32) Tensor + + Tanh(ctx Context) Tensor + GELU(ctx Context) Tensor + SILU(ctx Context) Tensor + + Reshape(ctx Context, shape ...int64) Tensor + View(ctx Context, offset int, shape ...int) Tensor + Permute(ctx Context, shape ...int) Tensor + Contiguous(ctx Context) Tensor + + Pad(ctx Context, shape ...int64) Tensor + Unpad(ctx Context, shape ...int64) Tensor + + Stack(ctx Context, dim int, s ...Tensor) Tensor + Concat(ctx Context, t2 Tensor, dim int) Tensor + Rows(ctx Context, t2 Tensor) Tensor + Copy(ctx Context, t2 Tensor) Tensor +} + +type number interface { + ~int | ~int8 | ~int16 | ~int32 | ~int64 | + ~uint | ~uint8 | 
~uint16 | ~uint32 | ~uint64 | + ~float32 | ~float64 | + ~complex64 | ~complex128 +} + +func mul[T number](s ...T) T { + p := T(1) + for _, v := range s { + p *= v + } + + return p +} + +type DumpOptions struct { + // Items is the number of elements to print at the beginning and end of each dimension. + Items int64 + + // Precision is the number of decimal places to print. Applies to float32 and float64. + Precision int +} + +func Dump(t Tensor, opts ...DumpOptions) string { + if len(opts) < 1 { + opts = append(opts, DumpOptions{ + Items: 3, + Precision: 4, + }) + } + + switch t.DType() { + case DTypeF32: + return dump[[]float32](t, opts[0]) + case DTypeI32: + return dump[[]int32](t, opts[0]) + default: + return "" + } +} + +func dump[S ~[]E, E number](t Tensor, opts DumpOptions) string { + bts := t.Bytes() + if bts == nil { + return "" + } + + s := make(S, mul(t.Shape()...)) + if err := binary.Read(bytes.NewBuffer(t.Bytes()), binary.LittleEndian, &s); err != nil { + panic(err) + } + + shape := t.Shape() + + var sb strings.Builder + var f func([]int64, int64) + f = func(dims []int64, stride int64) { + prefix := strings.Repeat(" ", len(shape)-len(dims)+1) + fmt.Fprint(&sb, "[") + defer func() { fmt.Fprint(&sb, "]") }() + for i := int64(0); i < dims[0]; i++ { + if i >= opts.Items && i < dims[0]-opts.Items { + fmt.Fprint(&sb, "..., ") + // skip to next printable element + skip := dims[0] - 2*opts.Items + if len(dims) > 1 { + stride += mul(append(dims[1:], skip)...) + fmt.Fprint(&sb, strings.Repeat("\n", len(dims)-1), prefix) + } + i += skip - 1 + } else if len(dims) > 1 { + f(dims[1:], stride) + stride += mul(dims[1:]...) 
+ if i < dims[0]-1 { + fmt.Fprint(&sb, ",", strings.Repeat("\n", len(dims)-1), prefix) + } + } else { + fmt.Fprint(&sb, s[stride+i]) + if i < dims[0]-1 { + fmt.Fprint(&sb, ", ") + } + } + } + } + f(shape, 0) + + return sb.String() +} + +type DType int + +const ( + DTypeF32 DType = iota + DTypeI32 + DTypeOther +) diff --git a/ml/backend/backend.go b/ml/backend/backend.go new file mode 100644 index 000000000..55063fb3b --- /dev/null +++ b/ml/backend/backend.go @@ -0,0 +1,5 @@ +package backend + +import ( + _ "github.com/ollama/ollama/ml/backend/ggml" +) diff --git a/ml/backend/ggml/backend.go b/ml/backend/ggml/backend.go new file mode 100644 index 000000000..302df5b8d --- /dev/null +++ b/ml/backend/ggml/backend.go @@ -0,0 +1,470 @@ +package ggml + +// #cgo CPPFLAGS: -DNDEBUG +// #include +// #include +// #include "ggml.h" +// #include "ggml-backend.h" +import "C" + +import ( + "bytes" + "fmt" + "io" + "log/slog" + "unsafe" + + "github.com/ollama/ollama/format" + "github.com/ollama/ollama/fs/ggml" + "github.com/ollama/ollama/ml" +) + +type Backend struct { + c *C.struct_ggml_context + b *C.struct_ggml_backend + bb *C.struct_ggml_backend_buffer + + ggml.KV + ggml.Tensors +} + +func New(r io.ReadSeeker) (ml.Backend, error) { + f, _, err := ggml.DecodeGGML(r, -1) + if err != nil { + return nil, err + } + + slog.Info( + "", + "architecture", f.KV().Architecture(), + "file_type", f.KV().FileType(), + "name", f.KV().String("general.name"), + "description", f.KV().String("general.description"), + "num_tensors", len(f.Tensors().Items), + "num_key_values", len(f.KV()), + ) + + c := C.ggml_init(C.struct_ggml_init_params{ + mem_size: C.size_t(len(f.Tensors().Items)) * C.ggml_tensor_overhead(), + mem_buffer: nil, + no_alloc: true, + }) + + for _, t := range f.Tensors().Items { + func() { + cname := C.CString(t.Name) + defer C.free(unsafe.Pointer(cname)) + + tt := C.ggml_new_tensor(c, t.Kind, C.int(len(t.Shape)), (*C.int64_t)(unsafe.Pointer(&t.Shape[0]))) + C.ggml_set_name(tt, 
cname) + }() + } + + b := newBackend() + bb := C.ggml_backend_alloc_ctx_tensors(c, b) + for _, t := range f.Tensors().Items { + if _, err := r.Seek(int64(f.Tensors().Offset+t.Offset), io.SeekStart); err != nil { + return nil, err + } + + var b bytes.Buffer + n, err := io.CopyN(&b, r, int64(t.Size())) + if err != nil { + return nil, err + } + + if n != int64(t.Size()) { + return nil, fmt.Errorf("expected %d bytes, got %d", t.Size(), n) + } + + func() { + cname := C.CString(t.Name) + defer C.free(unsafe.Pointer(cname)) + + cbytes := C.CBytes(b.Bytes()) + defer C.free(cbytes) + + C.ggml_backend_tensor_set(C.ggml_get_tensor(c, cname), cbytes, 0, C.size_t(n)) + }() + } + + return &Backend{c, b, bb, f.KV(), f.Tensors()}, nil +} + +func init() { + ml.RegisterBackend("ggml", New) +} + +func (b *Backend) Config() ml.Config { + return b.KV +} + +func (b *Backend) Get(name string) ml.Tensor { + cname := C.CString(name) + defer C.free(unsafe.Pointer(cname)) + if t := C.ggml_get_tensor(b.c, cname); t != nil { + return &Tensor{t} + } + + return nil +} + +func (b *Backend) NewContext() ml.Context { + n := max(8192, len(b.Tensors.Items)*5) + bts := make([]byte, C.size_t(n)*C.ggml_tensor_overhead()+C.ggml_graph_overhead_custom(C.size_t(n), false)) + c := C.ggml_init(C.struct_ggml_init_params{ + mem_buffer: unsafe.Pointer(&bts[0]), + mem_size: C.size_t(len(bts)), + no_alloc: true, + }) + return &Context{ + b: b.b, + c: c, + g: C.ggml_new_graph_custom(c, C.size_t(n), false), + } +} + +type Context struct { + b *C.struct_ggml_backend + c *C.struct_ggml_context + g *C.struct_ggml_cgraph +} + +func (c *Context) Forward(t ml.Tensor) { + C.ggml_build_forward_expand(c.g, t.(*Tensor).t) +} + +func (c *Context) Compute(t ml.Tensor) ml.Tensor { + c.Forward(t) + + a := C.ggml_gallocr_new(C.ggml_backend_get_default_buffer_type(c.b)) + C.ggml_gallocr_alloc_graph(a, c.g) + slog.Debug("compute graph memory", "require", format.HumanBytes2(uint64(C.ggml_gallocr_get_buffer_size(a, 0)))) + + 
C.ggml_backend_graph_compute(c.b, c.g) + return &Tensor{ + C.ggml_graph_node(c.g, C.ggml_graph_n_nodes(c.g)-1), + } +} + +func (c Context) Zeros(dtype ml.DType, shape ...int) ml.Tensor { + if len(shape) < 1 || len(shape) > 4 { + panic("unsupported number of dimensions") + } + + for _, dim := range shape { + if dim < 1 { + panic("invalid shape") + } + } + + var t *C.struct_ggml_tensor + switch dtype { + case ml.DTypeF32: + t = C.ggml_new_tensor(c.c, C.GGML_TYPE_F32, C.int(len(shape)), (*C.int64_t)(unsafe.Pointer(&shape[0]))) + case ml.DTypeI32: + t = C.ggml_new_tensor(c.c, C.GGML_TYPE_I32, C.int(len(shape)), (*C.int64_t)(unsafe.Pointer(&shape[0]))) + default: + panic("unsupported dtype") + } + + b := C.ggml_backend_alloc_buffer(c.b, C.ggml_nbytes(t)) + C.ggml_backend_tensor_alloc(b, t, C.ggml_backend_buffer_get_base(b)) + C.ggml_set_f32(t, 0.) + return &Tensor{t} +} + +func fromSlice[S ~[]E, E float32 | int32](ctx Context, s S, shape []int, dtype uint32) (ml.Tensor, error) { + n := len(s) + for _, v := range shape { + n /= v + } + + if n != 1 { + return nil, fmt.Errorf("invalid shape %v for %d elements", shape, len(s)) + } + + t := C.ggml_new_tensor(ctx.c, dtype, C.int(len(shape)), (*C.int64_t)(unsafe.Pointer(&shape[0]))) + b := C.ggml_backend_alloc_buffer(ctx.b, C.ggml_nbytes(t)) + C.ggml_backend_tensor_alloc(b, t, C.ggml_backend_buffer_get_base(b)) + C.ggml_backend_tensor_set(t, unsafe.Pointer(&s[0]), 0, C.ggml_nbytes(t)) + return &Tensor{t}, nil +} + +func (c Context) FromFloatSlice(s []float32, shape ...int) (ml.Tensor, error) { + return fromSlice(c, s, shape, C.GGML_TYPE_F32) +} + +func (c Context) FromIntSlice(s []int32, shape ...int) (ml.Tensor, error) { + return fromSlice(c, s, shape, C.GGML_TYPE_I32) +} + +func (c *Context) Close() error { + C.ggml_free(c.c) + return nil +} + +type Tensor struct { + t *C.struct_ggml_tensor +} + +func (t *Tensor) LogValue() slog.Value { + return slog.GroupValue( + slog.String("name", C.GoString(C.ggml_get_name(t.t))), + 
slog.String("type", C.GoString(C.ggml_type_name(t.t._type))), + slog.Any("shape", t.Shape()), + ) +} + +func (t *Tensor) Dim(n int) int64 { + return int64(t.t.ne[n]) +} + +func (t *Tensor) Stride(n int) int64 { + return int64(t.t.nb[n]) +} + +func (t *Tensor) Shape() []int64 { + shape := make([]int64, C.ggml_n_dims(t.t)) + for i := range shape { + shape[i] = t.Dim(i) + } + + return shape +} + +func (t *Tensor) Bytes() []byte { + if bts := C.ggml_get_data(t.t); bts != nil { + return C.GoBytes(bts, C.int(C.ggml_nbytes(t.t))) + } + + return nil +} + +func (t *Tensor) Floats() []float32 { + if s := C.ggml_get_data_f32(t.t); s != nil { + f32s := make([]float32, C.ggml_nelements(t.t)) + for i, v := range unsafe.Slice(s, C.ggml_nelements(t.t)) { + f32s[i] = float32(v) + } + + return f32s + } + + return nil +} + +func (t *Tensor) DType() ml.DType { + switch t.t._type { + case C.GGML_TYPE_F32: + return ml.DTypeF32 + case C.GGML_TYPE_I32: + return ml.DTypeI32 + default: + return ml.DTypeOther + } +} + +func (t *Tensor) Add(ctx ml.Context, t2 ml.Tensor) ml.Tensor { + return &Tensor{ + C.ggml_add(ctx.(*Context).c, t.t, t2.(*Tensor).t), + } +} + +func (t *Tensor) Stack(ctx ml.Context, dim int, s ...ml.Tensor) ml.Tensor { + if len(s) > 0 { + return t.Concat(ctx, s[0].Stack(ctx, dim, s[1:]...), dim) + } + + return t +} + +func (t *Tensor) Concat(ctx ml.Context, t2 ml.Tensor, dim int) ml.Tensor { + return &Tensor{ + C.ggml_concat(ctx.(*Context).c, t.t, t2.(*Tensor).t, C.int(dim)), + } +} + +func (t *Tensor) Contiguous(ctx ml.Context) ml.Tensor { + return &Tensor{ + C.ggml_cont(ctx.(*Context).c, t.t), + } +} + +func (t *Tensor) Mul(ctx ml.Context, t2 ml.Tensor) ml.Tensor { + return &Tensor{ + C.ggml_mul(ctx.(*Context).c, t.t, t2.(*Tensor).t), + } +} + +func (t *Tensor) Mulmat(ctx ml.Context, t2 ml.Tensor) ml.Tensor { + return &Tensor{ + C.ggml_mul_mat(ctx.(*Context).c, t.t, t2.(*Tensor).t), + } +} + +func (t *Tensor) Norm(ctx ml.Context, eps float32) ml.Tensor { + return &Tensor{ + 
C.ggml_norm(ctx.(*Context).c, t.t, (C.float)(eps)), + } +} + +func (t *Tensor) RMSNorm(ctx ml.Context, eps float32) ml.Tensor { + return &Tensor{ + C.ggml_rms_norm(ctx.(*Context).c, t.t, C.float(eps)), + } +} + +func (t *Tensor) Pad(ctx ml.Context, shape ...int64) ml.Tensor { + if len(shape) != 4 { + panic("expected 4 dimensions") + } + + return &Tensor{ + C.ggml_pad(ctx.(*Context).c, t.t, C.int(shape[0]), C.int(shape[1]), C.int(shape[2]), C.int(shape[3])), + } +} + +func (t *Tensor) Permute(ctx ml.Context, shape ...int) ml.Tensor { + if len(shape) != 4 { + panic("expected 4 dimensions") + } + + return &Tensor{ + C.ggml_permute(ctx.(*Context).c, t.t, C.int(shape[0]), C.int(shape[1]), C.int(shape[2]), C.int(shape[3])), + } +} + +func (t *Tensor) Rows(ctx ml.Context, t2 ml.Tensor) ml.Tensor { + return &Tensor{ + C.ggml_get_rows(ctx.(*Context).c, t.t, t2.(*Tensor).t), + } +} + +func (t *Tensor) Copy(ctx ml.Context, t2 ml.Tensor) ml.Tensor { + return &Tensor{ + C.ggml_cpy(ctx.(*Context).c, t.t, t2.(*Tensor).t), + } +} + +func (t *Tensor) Reshape(ctx ml.Context, shape ...int64) ml.Tensor { + switch len(shape) { + case 1: + return &Tensor{ + C.ggml_reshape_1d(ctx.(*Context).c, t.t, C.int64_t(shape[0])), + } + case 2: + return &Tensor{ + C.ggml_reshape_2d(ctx.(*Context).c, t.t, C.int64_t(shape[0]), C.int64_t(shape[1])), + } + case 3: + return &Tensor{ + C.ggml_reshape_3d(ctx.(*Context).c, t.t, C.int64_t(shape[0]), C.int64_t(shape[1]), C.int64_t(shape[2])), + } + case 4: + return &Tensor{ + C.ggml_reshape_4d(ctx.(*Context).c, t.t, C.int64_t(shape[0]), C.int64_t(shape[1]), C.int64_t(shape[2]), C.int64_t(shape[3])), + } + default: + panic("unsupported number of dimensions") + } +} + +func (t *Tensor) Scale(ctx ml.Context, s float64) ml.Tensor { + return &Tensor{ + C.ggml_scale(ctx.(*Context).c, t.t, (C.float)(s)), + } +} + +func (t *Tensor) Softmax(ctx ml.Context) ml.Tensor { + return &Tensor{ + C.ggml_soft_max(ctx.(*Context).c, t.t), + } +} + +func (t *Tensor) Tanh(ctx 
ml.Context) ml.Tensor { + return &Tensor{ + C.ggml_tanh_inplace(ctx.(*Context).c, t.t), + } +} + +func (t *Tensor) Unpad(ctx ml.Context, shape ...int64) ml.Tensor { + if len(shape) != 4 { + panic("expected 4 dimensions") + } + + return &Tensor{ + C.ggml_unpad(ctx.(*Context).c, t.t, C.int(shape[0]), C.int(shape[1]), C.int(shape[2]), C.int(shape[3])), + } +} + +func (t *Tensor) View(ctx ml.Context, offset int, shape ...int) ml.Tensor { + switch len(shape) { + case 1: + return &Tensor{ + C.ggml_view_1d(ctx.(*Context).c, t.t, C.int64_t(shape[0]), C.size_t(offset)), + } + case 3: + return &Tensor{ + C.ggml_view_2d(ctx.(*Context).c, t.t, + C.int64_t(shape[0]), C.int64_t(shape[2]), + C.size_t(shape[1]), + C.size_t(offset)), + } + case 5: + return &Tensor{ + C.ggml_view_3d(ctx.(*Context).c, t.t, + C.int64_t(shape[0]), C.int64_t(shape[2]), C.int64_t(shape[4]), + C.size_t(shape[1]), C.size_t(shape[3]), + C.size_t(offset)), + } + case 7: + return &Tensor{ + C.ggml_view_4d(ctx.(*Context).c, t.t, + C.int64_t(shape[0]), C.int64_t(shape[2]), C.int64_t(shape[4]), C.int64_t(shape[6]), + C.size_t(shape[1]), C.size_t(shape[3]), C.size_t(shape[5]), + C.size_t(offset)), + } + default: + panic("unsupported number of dimensions") + } +} + +const ( + ropeTypeNorm C.int = iota +) + +func (t *Tensor) Rope(ctx ml.Context, positionIDs, ropeFactors ml.Tensor, ropeDim uint32, ropeBase, ropeScale float32) ml.Tensor { + return &Tensor{ + C.ggml_rope_ext( + ctx.(*Context).c, t.t, positionIDs.(*Tensor).t, ropeFactors.(*Tensor).t, + C.int(ropeDim), + 131072, // YaRN n_ctx_train + ropeTypeNorm, // ROPE_TYPE_NORM + C.float(ropeBase), + C.float(ropeScale), + 0., // YaRN ext_factor + 1., // YaRN attn_factor + 32., // YaRN beta_fast + 1., // YaRN beta_slow + ), + } +} + +func (t *Tensor) GELU(ctx ml.Context) ml.Tensor { + return &Tensor{ + C.ggml_gelu_inplace(ctx.(*Context).c, t.t), + } +} + +func (t *Tensor) SILU(ctx ml.Context) ml.Tensor { + return &Tensor{ + C.ggml_silu_inplace(ctx.(*Context).c, t.t), 
+ } +} + +func (t *Tensor) Conv2D(ctx ml.Context, t2 ml.Tensor, s0, s1, p0, p1, d0, d1 int) ml.Tensor { + return &Tensor{ + C.ggml_conv_2d(ctx.(*Context).c, t.t, t2.(*Tensor).t, C.int(s0), C.int(s1), C.int(p0), C.int(p1), C.int(d0), C.int(d1)), + } +} diff --git a/ml/backend/ggml/backend_cpu.go b/ml/backend/ggml/backend_cpu.go new file mode 100644 index 000000000..afc52d830 --- /dev/null +++ b/ml/backend/ggml/backend_cpu.go @@ -0,0 +1,8 @@ +package ggml + +// #include "ggml-backend.h" +import "C" + +func newCPUBackend() *C.struct_ggml_backend { + return C.ggml_backend_cpu_init() +} diff --git a/ml/backend/ggml/backend_darwin.go b/ml/backend/ggml/backend_darwin.go new file mode 100644 index 000000000..3b8c7dbfb --- /dev/null +++ b/ml/backend/ggml/backend_darwin.go @@ -0,0 +1,13 @@ +package ggml + +//go:generate sh -c "echo \"// Code generated $(date). DO NOT EDIT.\n\" >ggml-metal-embed.metal" +//go:generate sh -c "sed -e '/#include \"ggml-common.h\"/r ggml-common.h' -e '/#include \"ggml-common.h\"/d' ggml-metal.metal >>ggml-metal-embed.metal" + +// #cgo arm64 CPPFLAGS: -DGGML_USE_METAL -DGGML_METAL_EMBED_LIBRARY -DGGML_USE_ACCELERATE -DGGML_METAL_NDEBUG +// #cgo arm64 LDFLAGS: -framework Foundation -framework Metal -framework MetalKit -framework Accelerate +// #include "ggml-metal.h" +import "C" + +func newBackend() *C.struct_ggml_backend { + return C.ggml_backend_metal_init() +} diff --git a/ml/backend/ggml/backend_debug.go b/ml/backend/ggml/backend_debug.go new file mode 100644 index 000000000..9ddb2718c --- /dev/null +++ b/ml/backend/ggml/backend_debug.go @@ -0,0 +1,6 @@ +//go:build debug + +package ggml + +// #cgo CPPFLAGS: -DOLLAMA_DEBUG +import "C" diff --git a/ml/backend/ggml/backend_linux.go b/ml/backend/ggml/backend_linux.go new file mode 100644 index 000000000..57853940d --- /dev/null +++ b/ml/backend/ggml/backend_linux.go @@ -0,0 +1,10 @@ +package ggml + +// #cgo CPPFLAGS: -D_GNU_SOURCE +// #cgo LDFLAGS: -lm +// #include "ggml-backend.h" +import "C" + 
+func newBackend() *C.struct_ggml_backend { + return newCPUBackend() +} diff --git a/ml/backend/ggml/backend_windows.go b/ml/backend/ggml/backend_windows.go new file mode 100644 index 000000000..3b610f1a0 --- /dev/null +++ b/ml/backend/ggml/backend_windows.go @@ -0,0 +1,10 @@ +package ggml + +// #cgo CPPFLAGS: -D_WIN32_WINNT=0x602 +// #cgo LDFLAGS: -lmsvcrt -static -static-libgcc -static-libstdc++ +// #include "ggml-backend.h" +import "C" + +func newBackend() *C.struct_ggml_backend { + return newCPUBackend() +} diff --git a/ml/backend/ggml/ggml-aarch64.c b/ml/backend/ggml/ggml-aarch64.c new file mode 100644 index 000000000..0b804bb61 --- /dev/null +++ b/ml/backend/ggml/ggml-aarch64.c @@ -0,0 +1,3235 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +// SPDX-FileCopyrightText: Copyright 2024 Arm Limited and/or its affiliates +// SPDX-License-Identifier: MIT +// + +#define GGML_COMMON_IMPL_C +#include "ggml-common.h" + +#include "ggml-quants.h" +#include "ggml-impl.h" +#include "ggml-cpu-impl.h" + +#include +#include +#include +#include +#include // for qsort +#include // for GGML_ASSERT + +#include "ggml-aarch64.h" + +#if defined(__GNUC__) +#pragma GCC diagnostic ignored "-Woverlength-strings" +#elif defined(_MSC_VER) +#pragma warning(disable: 4244 4267) // possible loss of data +#endif + +#define UNUSED GGML_UNUSED + +// Functions to create the interleaved data layout formats + +// interleave 4 block_q4_0s in blocks of blck_size_interleave +// returns an interleaved block_q4_0x4 +// in the interleaved block_q4_0x4, place deltas for 4 block_q4_0 blocks +// first, then interleave quants from 4 block_q4_0s in blocks of blck_size_interleave +// +// - in : an array of block_q4_0 pointers +// - blck_size_interleave : the block_q4_0 quants bytes are interleaved in blocks of +// blck_size_interleave bytes +// - xor_mask : the mask to convert the nibbles in block_q4_0 quants bytes +// from bias offset form to pure sign form (this saves subtract +// operations durin unpacking) +// +#if defined(__AVX__) +#if defined(__F16C__) +#if defined(__AVX512F__) +#define GGML_F32Cx8x2_LOAD(x, y) _mm512_cvtph_ps(_mm256_set_m128i(_mm_loadu_si128((const __m128i *)(y)), _mm_loadu_si128((const __m128i *)(x)))) +#define GGML_F32Cx16_REPEAT_LOAD(x) _mm512_cvtph_ps(_mm256_set_m128i(x, x)) +#endif +// the _mm256_cvt intrinsics require F16C +#define GGML_F32Cx8_LOAD(x) _mm256_cvtph_ps(_mm_loadu_si128((const __m128i *)(x))) +#define GGML_F32Cx8_REPEAT_LOAD(x, loadMask) _mm256_cvtph_ps(_mm_shuffle_epi32(_mm_maskload_epi32((int const*)(x), loadMask), 68)) +#define GGML_F32Cx8_REARRANGE_LOAD(x, arrangeMask) _mm256_cvtph_ps(_mm_shuffle_epi8(_mm_loadu_si128((const __m128i *) x), arrangeMask)) +#else +#if defined(__AVX512F__) +static inline 
__m512 __avx512_f32cx8x2_load(ggml_fp16_t *x, ggml_fp16_t *y) { + float tmp[16]; + + for (int i = 0; i < 8; i++) { + tmp[i] = GGML_FP16_TO_FP32(x[i]); + } + + for (int i = 0; i < 8; i++) { + tmp[i + 8] = GGML_FP16_TO_FP32(y[i]); + } + + return _mm512_loadu_ps(tmp); +} +static inline __m512 __avx512_repeat_f32cx16_load(__m128i x) { + float tmp[16]; + uint16_t tmphalf[8]; + _mm_storeu_si128((__m128i*)tmphalf, x); + + for (int i = 0; i < 4; i++) { + tmp[i] = GGML_FP16_TO_FP32(tmphalf[i]); + tmp[i + 4] = GGML_FP16_TO_FP32(tmphalf[i]); + tmp[i + 8] = GGML_FP16_TO_FP32(tmphalf[i]); + tmp[i + 12] = GGML_FP16_TO_FP32(tmphalf[i]); + } + + return _mm512_loadu_ps(tmp); +} +#endif +static inline __m256 __avx_f32cx8_load(ggml_fp16_t *x) { + float tmp[8]; + + for (int i = 0; i < 8; i++) { + tmp[i] = GGML_FP16_TO_FP32(x[i]); + } + + return _mm256_loadu_ps(tmp); +} +static inline __m256 __avx_repeat_f32cx8_load(ggml_fp16_t *x) { + float tmp[8]; + + for (int i = 0; i < 4; i++) { + tmp[i] = GGML_FP16_TO_FP32(x[i]); + tmp[i + 4] = GGML_FP16_TO_FP32(x[i]); + } + + return _mm256_loadu_ps(tmp); +} +static inline __m256 __avx_rearranged_f32cx8_load(ggml_fp16_t *x, __m128i arrangeMask) { + uint16_t tmphalf[8]; + float tmp[8]; + + _mm_storeu_si128((__m128i*)tmphalf, _mm_shuffle_epi8(_mm_loadu_si128((const __m128i *) x), arrangeMask)); + for (int i = 0; i < 8; i++) { + tmp[i] = GGML_FP16_TO_FP32(tmphalf[i]); + } + + return _mm256_loadu_ps(tmp); +} + +#define GGML_F32Cx8_LOAD(x) __avx_f32cx8_load(x) +#define GGML_F32Cx8_REPEAT_LOAD(x, loadMask) __avx_repeat_f32cx8_load(x) +#define GGML_F32Cx8_REARRANGE_LOAD(x, arrangeMask) __avx_rearranged_f32cx8_load(x, arrangeMask) +#if defined(__AVX512F__) +#define GGML_F32Cx8x2_LOAD(x, y) __avx512_f32cx8x2_load(x, y) +#define GGML_F32Cx16_REPEAT_LOAD(x) __avx512_repeat_f32cx16_load(x) +#endif +#endif +#endif + + +#if defined(__AVX2__) || defined(__AVX512F__) +#if defined(__AVX512F__) +// add int16_t pairwise and return as 512 bit int vector +static 
inline __m512i sum_i16_pairs_int_32x16(const __m512i x) { + const __m512i ones = _mm512_set1_epi16(1); + return _mm512_madd_epi16(ones, x); +} + +static inline __m512i mul_sum_us8_pairs_int32x16(const __m512i ax, const __m512i sy) { +#if defined(__AVXVNNI__) || (defined(__AVX512VNNI__) && defined(__AVX512VL__)) + const __m512i zero = _mm512_setzero_si512(); + return _mm512_dpbusd_epi32(zero, ax, sy); +#else + // Perform multiplication and create 16-bit values + const __m512i dot = _mm512_maddubs_epi16(ax, sy); + return sum_i16_pairs_int_32x16(dot); +#endif +} + +// multiply int8_t, add results pairwise twice and return as 512 bit int vector +static inline __m512i mul_sum_i8_pairs_int32x16(const __m512i x, const __m512i y) { + const __m512i zero = _mm512_setzero_si512(); + // Get absolute values of x vectors + const __m512i ax = _mm512_abs_epi8(x); + // Sign the values of the y vectors + __mmask64 blt0 = _mm512_movepi8_mask(x); + const __m512i sy = _mm512_mask_sub_epi8(y, blt0, zero, y); + return mul_sum_us8_pairs_int32x16(ax, sy); +} +#endif + +// add int16_t pairwise and return as 256 bit int vector +static inline __m256i sum_i16_pairs_int32x8(const __m256i x) { + const __m256i ones = _mm256_set1_epi16(1); + return _mm256_madd_epi16(ones, x); +} + +static inline __m256i mul_sum_us8_pairs_int32x8(const __m256i ax, const __m256i sy) { +#if defined(__AVXVNNI__) || (defined(__AVX512VNNI__) && defined(__AVX512VL__)) + const __m256i zero = _mm256_setzero_si256(); + return _mm256_dpbusd_epi32(zero, ax, sy); +#else + // Perform multiplication and create 16-bit values + const __m256i dot = _mm256_maddubs_epi16(ax, sy); + return sum_i16_pairs_int32x8(dot); +#endif +} + +// Integer variant of the function defined in ggml-quants.c +// multiply int8_t, add results pairwise twice and return as 256 bit int vector +static inline __m256i mul_sum_i8_pairs_int32x8(const __m256i x, const __m256i y) { +#if __AVXVNNIINT8__ + const __m256i zero = _mm256_setzero_si256(); + return 
_mm256_dpbssd_epi32(zero, x, y); +#else + // Get absolute values of x vectors + const __m256i ax = _mm256_sign_epi8(x, x); + // Sign the values of the y vectors + const __m256i sy = _mm256_sign_epi8(y, x); + return mul_sum_us8_pairs_int32x8(ax, sy); +#endif +} +#endif + +static block_q4_0x4 make_block_q4_0x4(block_q4_0 * in, unsigned int blck_size_interleave, unsigned int xor_mask) { + block_q4_0x4 out; + + for (int i = 0; i < 4; i++) { + out.d[i] = in[i].d; + } + + for (int i = 0; i < QK4_0 * 2; i++) { + int src_offset = (i / (4 * blck_size_interleave)) * blck_size_interleave; + int src_id = (i % (4 * blck_size_interleave)) / blck_size_interleave; + src_offset += (i % blck_size_interleave); + + out.qs[i] = in[src_id].qs[src_offset] ^ xor_mask; + } + + return out; +} + +// interleave 8 block_q4_0s in blocks of blck_size_interleave +// returns an interleaved block_q4_0x8 +// in the interleaved block_q4_0x8, place deltas for 8 block_q4_0 blocks +// first, then interleave quants from 8 block_q4_0s in blocks of blck_size_interleave +static block_q4_0x8 make_block_q4_0x8(block_q4_0 * in, unsigned int blck_size_interleave, unsigned int xor_mask) { + block_q4_0x8 out; + + for (int i = 0; i < 8; i++) { + out.d[i] = in[i].d; + } + + for (int i = 0; i < QK4_0 * 4; i++) { + int src_offset = (i / (8 * blck_size_interleave)) * blck_size_interleave; + int src_id = (i % (8 * blck_size_interleave)) / blck_size_interleave; + src_offset += (i % blck_size_interleave); + + out.qs[i] = in[src_id].qs[src_offset] ^ xor_mask; + } + + return out; +} + +void quantize_q8_0_4x4(const float * restrict x, void * restrict vy, int64_t k) { + assert(QK8_0 == 32); + assert(k % QK8_0 == 0); + const int nb = k / QK8_0; + + block_q8_0x4 * restrict y = (block_q8_0x4 *) vy; + +#if defined(__ARM_NEON) + float32x4_t srcv[4][8]; + float id[4]; + + for (int i = 0; i < nb; i++) { + float32x4_t asrcv[8]; + float32x4_t amaxv[8]; + + for (int row_iter = 0; row_iter < 4; row_iter++) { + for (int j = 0; j < 8; 
j++) srcv[row_iter][j] = vld1q_f32(x + row_iter * k + i * 32 + 4 * j); + for (int j = 0; j < 8; j++) asrcv[j] = vabsq_f32(srcv[row_iter][j]); + + for (int j = 0; j < 4; j++) amaxv[2 * j] = vmaxq_f32(asrcv[2 * j], asrcv[2 * j + 1]); + for (int j = 0; j < 2; j++) amaxv[4 * j] = vmaxq_f32(amaxv[4 * j], amaxv[4 * j + 2]); + for (int j = 0; j < 1; j++) amaxv[8 * j] = vmaxq_f32(amaxv[8 * j], amaxv[8 * j + 4]); + + const float amax = vmaxvq_f32(amaxv[0]); + + const float d = amax / ((1 << 7) - 1); + id[row_iter] = d ? 1.0f / d : 0.0f; + + y[i].d[row_iter] = GGML_FP32_TO_FP16(d); + } + + for (int j = 0; j < 8; j++) { + float32x4_t v = vmulq_n_f32(srcv[0][j], id[0]); + int32x4_t vi = vcvtnq_s32_f32(v); + y[i].qs[16 * j + 0] = vgetq_lane_s32(vi, 0); + y[i].qs[16 * j + 1] = vgetq_lane_s32(vi, 1); + y[i].qs[16 * j + 2] = vgetq_lane_s32(vi, 2); + y[i].qs[16 * j + 3] = vgetq_lane_s32(vi, 3); + + v = vmulq_n_f32(srcv[1][j], id[1]); + vi = vcvtnq_s32_f32(v); + y[i].qs[16 * j + 4] = vgetq_lane_s32(vi, 0); + y[i].qs[16 * j + 5] = vgetq_lane_s32(vi, 1); + y[i].qs[16 * j + 6] = vgetq_lane_s32(vi, 2); + y[i].qs[16 * j + 7] = vgetq_lane_s32(vi, 3); + + v = vmulq_n_f32(srcv[2][j], id[2]); + vi = vcvtnq_s32_f32(v); + y[i].qs[16 * j + 8] = vgetq_lane_s32(vi, 0); + y[i].qs[16 * j + 9] = vgetq_lane_s32(vi, 1); + y[i].qs[16 * j + 10] = vgetq_lane_s32(vi, 2); + y[i].qs[16 * j + 11] = vgetq_lane_s32(vi, 3); + + v = vmulq_n_f32(srcv[3][j], id[3]); + vi = vcvtnq_s32_f32(v); + y[i].qs[16 * j + 12] = vgetq_lane_s32(vi, 0); + y[i].qs[16 * j + 13] = vgetq_lane_s32(vi, 1); + y[i].qs[16 * j + 14] = vgetq_lane_s32(vi, 2); + y[i].qs[16 * j + 15] = vgetq_lane_s32(vi, 3); + } + } +#else + // scalar + const int blck_size_interleave = 4; + float srcv[4][QK8_0]; + float id[4]; + + for (int i = 0; i < nb; i++) { + for (int row_iter = 0; row_iter < 4; row_iter++) { + float amax = 0.0f; // absolute max + + for (int j = 0; j < QK8_0; j++) { + srcv[row_iter][j] = x[row_iter * k + i * QK8_0 + j]; + amax = MAX(amax, 
fabsf(srcv[row_iter][j]));
            }

            // Per-row scale: amax maps to the int8 limit 127; id is the
            // reciprocal used to quantize (0 when the whole row-block is 0).
            const float d = amax / ((1 << 7) - 1);
            id[row_iter] = d ? 1.0f / d : 0.0f;

            y[i].d[row_iter] = GGML_FP32_TO_FP16(d);
        }

        // Interleave the 4 rows into y[i].qs in chunks of
        // blck_size_interleave (4) bytes: j walks the output, and
        // (src_id, src_offset) locate the source row/element.
        for (int j = 0; j < QK8_0 * 4; j++) {
            int src_offset = (j / (4 * blck_size_interleave)) * blck_size_interleave;
            int src_id = (j % (4 * blck_size_interleave)) / blck_size_interleave;
            src_offset += (j % blck_size_interleave);

            float x0 = srcv[src_id][src_offset] * id[src_id];
            y[i].qs[j] = roundf(x0);
        }
    }
#endif
}

// quantize_q8_0_4x8: quantize 4 rows of k floats (x points at row 0; rows are
// k floats apart) into block_q8_0x4 blocks at vy, with the quantized bytes of
// the 4 rows interleaved in 8-byte chunks (cf. quantize_q8_0_4x4, which
// interleaves in 4-byte chunks). One output block covers QK8_0 (= 32)
// elements of each of the 4 rows and stores one fp16 scale per row.
void quantize_q8_0_4x8(const float * restrict x, void * restrict vy, int64_t k) {
    assert(QK8_0 == 32);
    assert(k % QK8_0 == 0);
    const int nb = k / QK8_0;

    block_q8_0x4 * restrict y = (block_q8_0x4 *) vy;

#if defined(__ARM_NEON)
    float32x4_t srcv[4][8];
    float id[4];

    for (int i = 0; i < nb; i++) {
        float32x4_t asrcv[8];
        float32x4_t amaxv[8];

        for (int row_iter = 0; row_iter < 4; row_iter++) {
            // Load 32 floats of this row as 8 vectors, then reduce |x| to a
            // single absolute maximum via a pairwise max tree.
            for (int j = 0; j < 8; j++) srcv[row_iter][j] = vld1q_f32(x + row_iter * k + i * 32 + 4 * j);
            for (int j = 0; j < 8; j++) asrcv[j] = vabsq_f32(srcv[row_iter][j]);

            for (int j = 0; j < 4; j++) amaxv[2 * j] = vmaxq_f32(asrcv[2 * j], asrcv[2 * j + 1]);
            for (int j = 0; j < 2; j++) amaxv[4 * j] = vmaxq_f32(amaxv[4 * j], amaxv[4 * j + 2]);
            for (int j = 0; j < 1; j++) amaxv[8 * j] = vmaxq_f32(amaxv[8 * j], amaxv[8 * j + 4]);

            const float amax = vmaxvq_f32(amaxv[0]);

            const float d = amax / ((1 << 7) - 1);
            id[row_iter] = d ? 1.0f / d : 0.0f;

            y[i].d[row_iter] = GGML_FP32_TO_FP16(d);
        }

        // Write the interleaved layout: for each j, 8 consecutive bytes from
        // row 0, then rows 1..3, giving a 32-byte stride per j.
        // vcvtnq_s32_f32 rounds to nearest (ties to even).
        for (int j = 0; j < 4; j++) {
            float32x4_t v = vmulq_n_f32(srcv[0][2 * j], id[0]);
            int32x4_t vi = vcvtnq_s32_f32(v);
            y[i].qs[32 * j + 0] = vgetq_lane_s32(vi, 0);
            y[i].qs[32 * j + 1] = vgetq_lane_s32(vi, 1);
            y[i].qs[32 * j + 2] = vgetq_lane_s32(vi, 2);
            y[i].qs[32 * j + 3] = vgetq_lane_s32(vi, 3);
            v = vmulq_n_f32(srcv[0][2 * j + 1], id[0]);
            vi = vcvtnq_s32_f32(v);
            y[i].qs[32 * j + 4] = vgetq_lane_s32(vi, 0);
            y[i].qs[32 * j + 5] = vgetq_lane_s32(vi, 1);
            y[i].qs[32 * j + 6] = vgetq_lane_s32(vi, 2);
            y[i].qs[32 * j + 7] = vgetq_lane_s32(vi, 3);

            v = vmulq_n_f32(srcv[1][2 * j], id[1]);
            vi = vcvtnq_s32_f32(v);
            y[i].qs[32 * j + 8] = vgetq_lane_s32(vi, 0);
            y[i].qs[32 * j + 9] = vgetq_lane_s32(vi, 1);
            y[i].qs[32 * j + 10] = vgetq_lane_s32(vi, 2);
            y[i].qs[32 * j + 11] = vgetq_lane_s32(vi, 3);
            v = vmulq_n_f32(srcv[1][2 * j + 1], id[1]);
            vi = vcvtnq_s32_f32(v);
            y[i].qs[32 * j + 12] = vgetq_lane_s32(vi, 0);
            y[i].qs[32 * j + 13] = vgetq_lane_s32(vi, 1);
            y[i].qs[32 * j + 14] = vgetq_lane_s32(vi, 2);
            y[i].qs[32 * j + 15] = vgetq_lane_s32(vi, 3);

            v = vmulq_n_f32(srcv[2][2 * j], id[2]);
            vi = vcvtnq_s32_f32(v);
            y[i].qs[32 * j + 16] = vgetq_lane_s32(vi, 0);
            y[i].qs[32 * j + 17] = vgetq_lane_s32(vi, 1);
            y[i].qs[32 * j + 18] = vgetq_lane_s32(vi, 2);
            y[i].qs[32 * j + 19] = vgetq_lane_s32(vi, 3);
            v = vmulq_n_f32(srcv[2][2 * j + 1], id[2]);
            vi = vcvtnq_s32_f32(v);
            y[i].qs[32 * j + 20] = vgetq_lane_s32(vi, 0);
            y[i].qs[32 * j + 21] = vgetq_lane_s32(vi, 1);
            y[i].qs[32 * j + 22] = vgetq_lane_s32(vi, 2);
            y[i].qs[32 * j + 23] = vgetq_lane_s32(vi, 3);

            v = vmulq_n_f32(srcv[3][2 * j], id[3]);
            vi = vcvtnq_s32_f32(v);
            y[i].qs[32 * j + 24] = vgetq_lane_s32(vi, 0);
            y[i].qs[32 * j + 25] = vgetq_lane_s32(vi, 1);
            y[i].qs[32 * j + 26] = vgetq_lane_s32(vi, 2);
            y[i].qs[32 * j + 27] = vgetq_lane_s32(vi, 3);
            v = vmulq_n_f32(srcv[3][2 * j + 1], id[3]);
            vi = vcvtnq_s32_f32(v);
            y[i].qs[32 * j + 28] = vgetq_lane_s32(vi, 0);
            y[i].qs[32 * j + 29] = vgetq_lane_s32(vi, 1);
            y[i].qs[32 * j + 30] = vgetq_lane_s32(vi, 2);
            y[i].qs[32 * j + 31] = vgetq_lane_s32(vi, 3);
        }
    }
#elif defined(__AVX2__) || defined(__AVX__)
    float id[4];
    __m256 srcv[4][4];
    __m256 idvec[4];

    for (int i = 0; i < nb; i++) {
        for (int row_iter = 0; row_iter < 4; row_iter++) {
            // Load elements into 4 AVX vectors
            __m256 v0 = _mm256_loadu_ps( x + row_iter * k + i * 32 );
            __m256 v1 = _mm256_loadu_ps( x + row_iter * k + i * 32 + 8 );
            __m256 v2 = _mm256_loadu_ps( x + row_iter * k + i * 32 + 16 );
            __m256 v3 = _mm256_loadu_ps( x + row_iter * k + i * 32 + 24 );

            // Compute max(abs(e)) for the block
            // (andnot with -0.0f clears the sign bit, i.e. fabs)
            const __m256 signBit = _mm256_set1_ps( -0.0f );
            __m256 maxAbs = _mm256_andnot_ps( signBit, v0 );
            maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v1 ) );
            maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v2 ) );
            maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v3 ) );

            __m128 max4 = _mm_max_ps( _mm256_extractf128_ps( maxAbs, 1 ), _mm256_castps256_ps128( maxAbs ) );
            max4 = _mm_max_ps( max4, _mm_movehl_ps( max4, max4 ) );
            max4 = _mm_max_ss( max4, _mm_movehdup_ps( max4 ) );
            const float maxScalar = _mm_cvtss_f32( max4 );

            // Divided by 127.f to mirror results in quantize_row_q8_0
            const float d = maxScalar / 127.f;
            id[row_iter] = ( maxScalar != 0.0f ) ? 127.f / maxScalar : 0.0f; //d ? 1.0f / d : 0.0f;

            // Store the scale for the individual block
            y[i].d[row_iter] = GGML_FP32_TO_FP16(d);

            // Store the values in blocks of eight values - Aim is to use these later for block interleaving
            srcv[row_iter][0] = v0;
            srcv[row_iter][1] = v1;
            srcv[row_iter][2] = v2;
            srcv[row_iter][3] = v3;
            idvec[row_iter] = _mm256_set1_ps(id[row_iter]);
        }

        // The loop iterates four times - The aim is to get 4 corresponding chunks of eight bytes from the original weight blocks that are interleaved
        for (int j = 0; j < 4; j++) {
            // Apply the multiplier
            __m256 v0 = _mm256_mul_ps(srcv[0][j], idvec[0]);
            __m256 v1 = _mm256_mul_ps(srcv[1][j], idvec[1]);
            __m256 v2 = _mm256_mul_ps(srcv[2][j], idvec[2]);
            __m256 v3 = _mm256_mul_ps(srcv[3][j], idvec[3]);

            // Round to nearest integer
            v0 = _mm256_round_ps( v0, _MM_ROUND_NEAREST );
            v1 = _mm256_round_ps( v1, _MM_ROUND_NEAREST );
            v2 = _mm256_round_ps( v2, _MM_ROUND_NEAREST );
            v3 = _mm256_round_ps( v3, _MM_ROUND_NEAREST );

            // Convert floats to integers
            __m256i i0 = _mm256_cvtps_epi32( v0 );
            __m256i i1 = _mm256_cvtps_epi32( v1 );
            __m256i i2 = _mm256_cvtps_epi32( v2 );
            __m256i i3 = _mm256_cvtps_epi32( v3 );

#if defined(__AVX2__)
            // Convert int32 to int16
            i0 = _mm256_packs_epi32( i0, i1 );
            i2 = _mm256_packs_epi32( i2, i3 );
            // Convert int16 to int8
            i0 = _mm256_packs_epi16( i0, i2 );

            // Permute and store the quantized weights in the required order after the pack instruction
            // (packs work per 128-bit lane, so a cross-lane permute restores row order)
            const __m256i perm = _mm256_setr_epi32( 0, 4, 1, 5, 2, 6, 3, 7 );
            i0 = _mm256_permutevar8x32_epi32( i0, perm );

            _mm256_storeu_si256((__m256i *)(y[i].qs + 32 * j), i0);
#else
            // Since we don't have in AVX some necessary functions,
            // we split the registers in half and call AVX2 analogs from SSE
            __m128i ni0 = _mm256_castsi256_si128( i0 );
            __m128i ni1 = _mm256_extractf128_si256( i0, 1);
            __m128i ni2 = _mm256_castsi256_si128( i1 );
            __m128i ni3 = _mm256_extractf128_si256( i1, 1);
            __m128i ni4 = _mm256_castsi256_si128( i2 );
            __m128i ni5 = _mm256_extractf128_si256( i2, 1);
            __m128i ni6 = _mm256_castsi256_si128( i3 );
            __m128i ni7 = _mm256_extractf128_si256( i3, 1);

            // Convert int32 to int16
            ni0 = _mm_packs_epi32( ni0, ni1 );
            ni2 = _mm_packs_epi32( ni2, ni3 );
            ni4 = _mm_packs_epi32( ni4, ni5 );
            ni6 = _mm_packs_epi32( ni6, ni7 );
            // Convert int16 to int8
            ni0 = _mm_packs_epi16( ni0, ni2 );
            ni4 = _mm_packs_epi16( ni4, ni6 );
            _mm_storeu_si128((__m128i *)(y[i].qs + 32 * j), ni0);
            _mm_storeu_si128((__m128i *)(y[i].qs + 32 * j + 16), ni4);
#endif
        }
    }
#else
    // scalar
    const int blck_size_interleave = 8;
    float srcv[4][QK8_0];
    float id[4];

    for (int i = 0; i < nb; i++) {
        for (int row_iter = 0; row_iter < 4; row_iter++) {
            float amax = 0.0f; // absolute max

            for (int j = 0; j < QK8_0; j++) {
                srcv[row_iter][j] = x[row_iter * k + i * QK8_0 + j];
                amax = MAX(amax, fabsf(srcv[row_iter][j]));
            }

            const float d = amax / ((1 << 7) - 1);
            id[row_iter] = d ? 1.0f / d : 0.0f;

            y[i].d[row_iter] = GGML_FP32_TO_FP16(d);
        }

        // Same interleave as the SIMD paths, expressed with index arithmetic
        // (8-byte chunks here, vs 4-byte chunks in quantize_q8_0_4x4).
        for (int j = 0; j < QK8_0 * 4; j++) {
            int src_offset = (j / (4 * blck_size_interleave)) * blck_size_interleave;
            int src_id = (j % (4 * blck_size_interleave)) / blck_size_interleave;
            src_offset += (j % blck_size_interleave);

            float x0 = srcv[src_id][src_offset] * id[src_id];
            y[i].qs[j] = roundf(x0);
        }
    }
#endif
}

// quantize_mat_q8_0: quantize a 4-row matrix to interleaved Q8_0, dispatching
// on the interleave chunk width (4 or 8 bytes). Only nrow == 4 is supported.
void quantize_mat_q8_0(const float * restrict x, void * restrict vy, int64_t nrow, int64_t n_per_row, int64_t blck_size_interleave) {
    assert(nrow == 4);
    UNUSED(nrow);
    if (blck_size_interleave == 4) {
        quantize_q8_0_4x4(x, vy, n_per_row);
    } else if (blck_size_interleave == 8) {
        quantize_q8_0_4x8(x, vy, n_per_row);
    } else {
        assert(false);
    }
}

// quantize_q4_0_nr_bl: quantize nrow rows of n_per_row floats to Q4_0 and
// repack groups of nrows_interleaved (4 or 8) rows into interleaved
// block_q4_0x4 / block_q4_0x8 blocks. Each group-of-rows x block position is
// first quantized into dst_tmp via the reference quantizer, then repacked.
// Returns the total output size in bytes (same density as plain block_q4_0).
static size_t quantize_q4_0_nr_bl(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, int nrows_interleaved, int blck_size_interleave) {
    assert(n_per_row % QK4_0 == 0);
    const int nb = n_per_row / QK4_0;

    void * out_ptr = NULL;
    if (nrows_interleaved == 8) {
        out_ptr = (block_q4_0x8 *) dst;
    }
    else if (nrows_interleaved == 4) {
        out_ptr = (block_q4_0x4 *) dst;
    }
    assert(nrows_interleaved <= 8);
    block_q4_0 dst_tmp[8];

    // b walks the source in strips of nrows_interleaved rows.
    for (int b = 0; b < (nrow * n_per_row); b += nrows_interleaved * n_per_row) {

        for (int64_t x = 0; x < nb; x++) {

            for (int i = 0; i < nrows_interleaved; i++ ) {
                quantize_row_q4_0_ref(src + b + i * n_per_row + x * QK4_0, (block_q4_0 *) dst_tmp + i, QK4_0);
            }

            // 0x88 is passed through to make_block_q4_0x* — presumably a
            // per-byte nibble XOR mask; confirm against those helpers.
            if (nrows_interleaved == 8) {
                *(block_q4_0x8 *) out_ptr = make_block_q4_0x8(dst_tmp, blck_size_interleave, 0x88);
                out_ptr = (block_q4_0x8 *) out_ptr + 1;
            }
            else if (nrows_interleaved == 4) {
                *(block_q4_0x4 *) out_ptr = make_block_q4_0x4(dst_tmp, blck_size_interleave, 0x88);
                out_ptr = (block_q4_0x4 *) out_ptr + 1;
            }
        }
    }

    return ((nrow * n_per_row) / QK4_0 * sizeof(block_q4_0));
}

// quantize_q4_0_4x4: Q4_0 quantization repacked 4 rows wide, 4-byte interleave.
size_t quantize_q4_0_4x4(const float * restrict src, void * restrict dst,
int64_t nrow, int64_t n_per_row, const float * quant_weights) {
    // quant_weights (importance weights) are not used by the 4x4 repacked path.
    UNUSED(quant_weights);
    return quantize_q4_0_nr_bl(src, dst, nrow, n_per_row, 4, 4);
}

// quantize_q4_0_4x8: Q4_0 quantization repacked 4 rows wide, 8-byte interleave.
size_t quantize_q4_0_4x8(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) {
    UNUSED(quant_weights);
    return quantize_q4_0_nr_bl(src, dst, nrow, n_per_row, 4, 8);
}

// quantize_q4_0_8x8: Q4_0 quantization repacked 8 rows wide, 8-byte interleave.
size_t quantize_q4_0_8x8(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) {
    UNUSED(quant_weights);
    return quantize_q4_0_nr_bl(src, dst, nrow, n_per_row, 8, 8);
}

// ggml_gemv_q4_0_4x4_q8_0: vector (Q8_0, at vy) times matrix (Q4_0 repacked
// 4x4, at vx) -> s. n is the shared inner dimension, nc the number of output
// columns (multiple of 4). On AArch64 with NEON a hand-written sdot assembly
// loop is used; otherwise the scalar loop at the bottom computes the same
// result. bs and nr are unused in this kernel.
void ggml_gemv_q4_0_4x4_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, const void * restrict vy, int nr, int nc) {
    const int qk = QK8_0;
    const int nb = n / qk;
    const int ncols_interleaved = 4;
    const int blocklen = 4;

    assert (n % qk == 0);
    assert (nc % ncols_interleaved == 0);

    // Silence unused-parameter warnings on configurations where only one of
    // the paths below is compiled in.
    UNUSED(s);
    UNUSED(bs);
    UNUSED(vx);
    UNUSED(vy);
    UNUSED(nr);
    UNUSED(nc);
    UNUSED(nb);
    UNUSED(ncols_interleaved);
    UNUSED(blocklen);

#if ! ((defined(_MSC_VER)) && ! defined(__clang__)) && defined(__aarch64__) && defined(__ARM_NEON)
    if (ggml_cpu_has_neon()) {
        const void * b_ptr = vx;
        const void * a_ptr = vy;
        float * res_ptr = s;

        // Per-column-group loop (label 1) over nc/4 groups; inner block loop
        // (label 2) accumulates sdot products over nb blocks, then scales by
        // the product of the fp16 scales. .inst encodings are sdot (see the
        // per-line comments), emitted raw for toolchains without dotprod.
        __asm__ __volatile__(
            "movi v31.16b, #0x4\n"
            "movi v30.16b, #0xf0\n"
            "add %x[b_ptr], %x[b_ptr], #0x8\n"
            "1:"  // Column loop
            "add x22, %x[a_ptr], #0x2\n"
            "movi v29.16b, #0x0\n"
            "mov x21, %x[nb]\n"
            "2:"  // Block loop
            "ldr q28, [%x[b_ptr], #0x0]\n"
            "ldr q27, [x22, #0x0]\n"
            "movi v26.4s, #0x0\n"
            "sub x20, x22, #0x2\n"
            "ldr q25, [x22, #0x10]\n"
            "ldr q24, [%x[b_ptr], #0x10]\n"
            "sub x21, x21, #0x1\n"
            "add x22, x22, #0x22\n"
            "ldr q23, [%x[b_ptr], #0x20]\n"
            "ldr q22, [%x[b_ptr], #0x30]\n"
            "ld1r { v21.8h }, [x20]\n"
            "ldr q20, [%x[b_ptr], #-0x8]\n"
            "sshl v16.16b, v28.16b, v31.16b\n"
            "and v28.16b, v28.16b, v30.16b\n"
            "sshl v19.16b, v24.16b, v31.16b\n"
            "and v24.16b, v24.16b, v30.16b\n"
            "add %x[b_ptr], %x[b_ptr], #0x48\n"
            "sshl v18.16b, v23.16b, v31.16b\n"
            "and v23.16b, v23.16b, v30.16b\n"
            ".inst 0x4f9be21a  // sdot v26.4s, v16.16b, v27.4b[0]\n"
            "sshl v17.16b, v22.16b, v31.16b\n"
            "and v22.16b, v22.16b, v30.16b\n"
            "fcvtl v21.4s, v21.4h\n"
            "fcvtl v16.4s, v20.4h\n"
            ".inst 0x4f99e39a  // sdot v26.4s, v28.16b, v25.4b[0]\n"
            "fmul v16.4s, v16.4s, v21.4s\n"
            ".inst 0x4fbbe27a  // sdot v26.4s, v19.16b, v27.4b[1]\n"
            ".inst 0x4fb9e31a  // sdot v26.4s, v24.16b, v25.4b[1]\n"
            ".inst 0x4f9bea5a  // sdot v26.4s, v18.16b, v27.4b[2]\n"
            ".inst 0x4f99eafa  // sdot v26.4s, v23.16b, v25.4b[2]\n"
            ".inst 0x4fbbea3a  // sdot v26.4s, v17.16b, v27.4b[3]\n"
            ".inst 0x4fb9eada  // sdot v26.4s, v22.16b, v25.4b[3]\n"
            "scvtf v26.4s, v26.4s, #0x4\n"
            "fmla v29.4s, v26.4s, v16.4s\n"
            "cbnz x21, 2b\n"
            "sub %x[nc], %x[nc], #0x4\n"
            "str q29, [%x[res_ptr], #0x0]\n"
            "add %x[res_ptr], %x[res_ptr], #0x10\n"
            "cbnz %x[nc], 1b\n"
            : [b_ptr] "+&r" (b_ptr), [res_ptr] "+&r" (res_ptr), [nc] "+&r" (nc)
            : [a_ptr] "r" (a_ptr), [nb] "r" (nb)
            : "memory", "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31", "x20", "x21", "x22"
        );
        return;
    }
#endif // #if ! ((defined(_MSC_VER)) && ! defined(__clang__)) && defined(__aarch64__) && defined(__ARM_NEON)
    // Portable scalar fallback (reference semantics for the asm above).
    float sumf[4];
    int sumi;

    const block_q8_0 * a_ptr = (const block_q8_0 *) vy;
    for (int x = 0; x < nc / ncols_interleaved; x++) {
        const block_q4_0x4 * b_ptr = (const block_q4_0x4 *) vx + (x * nb);

        for (int j = 0; j < ncols_interleaved; j++) sumf[j] = 0.0;
        for (int l = 0; l < nb; l++) {
            for (int k = 0; k < (qk / (2 * blocklen)); k++) {
                for (int j = 0; j < ncols_interleaved; j++) {
                    sumi = 0;
                    for (int i = 0; i < blocklen; ++i) {
                        // v0/v1: low/high nibble scaled by 16 via shift/mask;
                        // the >> 4 after the products undoes that factor.
                        const int v0 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] << 4);
                        const int v1 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] & 0xF0);
                        sumi += ((v0 * a_ptr[l].qs[k * blocklen + i]) + (v1 * a_ptr[l].qs[k * blocklen + i + qk / 2])) >> 4;
                    }
                    sumf[j] += sumi * GGML_FP16_TO_FP32(b_ptr[l].d[j]) * GGML_FP16_TO_FP32(a_ptr[l].d);
                }
            }
        }
        for (int j = 0; j < ncols_interleaved; j++) s[x * ncols_interleaved + j] = sumf[j];
    }
}

// ggml_gemv_q4_0_4x8_q8_0: same contract as ggml_gemv_q4_0_4x4_q8_0 but for
// the 4-row, 8-byte-interleaved Q4_0 layout; the fast path additionally
// requires the i8mm (matmul int8) feature.
void ggml_gemv_q4_0_4x8_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, const void * restrict vy, int nr, int nc) {
    const int qk = QK8_0;
    const int nb = n / qk;
    const int ncols_interleaved = 4;
    const int blocklen = 8;

    assert (n % qk == 0);
    assert (nc % ncols_interleaved == 0);

    UNUSED(s);
    UNUSED(bs);
    UNUSED(vx);
    UNUSED(vy);
    UNUSED(nr);
    UNUSED(nc);
    UNUSED(nb);
    UNUSED(ncols_interleaved);
    UNUSED(blocklen);

#if ! ((defined(_MSC_VER)) && !
defined(__clang__)) && defined(__aarch64__) && defined(__ARM_NEON) && defined(__ARM_FEATURE_MATMUL_INT8)
    if (ggml_cpu_has_neon() && ggml_cpu_has_matmul_int8()) {
        const void * b_ptr = vx;
        const void * a_ptr = vy;
        float * res_ptr = s;

        // Column loop (label 1) / block loop (label 2) as in the 4x4 kernel,
        // but accumulating two sdot vectors (v29, v26) that are combined with
        // addp before scaling. .inst encodings are sdot (see comments).
        __asm__ __volatile__(
            "movi v2.16b, #0x4\n"
            "movi v1.16b, #0xf0\n"
            "add %x[b_ptr], %x[b_ptr], #0x8\n"
            "1:"  // Column loop
            "add x23, %x[a_ptr], #0x2\n"
            "movi v0.16b, #0x0\n"
            "mov x22, %x[nb]\n"
            "2:"  // Block loop
            "ldr q31, [%x[b_ptr], #0x0]\n"
            "ldr q30, [%x[b_ptr], #0x10]\n"
            "mov x21, x23\n"
            "movi v29.4s, #0x0\n"
            "ldr q28, [%x[b_ptr], #0x20]\n"
            "ldr q27, [%x[b_ptr], #0x30]\n"
            "movi v26.4s, #0x0\n"
            "sub x20, x23, #0x2\n"
            "ld1r { v25.8h }, [x20]\n"
            "ldr q24, [%x[b_ptr], #-0x8]\n"
            "sub x22, x22, #0x1\n"
            "add x23, x23, #0x22\n"
            "ld1r { v23.2d }, [x21], #0x8\n"
            "sshl v22.16b, v31.16b, v2.16b\n"
            "sshl v16.16b, v30.16b, v2.16b\n"
            "add %x[b_ptr], %x[b_ptr], #0x48\n"
            "ld1r { v21.2d }, [x21], #0x8\n"
            "sshl v20.16b, v28.16b, v2.16b\n"
            "sshl v19.16b, v27.16b, v2.16b\n"
            "ld1r { v18.2d }, [x21], #0x8\n"
            "ld1r { v17.2d }, [x21], #0x8\n"
            "and v31.16b, v31.16b, v1.16b\n"
            "and v30.16b, v30.16b, v1.16b\n"
            ".inst 0x4e9796dd  // sdot v29.4s, v22.16b, v23.16b\n"
            ".inst 0x4e97961a  // sdot v26.4s, v16.16b, v23.16b\n"
            "and v28.16b, v28.16b, v1.16b\n"
            "and v27.16b, v27.16b, v1.16b\n"
            "fcvtl v25.4s, v25.4h\n"
            "fcvtl v16.4s, v24.4h\n"
            ".inst 0x4e95969d  // sdot v29.4s, v20.16b, v21.16b\n"
            ".inst 0x4e95967a  // sdot v26.4s, v19.16b, v21.16b\n"
            "fmul v16.4s, v16.4s, v25.4s\n"
            ".inst 0x4e9297fd  // sdot v29.4s, v31.16b, v18.16b\n"
            ".inst 0x4e9297da  // sdot v26.4s, v30.16b, v18.16b\n"
            ".inst 0x4e91979d  // sdot v29.4s, v28.16b, v17.16b\n"
            ".inst 0x4e91977a  // sdot v26.4s, v27.16b, v17.16b\n"
            "addp v29.4s, v29.4s, v26.4s\n"
            "scvtf v29.4s, v29.4s, #0x4\n"
            "fmla v0.4s, v29.4s, v16.4s\n"
            "cbnz x22, 2b\n"
            "sub %x[nc], %x[nc], #0x4\n"
            "str q0, [%x[res_ptr], #0x0]\n"
            "add %x[res_ptr], %x[res_ptr], #0x10\n"
            "cbnz %x[nc], 1b\n"
            : [b_ptr] "+&r" (b_ptr), [res_ptr] "+&r" (res_ptr), [nc] "+&r" (nc)
            : [a_ptr] "r" (a_ptr), [nb] "r" (nb)
            : "memory", "v0", "v1", "v2", "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31", "x20", "x21", "x22", "x23"
        );
        return;
    }
#endif // #if ! ((defined(_MSC_VER)) && ! defined(__clang__)) && defined(__aarch64__) && defined(__ARM_NEON) && defined(__ARM_FEATURE_MATMUL_INT8)
    // Portable scalar fallback; identical math to the 4x4 kernel's fallback
    // except blocklen (the interleave chunk) is 8.
    float sumf[4];
    int sumi;

    const block_q8_0 * a_ptr = (const block_q8_0 *) vy;
    for (int x = 0; x < nc / ncols_interleaved; x++) {
        const block_q4_0x4 * b_ptr = (const block_q4_0x4 *) vx + (x * nb);

        for (int j = 0; j < ncols_interleaved; j++) sumf[j] = 0.0;
        for (int l = 0; l < nb; l++) {
            for (int k = 0; k < (qk / (2 * blocklen)); k++) {
                for (int j = 0; j < ncols_interleaved; j++) {
                    sumi = 0;
                    for (int i = 0; i < blocklen; ++i) {
                        const int v0 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] << 4);
                        const int v1 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] & 0xF0);
                        sumi += ((v0 * a_ptr[l].qs[k * blocklen + i]) + (v1 * a_ptr[l].qs[k * blocklen + i + qk / 2])) >> 4;
                    }
                    sumf[j] += sumi * GGML_FP16_TO_FP32(b_ptr[l].d[j]) * GGML_FP16_TO_FP32(a_ptr[l].d);
                }
            }
        }
        for (int j = 0; j < ncols_interleaved; j++) s[x * ncols_interleaved + j] = sumf[j];
    }
}

// ggml_gemv_q4_0_8x8_q8_0: GEMV against the 8-row, 8-byte-interleaved Q4_0
// layout (block_q4_0x8). Fast paths: SVE (vector length 256 bit) on AArch64,
// AVX2 on x86; otherwise the scalar loop at the bottom.
void ggml_gemv_q4_0_8x8_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, const void * restrict vy, int nr, int nc) {
    const int qk = QK8_0;
    const int nb = n / qk;
    const int ncols_interleaved = 8;
    const int blocklen = 8;

    assert (n % qk == 0);
    assert (nc % ncols_interleaved == 0);

    UNUSED(s);
    UNUSED(bs);
    UNUSED(vx);
    UNUSED(vy);
    UNUSED(nr);
    UNUSED(nc);
    UNUSED(nb);
    UNUSED(ncols_interleaved);
    UNUSED(blocklen);

#if ! ((defined(_MSC_VER)) && !
defined(__clang__)) && defined(__aarch64__)
#if defined(__ARM_FEATURE_SVE)
    // SVE path: only taken when the runtime vector length equals QK8_0 (32
    // bytes, i.e. 256-bit vectors), which the addressing below assumes.
    if (ggml_cpu_has_sve() && ggml_cpu_get_sve_cnt() == QK8_0) {
        const void * b_ptr = vx;
        const void * a_ptr = vy;
        float * res_ptr = s;

        __asm__ __volatile__(
            "ptrue p0.b\n"
            "add %x[b_ptr], %x[b_ptr], #0x10\n"
            "1:"  // Column loop
            "add x22, %x[a_ptr], #0x2\n"
            "mov z31.b, #0x0\n"
            "mov x21, %x[nb]\n"
            "2:"  // Block loop
            "ld1b { z30.b }, p0/Z, [%x[b_ptr]]\n"
            "ld1b { z29.b }, p0/Z, [%x[b_ptr], #1, MUL VL]\n"
            "mov z28.s, #0x0\n"
            "mov z27.s, #0x0\n"
            "ld1rd { z26.d }, p0/Z, [x22]\n"
            "ld1b { z25.b }, p0/Z, [%x[b_ptr], #2, MUL VL]\n"
            "sub x20, x22, #0x2\n"
            "sub x21, x21, #0x1\n"
            "ld1b { z24.b }, p0/Z, [%x[b_ptr], #3, MUL VL]\n"
            "ld1rd { z23.d }, p0/Z, [x22, #8]\n"
            "lsl z22.b, z30.b, #0x4\n"
            "lsl z16.b, z29.b, #0x4\n"
            "and z30.b, z30.b, #0xf0\n"
            "and z29.b, z29.b, #0xf0\n"
            "ld1rd { z21.d }, p0/Z, [x22, #16]\n"
            "ld1rd { z20.d }, p0/Z, [x22, #24]\n"
            "lsl z19.b, z25.b, #0x4\n"
            "and z25.b, z25.b, #0xf0\n"
            "ld1rh { z17.h }, p0/Z, [x20]\n"
            "ld1h { z18.s }, p0/Z, [%x[b_ptr], #-1, MUL VL]\n"
            "sdot z28.s, z22.b, z26.b\n"
            "sdot z27.s, z16.b, z26.b\n"
            "lsl z16.b, z24.b, #0x4\n"
            "add x22, x22, #0x22\n"
            "and z24.b, z24.b, #0xf0\n"
            "add %x[b_ptr], %x[b_ptr], #0x90\n"
            "fcvt z17.s, p0/m, z17.h\n"
            "fcvt z18.s, p0/m, z18.h\n"
            "sdot z28.s, z19.b, z23.b\n"
            "sdot z27.s, z16.b, z23.b\n"
            "fmul z18.s, z18.s, z17.s\n"
            "sdot z28.s, z30.b, z21.b\n"
            "sdot z27.s, z29.b, z21.b\n"
            "sdot z28.s, z25.b, z20.b\n"
            "sdot z27.s, z24.b, z20.b\n"
            "uzp1 z17.s, z28.s, z27.s\n"
            "uzp2 z16.s, z28.s, z27.s\n"
            "add z17.s, z17.s, z16.s\n"
            "asr z17.s, z17.s, #0x4\n"
            "scvtf z17.s, p0/m, z17.s\n"
            "fmla z31.s, p0/M, z17.s, z18.s\n"
            "cbnz x21, 2b\n"
            "sub %x[nc], %x[nc], #0x8\n"
            "st1w { z31.s }, p0, [%x[res_ptr]]\n"
            "add %x[res_ptr], %x[res_ptr], #0x20\n"
            "cbnz %x[nc], 1b\n"
            : [b_ptr] "+&r" (b_ptr), [res_ptr] "+&r" (res_ptr), [nc] "+&r" (nc)
            : [a_ptr] "r" (a_ptr), [nb] "r" (nb)
            : "memory", "p0", "x20", "x21", "x22", "z16", "z17", "z18", "z19", "z20", "z21", "z22", "z23", "z24", "z25", "z26", "z27", "z28", "z29", "z30", "z31"
        );
        return;
    }
#endif // #if defined(__ARM_FEATURE_SVE)
#elif defined(__AVX2__)
    // Lookup table to convert signed nibbles to signed bytes
    __m256i signextendlut = _mm256_castsi128_si256(_mm_set_epi8(-1, -2, -3, -4, -5, -6, -7, -8, 7, 6, 5, 4, 3, 2, 1, 0));
    signextendlut = _mm256_permute2f128_si256(signextendlut, signextendlut, 0);
    __m128i changemask = _mm_set_epi8(15, 14, 7, 6, 13, 12, 5, 4, 11, 10, 3, 2, 9, 8, 1, 0);
    __m256i finalpermutemask = _mm256_set_epi32(7, 5, 3, 1, 6, 4, 2, 0);

    // Permute mask used for easier vector processing at later stages
    const __m256i m4b = _mm256_set1_epi8(0x0F);

    int64_t b_nb = n / QK4_0;

    const block_q4_0x8 * b_ptr_start = (const block_q4_0x8 *)vx;
    const block_q8_0 * a_ptr_start = (const block_q8_0 *)vy;

    // Process Q8_0 blocks one by one
    for (int64_t y = 0; y < nr; y++) {

        // Pointers to LHS blocks of block_q8_0 format
        const block_q8_0 * a_ptr = a_ptr_start + (y * nb);

        // Take group of eight block_q4_0x8 structures at each pass of the loop and perform dot product operation
        for (int64_t x = 0; x < nc / 8; x++) {

            // Pointers to RHS blocks
            const block_q4_0x8 * b_ptr = b_ptr_start + (x * b_nb);

            // Master FP accumulator
            __m256 acc_row = _mm256_setzero_ps();

            for (int64_t b = 0; b < nb; b++) {
                // Load 8 blocks of Q4_0 interleaved as 8 bytes (B0 - B7)
                const __m256i rhs_raw_vec_0123_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs));
                const __m256i rhs_raw_vec_4567_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs) + 1);
                const __m256i rhs_raw_vec_0123_1 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs) + 2);
                const __m256i rhs_raw_vec_4567_1 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs) + 3);

                // 4-bit -> 8-bit - Sign is maintained
                const __m256i rhs_vec_0123_0 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_vec_0123_0, m4b)); // B0(0-7) B1(0-7) B2(0-7) B3(0-7)
                const __m256i rhs_vec_4567_0 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_vec_4567_0, m4b)); // B4(0-7) B5(0-7) B6(0-7) B7(0-7)
                const __m256i rhs_vec_0123_1 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_vec_0123_1, m4b)); // B0(8-15) B1(8-15) B2(8-15) B3(8-15)
                const __m256i rhs_vec_4567_1 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_vec_4567_1, m4b)); // B4(8-15) B5(8-15) B6(8-15) B7(8-15)

                const __m256i rhs_vec_0123_2 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_vec_0123_0, 4), m4b)); // B0(16-23) B1(16-23) B2(16-23) B3(16-23)
                const __m256i rhs_vec_4567_2 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_vec_4567_0, 4), m4b)); // B4(16-23) B5(16-23) B6(16-23) B7(16-23)
                const __m256i rhs_vec_0123_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_vec_0123_1, 4), m4b)); // B0(24-31) B1(24-31) B2(24-31) B3(24-31)
                const __m256i rhs_vec_4567_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_vec_4567_1, 4), m4b)); // B4(24-31) B5(24-31) B6(24-31) B7(24-31)

                // Load the scale values for the 8 blocks interleaved in block_q4_0x8
                const __m256 col_scale_f32 = GGML_F32Cx8_REARRANGE_LOAD(b_ptr[b].d, changemask);

                // Load and convert to FP32 scale from block_q8_0
                const __m256 row_scale_f32 = _mm256_set1_ps(GGML_FP16_TO_FP32(a_ptr[b].d));

                // Load the block values in block_q8_0 in batches of 16 bytes and replicate the same across 256 bit vector
                __m256i lhs_vec_0 = _mm256_castsi128_si256(_mm_loadu_si128((const __m128i *)a_ptr[b].qs));
                __m256i lhs_vec_1 = _mm256_castsi128_si256(_mm_loadu_si128((const __m128i *)(a_ptr[b].qs + 16)));

                lhs_vec_0 = _mm256_permute2f128_si256(lhs_vec_0, lhs_vec_0, 0); // A0 (0-15) A0(0-15)
                lhs_vec_1 = _mm256_permute2f128_si256(lhs_vec_1, lhs_vec_1, 0); // A0 (16-31) A0(16-31))

                __m256i iacc = _mm256_setzero_si256();

                // Dot product done within 32 bit lanes and accumulated in the same vector
                // B0(0-3) B4(0-3) B1(0-3) B5(0-3) B2(0-3) B6(0-3) B3(0-3) B7(0-3) with A0(0-3)
                // B0(4-7) B4(4-7) B1(4-7) B5(4-7) B2(4-7) B6(4-7) B3(4-7) B7(4-7) with A0(4-7)
                // ...........................................................................
                // B0(28-31) B4(28-31) B1(28-31) B5(28-31) B2(28-31) B6(28-31) B3(28-31) B7(28-31) with A0(28-31)

                iacc = _mm256_add_epi32(iacc, mul_sum_i8_pairs_int32x8(_mm256_blend_epi32(rhs_vec_0123_0 ,_mm256_shuffle_epi32(rhs_vec_4567_0, 177), 170), _mm256_shuffle_epi32(lhs_vec_0, 0)));
                iacc = _mm256_add_epi32(iacc, mul_sum_i8_pairs_int32x8(_mm256_blend_epi32(_mm256_shuffle_epi32(rhs_vec_0123_0, 177) ,rhs_vec_4567_0, 170), _mm256_shuffle_epi32(lhs_vec_0, 85)));

                iacc = _mm256_add_epi32(iacc, mul_sum_i8_pairs_int32x8(_mm256_blend_epi32(rhs_vec_0123_1 ,_mm256_shuffle_epi32(rhs_vec_4567_1, 177), 170), _mm256_shuffle_epi32(lhs_vec_0, 170)));
                iacc = _mm256_add_epi32(iacc, mul_sum_i8_pairs_int32x8(_mm256_blend_epi32(_mm256_shuffle_epi32(rhs_vec_0123_1, 177) ,rhs_vec_4567_1, 170), _mm256_shuffle_epi32(lhs_vec_0, 255)));

                iacc = _mm256_add_epi32(iacc, mul_sum_i8_pairs_int32x8(_mm256_blend_epi32(rhs_vec_0123_2 ,_mm256_shuffle_epi32(rhs_vec_4567_2, 177), 170), _mm256_shuffle_epi32(lhs_vec_1, 0)));
                iacc = _mm256_add_epi32(iacc, mul_sum_i8_pairs_int32x8(_mm256_blend_epi32(_mm256_shuffle_epi32(rhs_vec_0123_2, 177) ,rhs_vec_4567_2, 170), _mm256_shuffle_epi32(lhs_vec_1, 85)));

                iacc = _mm256_add_epi32(iacc, mul_sum_i8_pairs_int32x8(_mm256_blend_epi32(rhs_vec_0123_3 ,_mm256_shuffle_epi32(rhs_vec_4567_3, 177), 170), _mm256_shuffle_epi32(lhs_vec_1, 170)));
                iacc = _mm256_add_epi32(iacc, mul_sum_i8_pairs_int32x8(_mm256_blend_epi32(_mm256_shuffle_epi32(rhs_vec_0123_3, 177) ,rhs_vec_4567_3, 170), _mm256_shuffle_epi32(lhs_vec_1, 255)));

                // Accumulated values multipled with appropriate scales
                acc_row = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc), _mm256_mul_ps(col_scale_f32, row_scale_f32), acc_row);
            }

            // Accumulated output values permuted so as to be stored in appropriate order post accumulation
            acc_row = _mm256_permutevar8x32_ps(acc_row, finalpermutemask);
            _mm256_storeu_ps(s + (y * nr + x * 8), acc_row);
        }
    }
    return;
#endif // #if ! ((defined(_MSC_VER)) && ! defined(__clang__)) && defined(__aarch64__)
    // Portable scalar fallback for the 8x8 layout (same math as the 4-wide
    // kernels, with 8 interleaved columns).
    {
        float sumf[8];
        int sumi;

        const block_q8_0 * a_ptr = (const block_q8_0 *) vy;
        for (int x = 0; x < nc / ncols_interleaved; x++) {
            const block_q4_0x8 * b_ptr = (const block_q4_0x8 *) vx + (x * nb);

            for (int j = 0; j < ncols_interleaved; j++) sumf[j] = 0.0;
            for (int l = 0; l < nb; l++) {
                for (int k = 0; k < (qk / (2 * blocklen)); k++) {
                    for (int j = 0; j < ncols_interleaved; j++) {
                        sumi = 0;
                        for (int i = 0; i < blocklen; ++i) {
                            const int v0 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] << 4);
                            const int v1 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] & 0xF0);
                            sumi += ((v0 * a_ptr[l].qs[k * blocklen + i]) + (v1 * a_ptr[l].qs[k * blocklen + i + qk / 2])) >> 4;
                        }
                        sumf[j] += sumi * GGML_FP16_TO_FP32(b_ptr[l].d[j]) * GGML_FP16_TO_FP32(a_ptr[l].d);
                    }
                }
            }
            for (int j = 0; j < ncols_interleaved; j++) s[x * ncols_interleaved + j] = sumf[j];
        }
    }
}

// ggml_gemm_q4_0_4x4_q8_0: GEMM variant — nr (multiple of 4) Q8_0 rows times
// the 4x4-interleaved Q4_0 matrix; bs is the output row stride in floats.
void ggml_gemm_q4_0_4x4_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, const void * restrict vy, int nr, int nc) {
    const int qk = QK8_0;
    const int nb = n / qk;
    const int ncols_interleaved = 4;
    const int blocklen = 4;

    assert (n % qk == 0);
    assert (nr % 4 == 0);
    assert (nc % ncols_interleaved == 0);

    UNUSED(s);
    UNUSED(bs);
    UNUSED(vx);
    UNUSED(vy);
    UNUSED(nr);
    UNUSED(nc);
    UNUSED(nb);
    UNUSED(ncols_interleaved);
    UNUSED(blocklen);

#if ! ((defined(_MSC_VER)) && !
defined(__clang__)) && defined(__aarch64__) && defined(__ARM_NEON) + if (ggml_cpu_has_neon()) { + const void * b_ptr = vx; + const void * a_ptr = vy; + float * res_ptr = s; + size_t res_stride = bs * sizeof(float); + + __asm__ __volatile__( + "mov x10, %x[nr]\n" + "mov x9, #0x88\n" + "cmp x10, #0x10\n" + "mul x9, %x[nb], x9\n" + "blt 4f\n" + "1:" // Row loop + "add x28, %x[b_ptr], #0x8\n" + "mov x27, %x[nc]\n" + "add x26, %x[res_ptr], %x[res_stride], LSL #4\n" + "2:" // Column loop + "add x25, %x[a_ptr], #0x8\n" + "movi v15.16b, #0x0\n" + "movi v19.16b, #0x0\n" + "mov x24, %x[nb]\n" + "add x23, x25, x9\n" + "movi v18.16b, #0x0\n" + "movi v14.16b, #0x0\n" + "add x22, x23, x9\n" + "movi v11.16b, #0x0\n" + "movi v13.16b, #0x0\n" + "add x21, x22, x9\n" + "movi v23.16b, #0x0\n" + "movi v16.16b, #0x0\n" + "movi v25.16b, #0x0\n" + "movi v7.16b, #0x0\n" + "movi v0.16b, #0x0\n" + "movi v4.16b, #0x0\n" + "movi v5.16b, #0x0\n" + "movi v21.16b, #0x0\n" + "movi v8.16b, #0x0\n" + "movi v1.16b, #0x0\n" + "3:" // Block loop + "ldr q3, [x28, #0x0]\n" + "ldr q31, [x25, #0x0]\n" + "movi v28.16b, #0x4\n" + "movi v10.4s, #0x0\n" + "ldr q22, [x28, #0x10]\n" + "ldr q6, [x25, #0x10]\n" + "movi v29.4s, #0x0\n" + "movi v9.4s, #0x0\n" + "ldr q27, [x28, #0x20]\n" + "ldr q30, [x28, #0x30]\n" + "movi v20.4s, #0x0\n" + "movi v24.16b, #0xf0\n" + "ldr d2, [x25, #-0x8]\n" + "ldr d26, [x23, #-0x8]\n" + "sshl v12.16b, v3.16b, v28.16b\n" + "sub x20, x28, #0x8\n" + "ldr d17, [x20, #0x0]\n" + "and v3.16b, v3.16b, v24.16b\n" + "subs x24, x24, #0x1\n" + "add x28, x28, #0x48\n" + ".inst 0x4f9fe18a // sdot v10.4s, v12.16b, v31.4b[0]\n" + ".inst 0x4fbfe19d // sdot v29.4s, v12.16b, v31.4b[1]\n" + ".inst 0x4f9fe989 // sdot v9.4s, v12.16b, v31.4b[2]\n" + ".inst 0x4fbfe994 // sdot v20.4s, v12.16b, v31.4b[3]\n" + "sshl v31.16b, v22.16b, v28.16b\n" + "and v22.16b, v22.16b, v24.16b\n" + "fcvtl v17.4s, v17.4h\n" + "fcvtl v2.4s, v2.4h\n" + "fcvtl v26.4s, v26.4h\n" + ".inst 0x4f86e3ea // sdot v10.4s, v31.16b, 
v6.4b[0]\n" + ".inst 0x4fa6e3fd // sdot v29.4s, v31.16b, v6.4b[1]\n" + ".inst 0x4f86ebe9 // sdot v9.4s, v31.16b, v6.4b[2]\n" + ".inst 0x4fa6ebf4 // sdot v20.4s, v31.16b, v6.4b[3]\n" + "sshl v6.16b, v27.16b, v28.16b\n" + "sshl v28.16b, v30.16b, v28.16b\n" + "and v27.16b, v27.16b, v24.16b\n" + "and v30.16b, v30.16b, v24.16b\n" + "ldr q24, [x25, #0x20]\n" + ".inst 0x4f98e0ca // sdot v10.4s, v6.16b, v24.4b[0]\n" + ".inst 0x4fb8e0dd // sdot v29.4s, v6.16b, v24.4b[1]\n" + ".inst 0x4f98e8c9 // sdot v9.4s, v6.16b, v24.4b[2]\n" + ".inst 0x4fb8e8d4 // sdot v20.4s, v6.16b, v24.4b[3]\n" + "ldr q24, [x25, #0x30]\n" + ".inst 0x4f98e38a // sdot v10.4s, v28.16b, v24.4b[0]\n" + ".inst 0x4fb8e39d // sdot v29.4s, v28.16b, v24.4b[1]\n" + ".inst 0x4f98eb89 // sdot v9.4s, v28.16b, v24.4b[2]\n" + ".inst 0x4fb8eb94 // sdot v20.4s, v28.16b, v24.4b[3]\n" + "ldr q24, [x25, #0x40]\n" + ".inst 0x4f98e06a // sdot v10.4s, v3.16b, v24.4b[0]\n" + ".inst 0x4fb8e07d // sdot v29.4s, v3.16b, v24.4b[1]\n" + ".inst 0x4f98e869 // sdot v9.4s, v3.16b, v24.4b[2]\n" + ".inst 0x4fb8e874 // sdot v20.4s, v3.16b, v24.4b[3]\n" + "ldr q24, [x25, #0x50]\n" + ".inst 0x4f98e2ca // sdot v10.4s, v22.16b, v24.4b[0]\n" + ".inst 0x4fb8e2dd // sdot v29.4s, v22.16b, v24.4b[1]\n" + ".inst 0x4f98eac9 // sdot v9.4s, v22.16b, v24.4b[2]\n" + ".inst 0x4fb8ead4 // sdot v20.4s, v22.16b, v24.4b[3]\n" + "ldr q24, [x25, #0x60]\n" + ".inst 0x4f98e36a // sdot v10.4s, v27.16b, v24.4b[0]\n" + ".inst 0x4fb8e37d // sdot v29.4s, v27.16b, v24.4b[1]\n" + ".inst 0x4f98eb69 // sdot v9.4s, v27.16b, v24.4b[2]\n" + ".inst 0x4fb8eb74 // sdot v20.4s, v27.16b, v24.4b[3]\n" + "ldr q24, [x25, #0x70]\n" + "add x25, x25, #0x88\n" + ".inst 0x4f98e3ca // sdot v10.4s, v30.16b, v24.4b[0]\n" + ".inst 0x4fb8e3dd // sdot v29.4s, v30.16b, v24.4b[1]\n" + ".inst 0x4f98ebc9 // sdot v9.4s, v30.16b, v24.4b[2]\n" + ".inst 0x4fb8ebd4 // sdot v20.4s, v30.16b, v24.4b[3]\n" + "fmul v24.4s, v17.4s, v2.s[0]\n" + "scvtf v10.4s, v10.4s, #0x4\n" + "scvtf v29.4s, v29.4s, #0x4\n" 
+ "scvtf v9.4s, v9.4s, #0x4\n" + "scvtf v20.4s, v20.4s, #0x4\n" + "fmla v15.4s, v10.4s, v24.4s\n" + "ldr q24, [x23, #0x0]\n" + "fmul v10.4s, v17.4s, v2.s[1]\n" + "fmla v19.4s, v29.4s, v10.4s\n" + "ldr q10, [x23, #0x10]\n" + "fmul v29.4s, v17.4s, v2.s[2]\n" + "fmul v2.4s, v17.4s, v2.s[3]\n" + "fmla v18.4s, v9.4s, v29.4s\n" + "movi v9.4s, #0x0\n" + "movi v29.4s, #0x0\n" + ".inst 0x4f98e189 // sdot v9.4s, v12.16b, v24.4b[0]\n" + ".inst 0x4fb8e19d // sdot v29.4s, v12.16b, v24.4b[1]\n" + "fmla v14.4s, v20.4s, v2.4s\n" + "movi v20.4s, #0x0\n" + "movi v2.4s, #0x0\n" + ".inst 0x4f98e994 // sdot v20.4s, v12.16b, v24.4b[2]\n" + ".inst 0x4fb8e982 // sdot v2.4s, v12.16b, v24.4b[3]\n" + "ldr q24, [x23, #0x20]\n" + ".inst 0x4f8ae3e9 // sdot v9.4s, v31.16b, v10.4b[0]\n" + ".inst 0x4faae3fd // sdot v29.4s, v31.16b, v10.4b[1]\n" + ".inst 0x4f8aebf4 // sdot v20.4s, v31.16b, v10.4b[2]\n" + ".inst 0x4faaebe2 // sdot v2.4s, v31.16b, v10.4b[3]\n" + "ldr q10, [x23, #0x30]\n" + ".inst 0x4f98e0c9 // sdot v9.4s, v6.16b, v24.4b[0]\n" + ".inst 0x4fb8e0dd // sdot v29.4s, v6.16b, v24.4b[1]\n" + ".inst 0x4f98e8d4 // sdot v20.4s, v6.16b, v24.4b[2]\n" + ".inst 0x4fb8e8c2 // sdot v2.4s, v6.16b, v24.4b[3]\n" + "ldr q24, [x23, #0x40]\n" + ".inst 0x4f8ae389 // sdot v9.4s, v28.16b, v10.4b[0]\n" + ".inst 0x4faae39d // sdot v29.4s, v28.16b, v10.4b[1]\n" + ".inst 0x4f8aeb94 // sdot v20.4s, v28.16b, v10.4b[2]\n" + ".inst 0x4faaeb82 // sdot v2.4s, v28.16b, v10.4b[3]\n" + "ldr q10, [x23, #0x50]\n" + ".inst 0x4f98e069 // sdot v9.4s, v3.16b, v24.4b[0]\n" + ".inst 0x4fb8e07d // sdot v29.4s, v3.16b, v24.4b[1]\n" + ".inst 0x4f98e874 // sdot v20.4s, v3.16b, v24.4b[2]\n" + ".inst 0x4fb8e862 // sdot v2.4s, v3.16b, v24.4b[3]\n" + "ldr q24, [x23, #0x60]\n" + ".inst 0x4f8ae2c9 // sdot v9.4s, v22.16b, v10.4b[0]\n" + ".inst 0x4faae2dd // sdot v29.4s, v22.16b, v10.4b[1]\n" + ".inst 0x4f8aead4 // sdot v20.4s, v22.16b, v10.4b[2]\n" + ".inst 0x4faaeac2 // sdot v2.4s, v22.16b, v10.4b[3]\n" + "ldr q10, [x23, #0x70]\n" + "add 
x23, x23, #0x88\n" + ".inst 0x4f98e369 // sdot v9.4s, v27.16b, v24.4b[0]\n" + ".inst 0x4fb8e37d // sdot v29.4s, v27.16b, v24.4b[1]\n" + ".inst 0x4f98eb74 // sdot v20.4s, v27.16b, v24.4b[2]\n" + ".inst 0x4fb8eb62 // sdot v2.4s, v27.16b, v24.4b[3]\n" + "ldr q24, [x22, #0x0]\n" + ".inst 0x4f8ae3c9 // sdot v9.4s, v30.16b, v10.4b[0]\n" + ".inst 0x4faae3dd // sdot v29.4s, v30.16b, v10.4b[1]\n" + ".inst 0x4f8aebd4 // sdot v20.4s, v30.16b, v10.4b[2]\n" + ".inst 0x4faaebc2 // sdot v2.4s, v30.16b, v10.4b[3]\n" + "fmul v10.4s, v17.4s, v26.s[0]\n" + "scvtf v9.4s, v9.4s, #0x4\n" + "scvtf v29.4s, v29.4s, #0x4\n" + "scvtf v20.4s, v20.4s, #0x4\n" + "scvtf v2.4s, v2.4s, #0x4\n" + "fmla v11.4s, v9.4s, v10.4s\n" + "ldr q9, [x22, #0x10]\n" + "fmul v10.4s, v17.4s, v26.s[1]\n" + "fmla v13.4s, v29.4s, v10.4s\n" + "ldr d29, [x22, #-0x8]\n" + "fmul v10.4s, v17.4s, v26.s[2]\n" + "fmul v26.4s, v17.4s, v26.s[3]\n" + "fcvtl v29.4s, v29.4h\n" + "fmla v23.4s, v20.4s, v10.4s\n" + "movi v20.4s, #0x0\n" + "movi v10.4s, #0x0\n" + "fmla v16.4s, v2.4s, v26.4s\n" + "movi v26.4s, #0x0\n" + "movi v2.4s, #0x0\n" + ".inst 0x4f98e194 // sdot v20.4s, v12.16b, v24.4b[0]\n" + ".inst 0x4fb8e18a // sdot v10.4s, v12.16b, v24.4b[1]\n" + ".inst 0x4f98e99a // sdot v26.4s, v12.16b, v24.4b[2]\n" + ".inst 0x4fb8e982 // sdot v2.4s, v12.16b, v24.4b[3]\n" + "ldr q24, [x22, #0x20]\n" + ".inst 0x4f89e3f4 // sdot v20.4s, v31.16b, v9.4b[0]\n" + ".inst 0x4fa9e3ea // sdot v10.4s, v31.16b, v9.4b[1]\n" + ".inst 0x4f89ebfa // sdot v26.4s, v31.16b, v9.4b[2]\n" + ".inst 0x4fa9ebe2 // sdot v2.4s, v31.16b, v9.4b[3]\n" + "ldr q9, [x22, #0x30]\n" + ".inst 0x4f98e0d4 // sdot v20.4s, v6.16b, v24.4b[0]\n" + ".inst 0x4fb8e0ca // sdot v10.4s, v6.16b, v24.4b[1]\n" + ".inst 0x4f98e8da // sdot v26.4s, v6.16b, v24.4b[2]\n" + ".inst 0x4fb8e8c2 // sdot v2.4s, v6.16b, v24.4b[3]\n" + "ldr q24, [x22, #0x40]\n" + ".inst 0x4f89e394 // sdot v20.4s, v28.16b, v9.4b[0]\n" + ".inst 0x4fa9e38a // sdot v10.4s, v28.16b, v9.4b[1]\n" + ".inst 0x4f89eb9a // sdot 
v26.4s, v28.16b, v9.4b[2]\n" + ".inst 0x4fa9eb82 // sdot v2.4s, v28.16b, v9.4b[3]\n" + "ldr q9, [x22, #0x50]\n" + ".inst 0x4f98e074 // sdot v20.4s, v3.16b, v24.4b[0]\n" + ".inst 0x4fb8e06a // sdot v10.4s, v3.16b, v24.4b[1]\n" + ".inst 0x4f98e87a // sdot v26.4s, v3.16b, v24.4b[2]\n" + ".inst 0x4fb8e862 // sdot v2.4s, v3.16b, v24.4b[3]\n" + "ldr q24, [x22, #0x60]\n" + ".inst 0x4f89e2d4 // sdot v20.4s, v22.16b, v9.4b[0]\n" + ".inst 0x4fa9e2ca // sdot v10.4s, v22.16b, v9.4b[1]\n" + ".inst 0x4f89eada // sdot v26.4s, v22.16b, v9.4b[2]\n" + ".inst 0x4fa9eac2 // sdot v2.4s, v22.16b, v9.4b[3]\n" + "ldr q9, [x22, #0x70]\n" + "add x22, x22, #0x88\n" + ".inst 0x4f98e374 // sdot v20.4s, v27.16b, v24.4b[0]\n" + ".inst 0x4fb8e36a // sdot v10.4s, v27.16b, v24.4b[1]\n" + ".inst 0x4f98eb7a // sdot v26.4s, v27.16b, v24.4b[2]\n" + ".inst 0x4fb8eb62 // sdot v2.4s, v27.16b, v24.4b[3]\n" + "ldr q24, [x21, #0x0]\n" + ".inst 0x4f89e3d4 // sdot v20.4s, v30.16b, v9.4b[0]\n" + ".inst 0x4fa9e3ca // sdot v10.4s, v30.16b, v9.4b[1]\n" + ".inst 0x4f89ebda // sdot v26.4s, v30.16b, v9.4b[2]\n" + ".inst 0x4fa9ebc2 // sdot v2.4s, v30.16b, v9.4b[3]\n" + "fmul v9.4s, v17.4s, v29.s[0]\n" + "scvtf v20.4s, v20.4s, #0x4\n" + "scvtf v10.4s, v10.4s, #0x4\n" + "scvtf v26.4s, v26.4s, #0x4\n" + "scvtf v2.4s, v2.4s, #0x4\n" + "fmla v25.4s, v20.4s, v9.4s\n" + "ldr q9, [x21, #0x10]\n" + "fmul v20.4s, v17.4s, v29.s[1]\n" + "fmla v7.4s, v10.4s, v20.4s\n" + "ldr d20, [x21, #-0x8]\n" + "fmul v10.4s, v17.4s, v29.s[2]\n" + "fmul v29.4s, v17.4s, v29.s[3]\n" + "fcvtl v20.4s, v20.4h\n" + "fmla v0.4s, v26.4s, v10.4s\n" + "movi v26.4s, #0x0\n" + "movi v10.4s, #0x0\n" + "fmla v4.4s, v2.4s, v29.4s\n" + "movi v2.4s, #0x0\n" + "movi v29.4s, #0x0\n" + ".inst 0x4f98e19a // sdot v26.4s, v12.16b, v24.4b[0]\n" + ".inst 0x4fb8e18a // sdot v10.4s, v12.16b, v24.4b[1]\n" + ".inst 0x4f98e982 // sdot v2.4s, v12.16b, v24.4b[2]\n" + ".inst 0x4fb8e99d // sdot v29.4s, v12.16b, v24.4b[3]\n" + "ldr q12, [x21, #0x20]\n" + "fmul v24.4s, v17.4s, 
v20.s[0]\n" + ".inst 0x4f89e3fa // sdot v26.4s, v31.16b, v9.4b[0]\n" + ".inst 0x4fa9e3ea // sdot v10.4s, v31.16b, v9.4b[1]\n" + ".inst 0x4f89ebe2 // sdot v2.4s, v31.16b, v9.4b[2]\n" + ".inst 0x4fa9ebfd // sdot v29.4s, v31.16b, v9.4b[3]\n" + "ldr q9, [x21, #0x30]\n" + "fmul v31.4s, v17.4s, v20.s[1]\n" + ".inst 0x4f8ce0da // sdot v26.4s, v6.16b, v12.4b[0]\n" + ".inst 0x4face0ca // sdot v10.4s, v6.16b, v12.4b[1]\n" + ".inst 0x4f8ce8c2 // sdot v2.4s, v6.16b, v12.4b[2]\n" + ".inst 0x4face8dd // sdot v29.4s, v6.16b, v12.4b[3]\n" + "ldr q12, [x21, #0x40]\n" + "fmul v6.4s, v17.4s, v20.s[2]\n" + "fmul v20.4s, v17.4s, v20.s[3]\n" + ".inst 0x4f89e39a // sdot v26.4s, v28.16b, v9.4b[0]\n" + ".inst 0x4fa9e38a // sdot v10.4s, v28.16b, v9.4b[1]\n" + ".inst 0x4f89eb82 // sdot v2.4s, v28.16b, v9.4b[2]\n" + ".inst 0x4fa9eb9d // sdot v29.4s, v28.16b, v9.4b[3]\n" + "ldr q9, [x21, #0x50]\n" + ".inst 0x4f8ce07a // sdot v26.4s, v3.16b, v12.4b[0]\n" + ".inst 0x4face06a // sdot v10.4s, v3.16b, v12.4b[1]\n" + ".inst 0x4f8ce862 // sdot v2.4s, v3.16b, v12.4b[2]\n" + ".inst 0x4face87d // sdot v29.4s, v3.16b, v12.4b[3]\n" + "ldr q12, [x21, #0x60]\n" + ".inst 0x4f89e2da // sdot v26.4s, v22.16b, v9.4b[0]\n" + ".inst 0x4fa9e2ca // sdot v10.4s, v22.16b, v9.4b[1]\n" + ".inst 0x4f89eac2 // sdot v2.4s, v22.16b, v9.4b[2]\n" + ".inst 0x4fa9eadd // sdot v29.4s, v22.16b, v9.4b[3]\n" + "ldr q17, [x21, #0x70]\n" + "add x21, x21, #0x88\n" + ".inst 0x4f8ce37a // sdot v26.4s, v27.16b, v12.4b[0]\n" + ".inst 0x4face36a // sdot v10.4s, v27.16b, v12.4b[1]\n" + ".inst 0x4f8ceb62 // sdot v2.4s, v27.16b, v12.4b[2]\n" + ".inst 0x4faceb7d // sdot v29.4s, v27.16b, v12.4b[3]\n" + ".inst 0x4f91e3da // sdot v26.4s, v30.16b, v17.4b[0]\n" + ".inst 0x4fb1e3ca // sdot v10.4s, v30.16b, v17.4b[1]\n" + ".inst 0x4f91ebc2 // sdot v2.4s, v30.16b, v17.4b[2]\n" + ".inst 0x4fb1ebdd // sdot v29.4s, v30.16b, v17.4b[3]\n" + "scvtf v26.4s, v26.4s, #0x4\n" + "scvtf v10.4s, v10.4s, #0x4\n" + "fmla v5.4s, v26.4s, v24.4s\n" + "scvtf v2.4s, 
v2.4s, #0x4\n" + "scvtf v29.4s, v29.4s, #0x4\n" + "fmla v21.4s, v10.4s, v31.4s\n" + "fmla v8.4s, v2.4s, v6.4s\n" + "fmla v1.4s, v29.4s, v20.4s\n" + "bgt 3b\n" + "mov x20, %x[res_ptr]\n" + "subs x27, x27, #0x4\n" + "add %x[res_ptr], %x[res_ptr], #0x10\n" + "str q15, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q19, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q18, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q14, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q11, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q13, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q23, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q16, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q25, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q7, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q0, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q4, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q5, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q21, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q8, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q1, [x20, #0x0]\n" + "bne 2b\n" + "mov x20, #0x4\n" + "sub x10, x10, #0x10\n" + "cmp x10, #0x10\n" + "mov %x[res_ptr], x26\n" + "madd %x[a_ptr], x20, x9, %x[a_ptr]\n" + "bge 1b\n" + "4:" // Row loop skip + "cbz x10, 9f\n" + "5:" // Row tail: Row loop + "add x24, %x[b_ptr], #0x8\n" + "mov x23, %x[nc]\n" + "add x22, %x[res_ptr], %x[res_stride], LSL #2\n" + "6:" // Row tail: Column loop + "movi v15.16b, #0x0\n" + "movi v19.16b, #0x0\n" + "add x25, %x[a_ptr], #0x8\n" + "mov x21, %x[nb]\n" + "movi v18.16b, #0x0\n" + "movi v14.16b, #0x0\n" + "7:" // Row tail: Block loop + "ldr q7, [x24, #0x0]\n" + "ldr q5, [x25, #0x0]\n" + "movi v9.16b, #0x4\n" + "movi v4.4s, #0x0\n" + "ldr q3, [x24, #0x10]\n" + "ldr q2, [x25, #0x10]\n" + "movi v1.4s, #0x0\n" + "movi v0.4s, #0x0\n" + "ldr q13, [x24, #0x20]\n" + "ldr q31, 
[x25, #0x20]\n" + "movi v30.4s, #0x0\n" + "movi v29.16b, #0xf0\n" + "ldr q28, [x24, #0x30]\n" + "ldr q27, [x25, #0x30]\n" + "sshl v20.16b, v7.16b, v9.16b\n" + "sub x20, x24, #0x8\n" + "ldr q26, [x25, #0x40]\n" + "ldr q25, [x25, #0x50]\n" + "sshl v17.16b, v3.16b, v9.16b\n" + "and v7.16b, v7.16b, v29.16b\n" + "ldr q24, [x25, #0x60]\n" + "ldr q16, [x25, #0x70]\n" + "sshl v22.16b, v13.16b, v9.16b\n" + "and v3.16b, v3.16b, v29.16b\n" + "ldr d21, [x20, #0x0]\n" + "ldr d12, [x25, #-0x8]\n" + ".inst 0x4f85e284 // sdot v4.4s, v20.16b, v5.4b[0]\n" + ".inst 0x4fa5e281 // sdot v1.4s, v20.16b, v5.4b[1]\n" + ".inst 0x4f85ea80 // sdot v0.4s, v20.16b, v5.4b[2]\n" + ".inst 0x4fa5ea9e // sdot v30.4s, v20.16b, v5.4b[3]\n" + "sshl v9.16b, v28.16b, v9.16b\n" + "subs x21, x21, #0x1\n" + "and v13.16b, v13.16b, v29.16b\n" + "and v28.16b, v28.16b, v29.16b\n" + "add x25, x25, #0x88\n" + "add x24, x24, #0x48\n" + "fcvtl v21.4s, v21.4h\n" + "fcvtl v12.4s, v12.4h\n" + ".inst 0x4f82e224 // sdot v4.4s, v17.16b, v2.4b[0]\n" + ".inst 0x4fa2e221 // sdot v1.4s, v17.16b, v2.4b[1]\n" + ".inst 0x4f82ea20 // sdot v0.4s, v17.16b, v2.4b[2]\n" + ".inst 0x4fa2ea3e // sdot v30.4s, v17.16b, v2.4b[3]\n" + "fmul v11.4s, v21.4s, v12.s[0]\n" + "fmul v23.4s, v21.4s, v12.s[1]\n" + "fmul v17.4s, v21.4s, v12.s[2]\n" + ".inst 0x4f9fe2c4 // sdot v4.4s, v22.16b, v31.4b[0]\n" + "fmul v6.4s, v21.4s, v12.s[3]\n" + ".inst 0x4fbfe2c1 // sdot v1.4s, v22.16b, v31.4b[1]\n" + ".inst 0x4f9feac0 // sdot v0.4s, v22.16b, v31.4b[2]\n" + ".inst 0x4fbfeade // sdot v30.4s, v22.16b, v31.4b[3]\n" + ".inst 0x4f9be124 // sdot v4.4s, v9.16b, v27.4b[0]\n" + ".inst 0x4fbbe121 // sdot v1.4s, v9.16b, v27.4b[1]\n" + ".inst 0x4f9be920 // sdot v0.4s, v9.16b, v27.4b[2]\n" + ".inst 0x4fbbe93e // sdot v30.4s, v9.16b, v27.4b[3]\n" + ".inst 0x4f9ae0e4 // sdot v4.4s, v7.16b, v26.4b[0]\n" + ".inst 0x4fbae0e1 // sdot v1.4s, v7.16b, v26.4b[1]\n" + ".inst 0x4f9ae8e0 // sdot v0.4s, v7.16b, v26.4b[2]\n" + ".inst 0x4fbae8fe // sdot v30.4s, v7.16b, v26.4b[3]\n" 
+ ".inst 0x4f99e064 // sdot v4.4s, v3.16b, v25.4b[0]\n" + ".inst 0x4fb9e061 // sdot v1.4s, v3.16b, v25.4b[1]\n" + ".inst 0x4f99e860 // sdot v0.4s, v3.16b, v25.4b[2]\n" + ".inst 0x4fb9e87e // sdot v30.4s, v3.16b, v25.4b[3]\n" + ".inst 0x4f98e1a4 // sdot v4.4s, v13.16b, v24.4b[0]\n" + ".inst 0x4fb8e1a1 // sdot v1.4s, v13.16b, v24.4b[1]\n" + ".inst 0x4f98e9a0 // sdot v0.4s, v13.16b, v24.4b[2]\n" + ".inst 0x4fb8e9be // sdot v30.4s, v13.16b, v24.4b[3]\n" + ".inst 0x4f90e384 // sdot v4.4s, v28.16b, v16.4b[0]\n" + ".inst 0x4fb0e381 // sdot v1.4s, v28.16b, v16.4b[1]\n" + ".inst 0x4f90eb80 // sdot v0.4s, v28.16b, v16.4b[2]\n" + ".inst 0x4fb0eb9e // sdot v30.4s, v28.16b, v16.4b[3]\n" + "scvtf v4.4s, v4.4s, #0x4\n" + "scvtf v1.4s, v1.4s, #0x4\n" + "scvtf v0.4s, v0.4s, #0x4\n" + "fmla v15.4s, v4.4s, v11.4s\n" + "scvtf v30.4s, v30.4s, #0x4\n" + "fmla v19.4s, v1.4s, v23.4s\n" + "fmla v18.4s, v0.4s, v17.4s\n" + "fmla v14.4s, v30.4s, v6.4s\n" + "bgt 7b\n" + "mov x20, %x[res_ptr]\n" + "cmp x10, #0x1\n" + "str q15, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "ble 8f\n" + "cmp x10, #0x2\n" + "str q19, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "ble 8f\n" + "cmp x10, #0x3\n" + "str q18, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "ble 8f\n" + "str q14, [x20, #0x0]\n" + "8:" // Row tail: Accumulator store skip + "subs x23, x23, #0x4\n" + "add %x[res_ptr], %x[res_ptr], #0x10\n" + "bne 6b\n" + "subs x10, x10, #0x4\n" + "add %x[a_ptr], %x[a_ptr], x9\n" + "mov %x[res_ptr], x22\n" + "bgt 5b\n" + "9:" // Row tail: Row loop skip + : [a_ptr] "+&r" (a_ptr), [res_ptr] "+&r" (res_ptr) + : [b_ptr] "r" (b_ptr), [nr] "r" (nr), [nb] "r" (nb), [res_stride] "r" (res_stride), [nc] "r" (nc) + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31", "x9", "x10", "x20", "x21", "x22", "x23", "x24", "x25", 
"x26", "x27", "x28" + ); + return; + } +#endif // #if ! ((defined(_MSC_VER)) && ! defined(__clang__)) && defined(__aarch64__) && defined(__ARM_NEON) + { + float sumf[4][4]; + int sumi; + + for (int y = 0; y < nr / 4; y++) { + const block_q8_0x4 * a_ptr = (const block_q8_0x4 *) vy + (y * nb); + for (int x = 0; x < nc / ncols_interleaved; x++) { + const block_q4_0x4 * b_ptr = (const block_q4_0x4 *) vx + (x * nb); + for (int m = 0; m < 4; m++) { + for (int j = 0; j < ncols_interleaved; j++) sumf[m][j] = 0.0; + } + for (int l = 0; l < nb; l++) { + for (int k = 0; k < (qk / (2 * blocklen)); k++) { + for (int m = 0; m < 4; m++) { + for (int j = 0; j < ncols_interleaved; j++) { + sumi = 0; + for (int i = 0; i < blocklen; ++i) { + const int v0 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] << 4); + const int v1 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] & 0xF0); + sumi += ((v0 * a_ptr[l].qs[k * 4 * blocklen + m * blocklen + i]) + + (v1 * a_ptr[l].qs[k * 4 * blocklen + m * blocklen + i + qk / 2 * 4])) >> 4; + } + sumf[m][j] += sumi * GGML_FP16_TO_FP32(b_ptr[l].d[j]) * GGML_FP16_TO_FP32(a_ptr[l].d[m]); + } + } + } + } + for (int m = 0; m < 4; m++) { + for (int j = 0; j < ncols_interleaved; j++) + s[(y * 4 + m) * bs + x * ncols_interleaved + j] = sumf[m][j]; + } + } + } + } +} + +void ggml_gemm_q4_0_4x8_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, const void * restrict vy, int nr, int nc) { + const int qk = QK8_0; + const int nb = n / qk; + const int ncols_interleaved = 4; + const int blocklen = 8; + + assert (n % qk == 0); + assert (nr % 4 == 0); + assert (nc % ncols_interleaved == 0); + + UNUSED(s); + UNUSED(bs); + UNUSED(vx); + UNUSED(vy); + UNUSED(nr); + UNUSED(nc); + UNUSED(nb); + UNUSED(ncols_interleaved); + UNUSED(blocklen); + +#if ! ((defined(_MSC_VER)) && ! 
defined(__clang__)) && defined(__aarch64__) && defined(__ARM_NEON) && defined(__ARM_FEATURE_MATMUL_INT8) + if (ggml_cpu_has_neon() && ggml_cpu_has_matmul_int8()) { + const void * b_ptr = vx; + const void * a_ptr = vy; + float * res_ptr = s; + size_t res_stride = bs * sizeof(float); + + __asm__ __volatile__( + "mov x10, %x[nr]\n" + "mov x9, #0x88\n" + "cmp x10, #0x10\n" + "mul x9, %x[nb], x9\n" + "blt 4f\n" + "1:" // Row loop + "add x28, %x[b_ptr], #0x8\n" + "mov x27, %x[nc]\n" + "add x26, %x[res_ptr], %x[res_stride], LSL #4\n" + "2:" // Column loop + "add x25, %x[a_ptr], #0x8\n" + "movi v2.16b, #0x0\n" + "movi v10.16b, #0x0\n" + "mov x24, %x[nb]\n" + "add x23, x25, x9\n" + "movi v12.16b, #0x0\n" + "movi v28.16b, #0x0\n" + "add x22, x23, x9\n" + "movi v11.16b, #0x0\n" + "movi v13.16b, #0x0\n" + "add x21, x22, x9\n" + "movi v22.16b, #0x0\n" + "movi v23.16b, #0x0\n" + "movi v25.16b, #0x0\n" + "movi v5.16b, #0x0\n" + "movi v7.16b, #0x0\n" + "movi v4.16b, #0x0\n" + "movi v6.16b, #0x0\n" + "movi v30.16b, #0x0\n" + "movi v24.16b, #0x0\n" + "movi v14.16b, #0x0\n" + "3:" // Block loop + "ldr q21, [x28, #0x0]\n" + "ldr q16, [x28, #0x10]\n" + "movi v1.16b, #0x4\n" + "movi v19.4s, #0x0\n" + "ldr q27, [x25, #0x0]\n" + "ldr q15, [x25, #0x10]\n" + "movi v26.4s, #0x0\n" + "movi v18.4s, #0x0\n" + "ldr q29, [x28, #0x20]\n" + "ldr q3, [x28, #0x30]\n" + "movi v17.4s, #0x0\n" + "movi v0.16b, #0xf0\n" + "ldr d20, [x25, #-0x8]\n" + "ldr d9, [x23, #-0x8]\n" + "sshl v8.16b, v21.16b, v1.16b\n" + "sshl v31.16b, v16.16b, v1.16b\n" + "and v21.16b, v21.16b, v0.16b\n" + "and v16.16b, v16.16b, v0.16b\n" + "sub x20, x28, #0x8\n" + "subs x24, x24, #0x1\n" + "add x28, x28, #0x48\n" + ".inst 0x4e88a773 // smmla v19.4s, v27.16b, v8.16b\n" + ".inst 0x4e9fa77a // smmla v26.4s, v27.16b, v31.16b\n" + "ldr q27, [x25, #0x20]\n" + ".inst 0x4e88a5f2 // smmla v18.4s, v15.16b, v8.16b\n" + ".inst 0x4e9fa5f1 // smmla v17.4s, v15.16b, v31.16b\n" + "sshl v15.16b, v29.16b, v1.16b\n" + "sshl v1.16b, v3.16b, v1.16b\n" 
+ "and v29.16b, v29.16b, v0.16b\n" + "and v3.16b, v3.16b, v0.16b\n" + "ldr q0, [x25, #0x30]\n" + "fcvtl v20.4s, v20.4h\n" + ".inst 0x4e8fa773 // smmla v19.4s, v27.16b, v15.16b\n" + "fcvtl v9.4s, v9.4h\n" + ".inst 0x4e81a77a // smmla v26.4s, v27.16b, v1.16b\n" + "ldr q27, [x25, #0x40]\n" + ".inst 0x4e8fa412 // smmla v18.4s, v0.16b, v15.16b\n" + ".inst 0x4e81a411 // smmla v17.4s, v0.16b, v1.16b\n" + "ldr q0, [x25, #0x50]\n" + ".inst 0x4e95a773 // smmla v19.4s, v27.16b, v21.16b\n" + ".inst 0x4e90a77a // smmla v26.4s, v27.16b, v16.16b\n" + "ldr q27, [x25, #0x60]\n" + ".inst 0x4e95a412 // smmla v18.4s, v0.16b, v21.16b\n" + ".inst 0x4e90a411 // smmla v17.4s, v0.16b, v16.16b\n" + "ldr q0, [x25, #0x70]\n" + "add x25, x25, #0x88\n" + ".inst 0x4e9da773 // smmla v19.4s, v27.16b, v29.16b\n" + ".inst 0x4e83a77a // smmla v26.4s, v27.16b, v3.16b\n" + "ldr d27, [x20, #0x0]\n" + ".inst 0x4e9da412 // smmla v18.4s, v0.16b, v29.16b\n" + ".inst 0x4e83a411 // smmla v17.4s, v0.16b, v3.16b\n" + "fcvtl v27.4s, v27.4h\n" + "uzp1 v0.2d, v19.2d, v26.2d\n" + "uzp2 v26.2d, v19.2d, v26.2d\n" + "fmul v19.4s, v27.4s, v20.s[0]\n" + "scvtf v0.4s, v0.4s, #0x4\n" + "scvtf v26.4s, v26.4s, #0x4\n" + "fmla v2.4s, v0.4s, v19.4s\n" + "ldr q19, [x23, #0x0]\n" + "uzp1 v0.2d, v18.2d, v17.2d\n" + "uzp2 v18.2d, v18.2d, v17.2d\n" + "fmul v17.4s, v27.4s, v20.s[1]\n" + "scvtf v0.4s, v0.4s, #0x4\n" + "scvtf v18.4s, v18.4s, #0x4\n" + "fmla v10.4s, v26.4s, v17.4s\n" + "ldr q17, [x23, #0x10]\n" + "fmul v26.4s, v27.4s, v20.s[2]\n" + "fmul v20.4s, v27.4s, v20.s[3]\n" + "fmla v12.4s, v0.4s, v26.4s\n" + "ldr d0, [x22, #-0x8]\n" + "ldr d26, [x21, #-0x8]\n" + "fcvtl v0.4s, v0.4h\n" + "fmla v28.4s, v18.4s, v20.4s\n" + "movi v20.4s, #0x0\n" + "movi v18.4s, #0x0\n" + ".inst 0x4e88a674 // smmla v20.4s, v19.16b, v8.16b\n" + ".inst 0x4e9fa672 // smmla v18.4s, v19.16b, v31.16b\n" + "ldr q19, [x23, #0x20]\n" + "fcvtl v26.4s, v26.4h\n" + ".inst 0x4e8fa674 // smmla v20.4s, v19.16b, v15.16b\n" + ".inst 0x4e81a672 // smmla v18.4s, 
v19.16b, v1.16b\n" + "ldr q19, [x23, #0x40]\n" + ".inst 0x4e95a674 // smmla v20.4s, v19.16b, v21.16b\n" + ".inst 0x4e90a672 // smmla v18.4s, v19.16b, v16.16b\n" + "ldr q19, [x23, #0x60]\n" + ".inst 0x4e9da674 // smmla v20.4s, v19.16b, v29.16b\n" + ".inst 0x4e83a672 // smmla v18.4s, v19.16b, v3.16b\n" + "uzp1 v19.2d, v20.2d, v18.2d\n" + "scvtf v19.4s, v19.4s, #0x4\n" + "uzp2 v20.2d, v20.2d, v18.2d\n" + "fmul v18.4s, v27.4s, v9.s[0]\n" + "scvtf v20.4s, v20.4s, #0x4\n" + "fmla v11.4s, v19.4s, v18.4s\n" + "ldr q18, [x22, #0x0]\n" + "fmul v19.4s, v27.4s, v9.s[1]\n" + "fmla v13.4s, v20.4s, v19.4s\n" + "movi v19.4s, #0x0\n" + "movi v20.4s, #0x0\n" + ".inst 0x4e88a633 // smmla v19.4s, v17.16b, v8.16b\n" + ".inst 0x4e9fa634 // smmla v20.4s, v17.16b, v31.16b\n" + "ldr q17, [x23, #0x30]\n" + ".inst 0x4e8fa633 // smmla v19.4s, v17.16b, v15.16b\n" + ".inst 0x4e81a634 // smmla v20.4s, v17.16b, v1.16b\n" + "ldr q17, [x23, #0x50]\n" + ".inst 0x4e95a633 // smmla v19.4s, v17.16b, v21.16b\n" + ".inst 0x4e90a634 // smmla v20.4s, v17.16b, v16.16b\n" + "ldr q17, [x23, #0x70]\n" + "add x23, x23, #0x88\n" + ".inst 0x4e9da633 // smmla v19.4s, v17.16b, v29.16b\n" + ".inst 0x4e83a634 // smmla v20.4s, v17.16b, v3.16b\n" + "uzp1 v17.2d, v19.2d, v20.2d\n" + "scvtf v17.4s, v17.4s, #0x4\n" + "uzp2 v20.2d, v19.2d, v20.2d\n" + "fmul v19.4s, v27.4s, v9.s[2]\n" + "fmul v9.4s, v27.4s, v9.s[3]\n" + "scvtf v20.4s, v20.4s, #0x4\n" + "fmla v22.4s, v17.4s, v19.4s\n" + "ldr q17, [x22, #0x10]\n" + "movi v19.4s, #0x0\n" + ".inst 0x4e88a653 // smmla v19.4s, v18.16b, v8.16b\n" + "fmla v23.4s, v20.4s, v9.4s\n" + "movi v20.4s, #0x0\n" + "movi v9.4s, #0x0\n" + ".inst 0x4e9fa654 // smmla v20.4s, v18.16b, v31.16b\n" + "ldr q18, [x22, #0x20]\n" + ".inst 0x4e88a629 // smmla v9.4s, v17.16b, v8.16b\n" + ".inst 0x4e8fa653 // smmla v19.4s, v18.16b, v15.16b\n" + ".inst 0x4e81a654 // smmla v20.4s, v18.16b, v1.16b\n" + "ldr q18, [x22, #0x40]\n" + ".inst 0x4e95a653 // smmla v19.4s, v18.16b, v21.16b\n" + ".inst 0x4e90a654 // 
smmla v20.4s, v18.16b, v16.16b\n" + "ldr q18, [x22, #0x60]\n" + ".inst 0x4e9da653 // smmla v19.4s, v18.16b, v29.16b\n" + ".inst 0x4e83a654 // smmla v20.4s, v18.16b, v3.16b\n" + "movi v18.4s, #0x0\n" + ".inst 0x4e9fa632 // smmla v18.4s, v17.16b, v31.16b\n" + "ldr q17, [x22, #0x30]\n" + ".inst 0x4e8fa629 // smmla v9.4s, v17.16b, v15.16b\n" + ".inst 0x4e81a632 // smmla v18.4s, v17.16b, v1.16b\n" + "ldr q17, [x22, #0x50]\n" + ".inst 0x4e95a629 // smmla v9.4s, v17.16b, v21.16b\n" + ".inst 0x4e90a632 // smmla v18.4s, v17.16b, v16.16b\n" + "ldr q17, [x22, #0x70]\n" + "add x22, x22, #0x88\n" + ".inst 0x4e9da629 // smmla v9.4s, v17.16b, v29.16b\n" + ".inst 0x4e83a632 // smmla v18.4s, v17.16b, v3.16b\n" + "uzp1 v17.2d, v19.2d, v20.2d\n" + "uzp2 v20.2d, v19.2d, v20.2d\n" + "fmul v19.4s, v27.4s, v0.s[0]\n" + "scvtf v17.4s, v17.4s, #0x4\n" + "scvtf v20.4s, v20.4s, #0x4\n" + "fmla v25.4s, v17.4s, v19.4s\n" + "ldr q19, [x21, #0x0]\n" + "fmul v17.4s, v27.4s, v0.s[1]\n" + "fmla v5.4s, v20.4s, v17.4s\n" + "ldr q17, [x21, #0x10]\n" + "uzp1 v20.2d, v9.2d, v18.2d\n" + "uzp2 v9.2d, v9.2d, v18.2d\n" + "fmul v18.4s, v27.4s, v0.s[2]\n" + "fmul v0.4s, v27.4s, v0.s[3]\n" + "scvtf v20.4s, v20.4s, #0x4\n" + "scvtf v9.4s, v9.4s, #0x4\n" + "fmla v7.4s, v20.4s, v18.4s\n" + "movi v20.4s, #0x0\n" + "movi v18.4s, #0x0\n" + ".inst 0x4e88a674 // smmla v20.4s, v19.16b, v8.16b\n" + ".inst 0x4e9fa672 // smmla v18.4s, v19.16b, v31.16b\n" + "ldr q19, [x21, #0x20]\n" + "fmla v4.4s, v9.4s, v0.4s\n" + "movi v9.4s, #0x0\n" + "movi v0.4s, #0x0\n" + ".inst 0x4e88a629 // smmla v9.4s, v17.16b, v8.16b\n" + "fmul v8.4s, v27.4s, v26.s[0]\n" + ".inst 0x4e9fa620 // smmla v0.4s, v17.16b, v31.16b\n" + "ldr q17, [x21, #0x30]\n" + ".inst 0x4e8fa674 // smmla v20.4s, v19.16b, v15.16b\n" + "fmul v31.4s, v27.4s, v26.s[1]\n" + ".inst 0x4e81a672 // smmla v18.4s, v19.16b, v1.16b\n" + "ldr q19, [x21, #0x40]\n" + ".inst 0x4e8fa629 // smmla v9.4s, v17.16b, v15.16b\n" + "fmul v15.4s, v27.4s, v26.s[2]\n" + "fmul v27.4s, v27.4s, 
v26.s[3]\n" + ".inst 0x4e81a620 // smmla v0.4s, v17.16b, v1.16b\n" + "ldr q1, [x21, #0x50]\n" + ".inst 0x4e95a674 // smmla v20.4s, v19.16b, v21.16b\n" + ".inst 0x4e90a672 // smmla v18.4s, v19.16b, v16.16b\n" + "ldr q26, [x21, #0x60]\n" + ".inst 0x4e95a429 // smmla v9.4s, v1.16b, v21.16b\n" + ".inst 0x4e90a420 // smmla v0.4s, v1.16b, v16.16b\n" + "ldr q21, [x21, #0x70]\n" + "add x21, x21, #0x88\n" + ".inst 0x4e9da754 // smmla v20.4s, v26.16b, v29.16b\n" + ".inst 0x4e83a752 // smmla v18.4s, v26.16b, v3.16b\n" + ".inst 0x4e9da6a9 // smmla v9.4s, v21.16b, v29.16b\n" + ".inst 0x4e83a6a0 // smmla v0.4s, v21.16b, v3.16b\n" + "uzp1 v29.2d, v20.2d, v18.2d\n" + "uzp2 v21.2d, v20.2d, v18.2d\n" + "scvtf v29.4s, v29.4s, #0x4\n" + "uzp1 v18.2d, v9.2d, v0.2d\n" + "uzp2 v16.2d, v9.2d, v0.2d\n" + "scvtf v21.4s, v21.4s, #0x4\n" + "fmla v6.4s, v29.4s, v8.4s\n" + "scvtf v18.4s, v18.4s, #0x4\n" + "scvtf v16.4s, v16.4s, #0x4\n" + "fmla v30.4s, v21.4s, v31.4s\n" + "fmla v24.4s, v18.4s, v15.4s\n" + "fmla v14.4s, v16.4s, v27.4s\n" + "bgt 3b\n" + "mov x20, %x[res_ptr]\n" + "subs x27, x27, #0x4\n" + "add %x[res_ptr], %x[res_ptr], #0x10\n" + "str q2, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q10, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q12, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q28, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q11, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q13, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q22, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q23, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q25, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q5, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q7, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q4, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q6, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q30, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str 
q24, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "str q14, [x20, #0x0]\n" + "bne 2b\n" + "mov x20, #0x4\n" + "sub x10, x10, #0x10\n" + "cmp x10, #0x10\n" + "mov %x[res_ptr], x26\n" + "madd %x[a_ptr], x20, x9, %x[a_ptr]\n" + "bge 1b\n" + "4:" // Row loop skip + "cbz x10, 9f\n" + "5:" // Row tail: Row loop + "add x24, %x[b_ptr], #0x8\n" + "mov x23, %x[nc]\n" + "add x22, %x[res_ptr], %x[res_stride], LSL #2\n" + "6:" // Row tail: Column loop + "movi v2.16b, #0x0\n" + "movi v10.16b, #0x0\n" + "add x25, %x[a_ptr], #0x8\n" + "mov x21, %x[nb]\n" + "movi v12.16b, #0x0\n" + "movi v28.16b, #0x0\n" + "7:" // Row tail: Block loop + "ldr q6, [x24, #0x0]\n" + "ldr q5, [x24, #0x10]\n" + "movi v17.16b, #0x4\n" + "movi v8.4s, #0x0\n" + "ldr q4, [x25, #0x0]\n" + "ldr q13, [x25, #0x10]\n" + "movi v27.4s, #0x0\n" + "movi v0.4s, #0x0\n" + "ldr q31, [x24, #0x20]\n" + "ldr q14, [x24, #0x30]\n" + "movi v29.4s, #0x0\n" + "movi v22.16b, #0xf0\n" + "ldr q11, [x25, #0x20]\n" + "ldr q23, [x25, #0x30]\n" + "sshl v21.16b, v6.16b, v17.16b\n" + "sshl v16.16b, v5.16b, v17.16b\n" + "ldr q20, [x25, #0x40]\n" + "ldr q26, [x25, #0x50]\n" + "and v6.16b, v6.16b, v22.16b\n" + "and v5.16b, v5.16b, v22.16b\n" + "ldr q25, [x25, #0x60]\n" + "ldr q3, [x25, #0x70]\n" + "sshl v19.16b, v31.16b, v17.16b\n" + "sshl v18.16b, v14.16b, v17.16b\n" + "ldr d17, [x25, #-0x8]\n" + ".inst 0x4e95a488 // smmla v8.4s, v4.16b, v21.16b\n" + ".inst 0x4e90a49b // smmla v27.4s, v4.16b, v16.16b\n" + "and v31.16b, v31.16b, v22.16b\n" + ".inst 0x4e95a5a0 // smmla v0.4s, v13.16b, v21.16b\n" + ".inst 0x4e90a5bd // smmla v29.4s, v13.16b, v16.16b\n" + "and v14.16b, v14.16b, v22.16b\n" + "sub x20, x24, #0x8\n" + "ldr d16, [x20, #0x0]\n" + "subs x21, x21, #0x1\n" + "add x25, x25, #0x88\n" + "fcvtl v17.4s, v17.4h\n" + "add x24, x24, #0x48\n" + ".inst 0x4e93a568 // smmla v8.4s, v11.16b, v19.16b\n" + ".inst 0x4e92a57b // smmla v27.4s, v11.16b, v18.16b\n" + ".inst 0x4e93a6e0 // smmla v0.4s, v23.16b, v19.16b\n" + ".inst 0x4e92a6fd // 
smmla v29.4s, v23.16b, v18.16b\n" + "fcvtl v16.4s, v16.4h\n" + ".inst 0x4e86a688 // smmla v8.4s, v20.16b, v6.16b\n" + ".inst 0x4e85a69b // smmla v27.4s, v20.16b, v5.16b\n" + "fmul v23.4s, v16.4s, v17.s[0]\n" + "fmul v21.4s, v16.4s, v17.s[1]\n" + "fmul v1.4s, v16.4s, v17.s[2]\n" + "fmul v20.4s, v16.4s, v17.s[3]\n" + ".inst 0x4e86a740 // smmla v0.4s, v26.16b, v6.16b\n" + ".inst 0x4e85a75d // smmla v29.4s, v26.16b, v5.16b\n" + ".inst 0x4e9fa728 // smmla v8.4s, v25.16b, v31.16b\n" + ".inst 0x4e8ea73b // smmla v27.4s, v25.16b, v14.16b\n" + ".inst 0x4e9fa460 // smmla v0.4s, v3.16b, v31.16b\n" + ".inst 0x4e8ea47d // smmla v29.4s, v3.16b, v14.16b\n" + "uzp1 v19.2d, v8.2d, v27.2d\n" + "uzp2 v18.2d, v8.2d, v27.2d\n" + "scvtf v19.4s, v19.4s, #0x4\n" + "uzp1 v17.2d, v0.2d, v29.2d\n" + "uzp2 v16.2d, v0.2d, v29.2d\n" + "scvtf v18.4s, v18.4s, #0x4\n" + "fmla v2.4s, v19.4s, v23.4s\n" + "scvtf v17.4s, v17.4s, #0x4\n" + "scvtf v16.4s, v16.4s, #0x4\n" + "fmla v10.4s, v18.4s, v21.4s\n" + "fmla v12.4s, v17.4s, v1.4s\n" + "fmla v28.4s, v16.4s, v20.4s\n" + "bgt 7b\n" + "mov x20, %x[res_ptr]\n" + "cmp x10, #0x1\n" + "str q2, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "ble 8f\n" + "cmp x10, #0x2\n" + "str q10, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "ble 8f\n" + "cmp x10, #0x3\n" + "str q12, [x20, #0x0]\n" + "add x20, x20, %x[res_stride]\n" + "ble 8f\n" + "str q28, [x20, #0x0]\n" + "8:" // Row tail: Accumulator store skip + "subs x23, x23, #0x4\n" + "add %x[res_ptr], %x[res_ptr], #0x10\n" + "bne 6b\n" + "subs x10, x10, #0x4\n" + "add %x[a_ptr], %x[a_ptr], x9\n" + "mov %x[res_ptr], x22\n" + "bgt 5b\n" + "9:" // Row tail: Row loop skip + : [a_ptr] "+&r" (a_ptr), [res_ptr] "+&r" (res_ptr) + : [b_ptr] "r" (b_ptr), [nr] "r" (nr), [nb] "r" (nb), [res_stride] "r" (res_stride), [nc] "r" (nc) + : "cc", "memory", "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", "v24", 
"v25", "v26", "v27", "v28", "v29", "v30", "v31", "x9", "x10", "x20", "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28" + ); + return; + } +#endif // #if ! ((defined(_MSC_VER)) && ! defined(__clang__)) && defined(__aarch64__) && defined(__ARM_NEON) && defined(__ARM_FEATURE_MATMUL_INT8) + float sumf[4][4]; + int sumi; + + for (int y = 0; y < nr / 4; y++) { + const block_q8_0x4 * a_ptr = (const block_q8_0x4 *) vy + (y * nb); + for (int x = 0; x < nc / ncols_interleaved; x++) { + const block_q4_0x4 * b_ptr = (const block_q4_0x4 *) vx + (x * nb); + for (int m = 0; m < 4; m++) { + for (int j = 0; j < ncols_interleaved; j++) sumf[m][j] = 0.0; + } + for (int l = 0; l < nb; l++) { + for (int k = 0; k < (qk / (2 * blocklen)); k++) { + for (int m = 0; m < 4; m++) { + for (int j = 0; j < ncols_interleaved; j++) { + sumi = 0; + for (int i = 0; i < blocklen; ++i) { + const int v0 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] << 4); + const int v1 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] & 0xF0); + sumi += ((v0 * a_ptr[l].qs[k * 4 * blocklen + m * blocklen + i]) + + (v1 * a_ptr[l].qs[k * 4 * blocklen + m * blocklen + i + qk / 2 * 4])) >> 4; + } + sumf[m][j] += sumi * GGML_FP16_TO_FP32(b_ptr[l].d[j]) * GGML_FP16_TO_FP32(a_ptr[l].d[m]); + } + } + } + } + for (int m = 0; m < 4; m++) { + for (int j = 0; j < ncols_interleaved; j++) + s[(y * 4 + m) * bs + x * ncols_interleaved + j] = sumf[m][j]; + } + } + } +} + +void ggml_gemm_q4_0_8x8_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, const void * restrict vy, int nr, int nc) { + const int qk = QK8_0; + const int nb = n / qk; + const int ncols_interleaved = 8; + const int blocklen = 8; + + assert (n % qk == 0); + assert (nr % 4 == 0); + assert (nc % ncols_interleaved == 0); + + UNUSED(s); + UNUSED(bs); + UNUSED(vx); + UNUSED(vy); + UNUSED(nr); + UNUSED(nc); + UNUSED(nb); + UNUSED(ncols_interleaved); + UNUSED(blocklen); + +#if ! 
((defined(_MSC_VER)) && ! defined(__clang__)) && defined(__aarch64__) +#if defined(__ARM_FEATURE_SVE) && defined(__ARM_FEATURE_MATMUL_INT8) + if (ggml_cpu_has_sve() && ggml_cpu_has_matmul_int8() && ggml_cpu_get_sve_cnt() == QK8_0) { + const void * b_ptr = vx; + const void * a_ptr = vy; + float * res_ptr = s; + size_t res_stride = bs * sizeof(float); + + __asm__ __volatile__( + "mov x20, #0x4\n" + "mov x13, %x[nr]\n" + "mov z28.s, #-0x4\n" + "mov x12, #0x88\n" + "ptrue p1.b\n" + "whilelt p0.s, XZR, x20\n" + "cmp x13, #0x10\n" + "mul x12, %x[nb], x12\n" + "blt 4f\n" + "1:" // Row loop + "add x11, %x[b_ptr], #0x10\n" + "mov x10, %x[nc]\n" + "add x9, %x[res_ptr], %x[res_stride], LSL #4\n" + "2:" // Column loop + "add x28, %x[a_ptr], #0x8\n" + "mov z24.b, #0x0\n" + "mov z15.b, #0x0\n" + "mov x27, %x[nb]\n" + "add x26, x28, x12\n" + "mov z12.b, #0x0\n" + "mov z0.b, #0x0\n" + "add x25, x26, x12\n" + "mov z13.b, #0x0\n" + "mov z1.b, #0x0\n" + "add x24, x25, x12\n" + "mov z20.b, #0x0\n" + "mov z25.b, #0x0\n" + "mov z11.b, #0x0\n" + "mov z16.b, #0x0\n" + "mov z19.b, #0x0\n" + "mov z26.b, #0x0\n" + "mov z8.b, #0x0\n" + "mov z29.b, #0x0\n" + "mov z27.b, #0x0\n" + "mov z10.b, #0x0\n" + "3:" // Block loop + "ld1b { z30.b }, p1/Z, [x11]\n" + "ld1b { z21.b }, p1/Z, [x11, #1, MUL VL]\n" + "mov z18.s, #0x0\n" + "mov z7.s, #0x0\n" + "ld1rqb { z3.b }, p1/Z, [x28]\n" + "ld1rqb { z5.b }, p1/Z, [x28, #16]\n" + "mov z9.s, #0x0\n" + "mov z22.s, #0x0\n" + "ld1b { z4.b }, p1/Z, [x11, #2, MUL VL]\n" + "ld1b { z17.b }, p1/Z, [x11, #3, MUL VL]\n" + "sub x20, x11, #0x10\n" + "sub x23, x28, #0x8\n" + "lsl z31.b, z30.b, #0x4\n" + "lsl z6.b, z21.b, #0x4\n" + "ld1h { z23.s }, p1/Z, [x20]\n" + "sub x22, x26, #0x8\n" + "and z30.b, z30.b, #0xf0\n" + "and z21.b, z21.b, #0xf0\n" + "sub x21, x25, #0x8\n" + "sub x20, x24, #0x8\n" + "lsl z14.b, z4.b, #0x4\n" + "lsl z2.b, z17.b, #0x4\n" + "subs x27, x27, #0x1\n" + "add x11, x11, #0x90\n" + ".inst 0x451f9872 // smmla z18.s, z3.b, z31.b\n" + ".inst 0x45069867 
// smmla z7.s, z3.b, z6.b\n" + "ld1rqb { z3.b }, p1/Z, [x28, #32]\n" + "and z4.b, z4.b, #0xf0\n" + ".inst 0x451f98a9 // smmla z9.s, z5.b, z31.b\n" + ".inst 0x450698b6 // smmla z22.s, z5.b, z6.b\n" + "ld1rqb { z5.b }, p1/Z, [x28, #48]\n" + "and z17.b, z17.b, #0xf0\n" + "fcvt z23.s, p1/m, z23.h\n" + ".inst 0x450e9872 // smmla z18.s, z3.b, z14.b\n" + ".inst 0x45029867 // smmla z7.s, z3.b, z2.b\n" + "ld1rqb { z3.b }, p1/Z, [x28, #64]\n" + ".inst 0x450e98a9 // smmla z9.s, z5.b, z14.b\n" + ".inst 0x450298b6 // smmla z22.s, z5.b, z2.b\n" + "ld1rqb { z5.b }, p1/Z, [x28, #80]\n" + "fscale z23.s, p1/m, z23.s, z28.s\n" + ".inst 0x451e9872 // smmla z18.s, z3.b, z30.b\n" + ".inst 0x45159867 // smmla z7.s, z3.b, z21.b\n" + "ld1rqb { z3.b }, p1/Z, [x28, #96]\n" + ".inst 0x451e98a9 // smmla z9.s, z5.b, z30.b\n" + ".inst 0x451598b6 // smmla z22.s, z5.b, z21.b\n" + "ld1rqb { z5.b }, p1/Z, [x28, #112]\n" + "add x28, x28, #0x88\n" + ".inst 0x45049872 // smmla z18.s, z3.b, z4.b\n" + ".inst 0x45119867 // smmla z7.s, z3.b, z17.b\n" + "ld1h { z3.s }, p0/Z, [x23]\n" + ".inst 0x450498a9 // smmla z9.s, z5.b, z4.b\n" + ".inst 0x451198b6 // smmla z22.s, z5.b, z17.b\n" + "fcvt z3.s, p1/m, z3.h\n" + "uzp1 z5.d, z18.d, z7.d\n" + "uzp2 z18.d, z18.d, z7.d\n" + "mov z3.q, z3.q[0]\n" + "uzp1 z7.d, z9.d, z22.d\n" + "uzp2 z22.d, z9.d, z22.d\n" + "fmul z9.s, z23.s, z3.s[0]\n" + "scvtf z5.s, p1/m, z5.s\n" + "scvtf z18.s, p1/m, z18.s\n" + "scvtf z7.s, p1/m, z7.s\n" + "scvtf z22.s, p1/m, z22.s\n" + "fmla z24.s, p1/M, z5.s, z9.s\n" + "ld1rqb { z5.b }, p1/Z, [x26]\n" + "fmul z9.s, z23.s, z3.s[1]\n" + "fmla z15.s, p1/M, z18.s, z9.s\n" + "ld1rqb { z18.b }, p1/Z, [x26, #16]\n" + "fmul z9.s, z23.s, z3.s[2]\n" + "fmul z3.s, z23.s, z3.s[3]\n" + "fmla z12.s, p1/M, z7.s, z9.s\n" + "mov z9.s, #0x0\n" + "ld1h { z7.s }, p0/Z, [x22]\n" + ".inst 0x451f98a9 // smmla z9.s, z5.b, z31.b\n" + "fmla z0.s, p1/M, z22.s, z3.s\n" + "mov z22.s, #0x0\n" + "ld1h { z3.s }, p0/Z, [x21]\n" + ".inst 0x450698b6 // smmla z22.s, z5.b, 
z6.b\n" + "ld1rqb { z5.b }, p1/Z, [x26, #32]\n" + "fcvt z7.s, p1/m, z7.h\n" + "fcvt z3.s, p1/m, z3.h\n" + ".inst 0x450e98a9 // smmla z9.s, z5.b, z14.b\n" + ".inst 0x450298b6 // smmla z22.s, z5.b, z2.b\n" + "ld1rqb { z5.b }, p1/Z, [x26, #64]\n" + "mov z7.q, z7.q[0]\n" + "mov z3.q, z3.q[0]\n" + ".inst 0x451e98a9 // smmla z9.s, z5.b, z30.b\n" + ".inst 0x451598b6 // smmla z22.s, z5.b, z21.b\n" + "ld1rqb { z5.b }, p1/Z, [x26, #96]\n" + ".inst 0x450498a9 // smmla z9.s, z5.b, z4.b\n" + ".inst 0x451198b6 // smmla z22.s, z5.b, z17.b\n" + "uzp1 z5.d, z9.d, z22.d\n" + "scvtf z5.s, p1/m, z5.s\n" + "uzp2 z22.d, z9.d, z22.d\n" + "fmul z9.s, z23.s, z7.s[0]\n" + "scvtf z22.s, p1/m, z22.s\n" + "fmla z13.s, p1/M, z5.s, z9.s\n" + "ld1rqb { z9.b }, p1/Z, [x25]\n" + "fmul z5.s, z23.s, z7.s[1]\n" + "fmla z1.s, p1/M, z22.s, z5.s\n" + "mov z5.s, #0x0\n" + "mov z22.s, #0x0\n" + ".inst 0x451f9a45 // smmla z5.s, z18.b, z31.b\n" + ".inst 0x45069a56 // smmla z22.s, z18.b, z6.b\n" + "ld1rqb { z18.b }, p1/Z, [x26, #48]\n" + ".inst 0x450e9a45 // smmla z5.s, z18.b, z14.b\n" + ".inst 0x45029a56 // smmla z22.s, z18.b, z2.b\n" + "ld1rqb { z18.b }, p1/Z, [x26, #80]\n" + ".inst 0x451e9a45 // smmla z5.s, z18.b, z30.b\n" + ".inst 0x45159a56 // smmla z22.s, z18.b, z21.b\n" + "ld1rqb { z18.b }, p1/Z, [x26, #112]\n" + "add x26, x26, #0x88\n" + ".inst 0x45049a45 // smmla z5.s, z18.b, z4.b\n" + ".inst 0x45119a56 // smmla z22.s, z18.b, z17.b\n" + "uzp1 z18.d, z5.d, z22.d\n" + "scvtf z18.s, p1/m, z18.s\n" + "uzp2 z22.d, z5.d, z22.d\n" + "fmul z5.s, z23.s, z7.s[2]\n" + "fmul z7.s, z23.s, z7.s[3]\n" + "scvtf z22.s, p1/m, z22.s\n" + "fmla z20.s, p1/M, z18.s, z5.s\n" + "ld1rqb { z18.b }, p1/Z, [x25, #16]\n" + "ld1h { z5.s }, p0/Z, [x20]\n" + "fcvt z5.s, p1/m, z5.h\n" + "fmla z25.s, p1/M, z22.s, z7.s\n" + "mov z22.s, #0x0\n" + "mov z7.s, #0x0\n" + ".inst 0x451f9936 // smmla z22.s, z9.b, z31.b\n" + ".inst 0x45069927 // smmla z7.s, z9.b, z6.b\n" + "ld1rqb { z9.b }, p1/Z, [x25, #32]\n" + "mov z5.q, z5.q[0]\n" + ".inst 
0x450e9936 // smmla z22.s, z9.b, z14.b\n" + ".inst 0x45029927 // smmla z7.s, z9.b, z2.b\n" + "ld1rqb { z9.b }, p1/Z, [x25, #64]\n" + ".inst 0x451e9936 // smmla z22.s, z9.b, z30.b\n" + ".inst 0x45159927 // smmla z7.s, z9.b, z21.b\n" + "ld1rqb { z9.b }, p1/Z, [x25, #96]\n" + ".inst 0x45049936 // smmla z22.s, z9.b, z4.b\n" + ".inst 0x45119927 // smmla z7.s, z9.b, z17.b\n" + "uzp1 z9.d, z22.d, z7.d\n" + "scvtf z9.s, p1/m, z9.s\n" + "uzp2 z22.d, z22.d, z7.d\n" + "fmul z7.s, z23.s, z3.s[0]\n" + "scvtf z22.s, p1/m, z22.s\n" + "fmla z11.s, p1/M, z9.s, z7.s\n" + "ld1rqb { z9.b }, p1/Z, [x24]\n" + "fmul z7.s, z23.s, z3.s[1]\n" + "fmla z16.s, p1/M, z22.s, z7.s\n" + "mov z22.s, #0x0\n" + "mov z7.s, #0x0\n" + ".inst 0x451f9a56 // smmla z22.s, z18.b, z31.b\n" + ".inst 0x45069a47 // smmla z7.s, z18.b, z6.b\n" + "ld1rqb { z18.b }, p1/Z, [x25, #48]\n" + ".inst 0x450e9a56 // smmla z22.s, z18.b, z14.b\n" + ".inst 0x45029a47 // smmla z7.s, z18.b, z2.b\n" + "ld1rqb { z18.b }, p1/Z, [x25, #80]\n" + ".inst 0x451e9a56 // smmla z22.s, z18.b, z30.b\n" + ".inst 0x45159a47 // smmla z7.s, z18.b, z21.b\n" + "ld1rqb { z18.b }, p1/Z, [x25, #112]\n" + "add x25, x25, #0x88\n" + ".inst 0x45049a56 // smmla z22.s, z18.b, z4.b\n" + ".inst 0x45119a47 // smmla z7.s, z18.b, z17.b\n" + "uzp1 z18.d, z22.d, z7.d\n" + "scvtf z18.s, p1/m, z18.s\n" + "uzp2 z7.d, z22.d, z7.d\n" + "fmul z22.s, z23.s, z3.s[2]\n" + "fmul z3.s, z23.s, z3.s[3]\n" + "scvtf z7.s, p1/m, z7.s\n" + "fmla z19.s, p1/M, z18.s, z22.s\n" + "ld1rqb { z18.b }, p1/Z, [x24, #16]\n" + "fmul z22.s, z23.s, z5.s[0]\n" + "fmla z26.s, p1/M, z7.s, z3.s\n" + "mov z3.s, #0x0\n" + "mov z7.s, #0x0\n" + ".inst 0x451f9923 // smmla z3.s, z9.b, z31.b\n" + ".inst 0x45069927 // smmla z7.s, z9.b, z6.b\n" + "ld1rqb { z9.b }, p1/Z, [x24, #32]\n" + ".inst 0x450e9923 // smmla z3.s, z9.b, z14.b\n" + ".inst 0x45029927 // smmla z7.s, z9.b, z2.b\n" + "mov z9.s, #0x0\n" + ".inst 0x451f9a49 // smmla z9.s, z18.b, z31.b\n" + "mov z31.s, #0x0\n" + ".inst 0x45069a5f // smmla 
z31.s, z18.b, z6.b\n" + "ld1rqb { z6.b }, p1/Z, [x24, #48]\n" + "ld1rqb { z18.b }, p1/Z, [x24, #64]\n" + ".inst 0x450e98c9 // smmla z9.s, z6.b, z14.b\n" + "fmul z14.s, z23.s, z5.s[1]\n" + ".inst 0x450298df // smmla z31.s, z6.b, z2.b\n" + "ld1rqb { z6.b }, p1/Z, [x24, #80]\n" + "fmul z2.s, z23.s, z5.s[2]\n" + "fmul z23.s, z23.s, z5.s[3]\n" + ".inst 0x451e9a43 // smmla z3.s, z18.b, z30.b\n" + ".inst 0x45159a47 // smmla z7.s, z18.b, z21.b\n" + "ld1rqb { z5.b }, p1/Z, [x24, #96]\n" + ".inst 0x451e98c9 // smmla z9.s, z6.b, z30.b\n" + ".inst 0x451598df // smmla z31.s, z6.b, z21.b\n" + "ld1rqb { z18.b }, p1/Z, [x24, #112]\n" + "add x24, x24, #0x88\n" + ".inst 0x450498a3 // smmla z3.s, z5.b, z4.b\n" + ".inst 0x451198a7 // smmla z7.s, z5.b, z17.b\n" + ".inst 0x45049a49 // smmla z9.s, z18.b, z4.b\n" + ".inst 0x45119a5f // smmla z31.s, z18.b, z17.b\n" + "uzp1 z18.d, z3.d, z7.d\n" + "uzp2 z5.d, z3.d, z7.d\n" + "scvtf z18.s, p1/m, z18.s\n" + "uzp1 z6.d, z9.d, z31.d\n" + "uzp2 z9.d, z9.d, z31.d\n" + "scvtf z5.s, p1/m, z5.s\n" + "fmla z8.s, p1/M, z18.s, z22.s\n" + "scvtf z6.s, p1/m, z6.s\n" + "scvtf z9.s, p1/m, z9.s\n" + "fmla z29.s, p1/M, z5.s, z14.s\n" + "fmla z27.s, p1/M, z6.s, z2.s\n" + "fmla z10.s, p1/M, z9.s, z23.s\n" + "bgt 3b\n" + "mov x20, %x[res_ptr]\n" + "subs x10, x10, #0x8\n" + "add %x[res_ptr], %x[res_ptr], #0x20\n" + "st1w { z24.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "st1w { z15.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "st1w { z12.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "st1w { z0.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "st1w { z13.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "st1w { z1.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "st1w { z20.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "st1w { z25.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "st1w { z11.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "st1w { z16.s }, p1, [x20]\n" + "add x20, x20, 
%x[res_stride]\n" + "st1w { z19.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "st1w { z26.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "st1w { z8.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "st1w { z29.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "st1w { z27.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "st1w { z10.s }, p1, [x20]\n" + "bne 2b\n" + "mov x20, #0x4\n" + "sub x13, x13, #0x10\n" + "cmp x13, #0x10\n" + "mov %x[res_ptr], x9\n" + "madd %x[a_ptr], x20, x12, %x[a_ptr]\n" + "bge 1b\n" + "4:" // Row loop skip + "cbz x13, 9f\n" + "5:" // Row tail: Row loop + "add x25, %x[b_ptr], #0x10\n" + "mov x24, %x[nc]\n" + "add x23, %x[res_ptr], %x[res_stride], LSL #2\n" + "6:" // Row tail: Column loop + "mov z24.b, #0x0\n" + "mov z15.b, #0x0\n" + "add x28, %x[a_ptr], #0x8\n" + "mov x22, %x[nb]\n" + "mov z12.b, #0x0\n" + "mov z0.b, #0x0\n" + "7:" // Row tail: Block loop + "ld1b { z3.b }, p1/Z, [x25]\n" + "ld1b { z6.b }, p1/Z, [x25, #1, MUL VL]\n" + "mov z2.s, #0x0\n" + "mov z25.s, #0x0\n" + "ld1rqb { z26.b }, p1/Z, [x28]\n" + "ld1rqb { z21.b }, p1/Z, [x28, #16]\n" + "mov z27.s, #0x0\n" + "mov z19.s, #0x0\n" + "ld1b { z29.b }, p1/Z, [x25, #2, MUL VL]\n" + "ld1b { z16.b }, p1/Z, [x25, #3, MUL VL]\n" + "sub x21, x25, #0x10\n" + "sub x20, x28, #0x8\n" + "lsl z20.b, z3.b, #0x4\n" + "lsl z4.b, z6.b, #0x4\n" + "ld1rqb { z10.b }, p1/Z, [x28, #32]\n" + "ld1rqb { z23.b }, p1/Z, [x28, #48]\n" + "and z3.b, z3.b, #0xf0\n" + "and z6.b, z6.b, #0xf0\n" + "ld1rqb { z11.b }, p1/Z, [x28, #64]\n" + "ld1rqb { z7.b }, p1/Z, [x28, #80]\n" + "lsl z8.b, z29.b, #0x4\n" + "lsl z14.b, z16.b, #0x4\n" + "ld1rqb { z18.b }, p1/Z, [x28, #96]\n" + "ld1rqb { z30.b }, p1/Z, [x28, #112]\n" + ".inst 0x45149b42 // smmla z2.s, z26.b, z20.b\n" + ".inst 0x45049b59 // smmla z25.s, z26.b, z4.b\n" + "and z29.b, z29.b, #0xf0\n" + "ld1h { z17.s }, p1/Z, [x21]\n" + ".inst 0x45149abb // smmla z27.s, z21.b, z20.b\n" + ".inst 0x45049ab3 // smmla z19.s, z21.b, z4.b\n" + "and 
z16.b, z16.b, #0xf0\n" + "ld1h { z4.s }, p0/Z, [x20]\n" + "subs x22, x22, #0x1\n" + "add x28, x28, #0x88\n" + "fcvt z17.s, p1/m, z17.h\n" + "add x25, x25, #0x90\n" + ".inst 0x45089942 // smmla z2.s, z10.b, z8.b\n" + ".inst 0x450e9959 // smmla z25.s, z10.b, z14.b\n" + "fcvt z4.s, p1/m, z4.h\n" + ".inst 0x45089afb // smmla z27.s, z23.b, z8.b\n" + ".inst 0x450e9af3 // smmla z19.s, z23.b, z14.b\n" + "fscale z17.s, p1/m, z17.s, z28.s\n" + "mov z4.q, z4.q[0]\n" + ".inst 0x45039962 // smmla z2.s, z11.b, z3.b\n" + ".inst 0x45069979 // smmla z25.s, z11.b, z6.b\n" + "fmul z23.s, z17.s, z4.s[0]\n" + "fmul z9.s, z17.s, z4.s[1]\n" + "fmul z21.s, z17.s, z4.s[2]\n" + "fmul z4.s, z17.s, z4.s[3]\n" + ".inst 0x450398fb // smmla z27.s, z7.b, z3.b\n" + ".inst 0x450698f3 // smmla z19.s, z7.b, z6.b\n" + ".inst 0x451d9a42 // smmla z2.s, z18.b, z29.b\n" + ".inst 0x45109a59 // smmla z25.s, z18.b, z16.b\n" + ".inst 0x451d9bdb // smmla z27.s, z30.b, z29.b\n" + ".inst 0x45109bd3 // smmla z19.s, z30.b, z16.b\n" + "uzp1 z31.d, z2.d, z25.d\n" + "uzp2 z13.d, z2.d, z25.d\n" + "scvtf z31.s, p1/m, z31.s\n" + "uzp1 z17.d, z27.d, z19.d\n" + "uzp2 z18.d, z27.d, z19.d\n" + "scvtf z13.s, p1/m, z13.s\n" + "fmla z24.s, p1/M, z31.s, z23.s\n" + "scvtf z17.s, p1/m, z17.s\n" + "scvtf z18.s, p1/m, z18.s\n" + "fmla z15.s, p1/M, z13.s, z9.s\n" + "fmla z12.s, p1/M, z17.s, z21.s\n" + "fmla z0.s, p1/M, z18.s, z4.s\n" + "bgt 7b\n" + "mov x20, %x[res_ptr]\n" + "cmp x13, #0x1\n" + "st1w { z24.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "ble 8f\n" + "cmp x13, #0x2\n" + "st1w { z15.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "ble 8f\n" + "cmp x13, #0x3\n" + "st1w { z12.s }, p1, [x20]\n" + "add x20, x20, %x[res_stride]\n" + "ble 8f\n" + "st1w { z0.s }, p1, [x20]\n" + "8:" // Row tail: Accumulator store skip + "subs x24, x24, #0x8\n" + "add %x[res_ptr], %x[res_ptr], #0x20\n" + "bne 6b\n" + "subs x13, x13, #0x4\n" + "add %x[a_ptr], %x[a_ptr], x12\n" + "mov %x[res_ptr], x23\n" + "bgt 5b\n" + "9:" // Row 
tail: Row loop skip + : [a_ptr] "+&r" (a_ptr), [res_ptr] "+&r" (res_ptr) + : [b_ptr] "r" (b_ptr), [nr] "r" (nr), [nb] "r" (nb), [res_stride] "r" (res_stride), [nc] "r" (nc) + : "cc", "memory", "p0", "p1", "x9", "x10", "x11", "x12", "x13", "x20", "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "z0", "z1", "z2", "z3", "z4", "z5", "z6", "z7", "z8", "z9", "z10", "z11", "z12", "z13", "z14", "z15", "z16", "z17", "z18", "z19", "z20", "z21", "z22", "z23", "z24", "z25", "z26", "z27", "z28", "z29", "z30", "z31" + ); + return; + } +#endif // #if defined(__ARM_FEATURE_SVE) && defined(__ARM_FEATURE_MATMUL_INT8) +#elif defined(__AVX2__) || defined(__AVX512F__) + { + const block_q4_0x8 * b_ptr_start = (const block_q4_0x8 *)vx; + const block_q8_0x4 * a_ptr_start = (const block_q8_0x4 *)vy; + int64_t b_nb = n / QK4_0; + int64_t y = 0; + // Mask to mask out nibbles from packed bytes + const __m256i m4b = _mm256_set1_epi8(0x0F); + const __m128i loadMask = _mm_blend_epi32(_mm_setzero_si128(), _mm_set1_epi32(0xFFFFFFFF), 3); + // Lookup table to convert signed nibbles to signed bytes + __m256i signextendlut = _mm256_castsi128_si256(_mm_set_epi8(-1, -2, -3, -4, -5, -6, -7, -8, 7, 6, 5, 4, 3, 2, 1, 0)); + signextendlut = _mm256_permute2f128_si256(signextendlut, signextendlut, 0); + // Permute mask used for easier vector processing at later stages + __m256i requiredOrder = _mm256_set_epi32(3, 2, 1, 0, 7, 6, 5, 4); + int64_t xstart = 0; + int anr = nr - nr%16; // Used to align nr with boundary of 16 + #ifdef __AVX512F__ + int anc = nc - nc%16; // Used to align nc with boundary of 16 + // Mask to mask out nibbles from packed bytes expanded to 512 bit length + const __m512i m4bexpanded = _mm512_set1_epi8(0x0F); + // Lookup table to convert signed nibbles to signed bytes expanded to 512 bit length + __m512i signextendlutexpanded = _mm512_inserti32x8(_mm512_castsi256_si512(signextendlut), signextendlut, 1); + + // Take group of four block_q8_0x4 structures at each pass of the loop and 
perform dot product operation + for (; y < anr / 4; y += 4) { + + const block_q8_0x4 * a_ptrs[4]; + + a_ptrs[0] = a_ptr_start + (y * nb); + for (int i = 0; i < 3; ++i) { + a_ptrs[i + 1] = a_ptrs[i] + nb; + } + + // Take group of two block_q4_0x8 structures at each pass of the loop and perform dot product operation + for (int64_t x = 0; x < anc / 8; x += 2) { + + const block_q4_0x8 * b_ptr_0 = b_ptr_start + ((x) * b_nb); + const block_q4_0x8 * b_ptr_1 = b_ptr_start + ((x + 1) * b_nb); + + // Master FP accumulators + __m512 acc_rows[16]; + for (int i = 0; i < 16; i++) { + acc_rows[i] = _mm512_setzero_ps(); + } + + for (int64_t b = 0; b < nb; b++) { + // Load the sixteen block_q4_0 quantized values interleaved with each other in chunks of eight - B0,B1 ....BE,BF + const __m256i rhs_raw_mat_0123_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs)); + const __m256i rhs_raw_mat_4567_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 32)); + const __m256i rhs_raw_mat_0123_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 64)); + const __m256i rhs_raw_mat_4567_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 96)); + + const __m256i rhs_raw_mat_89AB_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs)); + const __m256i rhs_raw_mat_CDEF_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 32)); + const __m256i rhs_raw_mat_89AB_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 64)); + const __m256i rhs_raw_mat_CDEF_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 96)); + + // Save the values in the following vectors in the formats B0B1B4B5B8B9BCBD, B2B3B6B7BABBBEBF for further processing and storing of values + const __m256i rhs_raw_mat_0145_0 = _mm256_blend_epi32(rhs_raw_mat_0123_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_0, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_0, requiredOrder), rhs_raw_mat_4567_0, 240); + const __m256i rhs_raw_mat_0145_1 
= _mm256_blend_epi32(rhs_raw_mat_0123_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_1, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_1, requiredOrder), rhs_raw_mat_4567_1, 240); + + const __m256i rhs_raw_mat_89CD_0 = _mm256_blend_epi32(rhs_raw_mat_89AB_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_CDEF_0, requiredOrder), 240); + const __m256i rhs_raw_mat_ABEF_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_89AB_0, requiredOrder), rhs_raw_mat_CDEF_0, 240); + const __m256i rhs_raw_mat_89CD_1 = _mm256_blend_epi32(rhs_raw_mat_89AB_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_CDEF_1, requiredOrder), 240); + const __m256i rhs_raw_mat_ABEF_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_89AB_1, requiredOrder), rhs_raw_mat_CDEF_1, 240); + + const __m512i rhs_raw_mat_014589CD_0 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_0145_0), rhs_raw_mat_89CD_0, 1); + const __m512i rhs_raw_mat_2367ABEF_0 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_2367_0), rhs_raw_mat_ABEF_0, 1); + const __m512i rhs_raw_mat_014589CD_1 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_0145_1), rhs_raw_mat_89CD_1, 1); + const __m512i rhs_raw_mat_2367ABEF_1 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_2367_1), rhs_raw_mat_ABEF_1, 1); + + // 4-bit -> 8-bit - Sign is maintained + const __m512i rhs_mat_014589CD_0 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_014589CD_0, m4bexpanded)); //B0(0-7) B1(0-7) B4(0-7) B5(0-7) B8(0-7) B9(0-7) BC(0-7) BD(0-7) + const __m512i rhs_mat_2367ABEF_0 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_2367ABEF_0, m4bexpanded)); //B2(0-7) B3(0-7) B6(0-7) B7(0-7) BA(0-7) BB(0-7) BE(0-7) BF(0-7) + + const __m512i rhs_mat_014589CD_1 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_014589CD_1, m4bexpanded)); //B0(8-15) B1(8-15) B4(8-15) B5(8-15) B8(8-15) B9(8-15) 
BC(8-15) BD(8-15) + const __m512i rhs_mat_2367ABEF_1 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_2367ABEF_1, m4bexpanded)); //B2(8-15) B3(8-15) B6(8-15) B7(8-15) BA(8-15) BB(8-15) BE(8-15) BF(8-15) + + const __m512i rhs_mat_014589CD_2 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_014589CD_0, 4), m4bexpanded)); //B0(16-23) B1(16-23) B4(16-23) B5(16-23) B8(16-23) B9(16-23) BC(16-23) BD(16-23) + const __m512i rhs_mat_2367ABEF_2 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_2367ABEF_0, 4), m4bexpanded)); //B2(16-23) B3(16-23) B6(16-23) B7(16-23) BA(16-23) BB(16-23) BE(16-23) BF(16-23) + + const __m512i rhs_mat_014589CD_3 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_014589CD_1, 4), m4bexpanded)); //B0(24-31) B1(24-31) B4(24-31) B5(24-31) B8(24-31) B9(24-31) BC(24-31) BD(24-31) + const __m512i rhs_mat_2367ABEF_3 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_2367ABEF_1, 4), m4bexpanded)); //B2(24-31) B3(24-31) B6(24-31) B7(24-31) BA(24-31) BB(24-31) BE(24-31) BF(24-31) + + // Shuffle pattern one - right side input + const __m512i rhs_mat_014589CD_0_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, 136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) B4(0-3) B5(0-3) B4(0-3) B5(0-3) B8(0-3) B9(0-3) B8(0-3) B9(0-3) BC(0-3) BD(0-3) BC(0-3) BD(0-3) + const __m512i rhs_mat_2367ABEF_0_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, 136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) BA(0-3) BB(0-3) BA(0-3) BB(0-3) BE(0-3) BF(0-3) BE(0-3) BF(0-3) + + const __m512i rhs_mat_014589CD_1_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, 136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) B8(8-11) B9(8-11) B8(8-11) B9(8-11) BC(8-11) BD(8-11) BC(8-11) BD(8-11) + const __m512i rhs_mat_2367ABEF_1_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, 136); 
//B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) BA(8-11) BB(8-11) BA(8-11) BB(8-11) BE(8-11) BF(8-11) BE(8-11) BF(8-11) + + const __m512i rhs_mat_014589CD_2_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, 136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) B4(16-19) B5(16-19) B4(16-19) B5(16-19) B8(16-19) B9(16-19) B8(16-19) B9(16-19) BC(16-19) BD(16-19) BC(16-19) BD(16-19) + const __m512i rhs_mat_2367ABEF_2_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, 136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) BA(16-19) BB(16-19) BA(16-19) BB(16-19) BE(16-19) BF(16-19) BE(16-19) BF(16-19) + + const __m512i rhs_mat_014589CD_3_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, 136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) B8(24-27) B9(24-27) B8(24-27) B9(24-27) BC(24-27) BD(24-27) BC(24-27) BD(24-27) + const __m512i rhs_mat_2367ABEF_3_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, 136); //B2(24-27) B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) BA(24-27) BB(24-27) BA(24-27) BB(24-27) BE(24-27) BF(24-27) BE(24-27) BF(24-27) + + // Shuffle pattern two - right side input + + const __m512i rhs_mat_014589CD_0_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, 221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) B8(4-7) B9(4-7) B8(4-7) B9(4-7) BC(4-7) BD(4-7) BC(4-7) BD(4-7) + const __m512i rhs_mat_2367ABEF_0_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, 221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) BA(4-7) BB(4-7) BA(4-7) BB(4-7) BE(4-7) BF(4-7) BE(4-7) BF(4-7) + + const __m512i rhs_mat_014589CD_1_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, 221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) B8(12-15) B9(12-15) B8(12-15) B9(12-15) BC(12-15) BD(12-15) BC(12-15) BD(12-15) + const __m512i rhs_mat_2367ABEF_1_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, 221); //B2(12-15) B3(12-15) 
B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) BA(12-15) BB(12-15) BA(12-15) BB(12-15) BE(12-15) BF(12-15) BE(12-15) BF(12-15) + + const __m512i rhs_mat_014589CD_2_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, 221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) B8(20-23) B9(20-23) B8(20-23) B9(20-23) BC(20-23) BD(20-23) BC(20-23) BD(20-23) + const __m512i rhs_mat_2367ABEF_2_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, 221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) BA(20-23) BB(20-23) BA(20-23) BB(20-23) BE(20-23) BF(20-23) BE(20-23) BF(20-23) + + const __m512i rhs_mat_014589CD_3_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, 221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) B8(28-31) B9(28-31) B8(28-31) B9(28-31) BC(28-31) BD(28-31) BC(28-31) BD(28-31) + const __m512i rhs_mat_2367ABEF_3_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, 221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) BA(28-31) BB(28-31) BA(28-31) BB(28-31) BE(28-31) BF(28-31) BE(28-31) BF(28-31) + + // Scale values - Load the weight scale values of two block_q4_0x8 + const __m512 col_scale_f32 = GGML_F32Cx8x2_LOAD(b_ptr_0[b].d, b_ptr_1[b].d); + + // Process LHS in pairs of rows + for (int rp = 0; rp < 4; rp++) { + + // Load the four block_q4_0 quantized values interleaved with each other in chunks of eight - A0,A1,A2,A3 + // Loaded as set of 128 bit vectors and repeated and stored into a 256 bit vector before again repeating into 512 bit vector + __m256i lhs_mat_ymm_0123_0 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs))); + __m256i lhs_mat_ymm_01_0 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_0, lhs_mat_ymm_0123_0, 0); + __m256i lhs_mat_ymm_23_0 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_0, lhs_mat_ymm_0123_0, 17); + __m256i lhs_mat_ymm_0123_1 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 32))); + __m256i 
lhs_mat_ymm_01_1 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_1, lhs_mat_ymm_0123_1, 0); + __m256i lhs_mat_ymm_23_1 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_1, lhs_mat_ymm_0123_1, 17); + __m256i lhs_mat_ymm_0123_2 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 64))); + __m256i lhs_mat_ymm_01_2 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_2, lhs_mat_ymm_0123_2, 0); + __m256i lhs_mat_ymm_23_2 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_2, lhs_mat_ymm_0123_2, 17); + __m256i lhs_mat_ymm_0123_3 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 96))); + __m256i lhs_mat_ymm_01_3 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_3, lhs_mat_ymm_0123_3, 0); + __m256i lhs_mat_ymm_23_3 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_3, lhs_mat_ymm_0123_3, 17); + + __m512i lhs_mat_01_0 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_0), lhs_mat_ymm_01_0, 1); + __m512i lhs_mat_23_0 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_0), lhs_mat_ymm_23_0, 1); + __m512i lhs_mat_01_1 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_1), lhs_mat_ymm_01_1, 1); + __m512i lhs_mat_23_1 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_1), lhs_mat_ymm_23_1, 1); + __m512i lhs_mat_01_2 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_2), lhs_mat_ymm_01_2, 1); + __m512i lhs_mat_23_2 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_2), lhs_mat_ymm_23_2, 1); + __m512i lhs_mat_01_3 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_3), lhs_mat_ymm_01_3, 1); + __m512i lhs_mat_23_3 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_3), lhs_mat_ymm_23_3, 1); + + // Shuffle pattern one - left side input + + const __m512i lhs_mat_01_0_sp1 = _mm512_shuffle_epi32(lhs_mat_01_0, 160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) + const __m512i lhs_mat_23_0_sp1 = _mm512_shuffle_epi32(lhs_mat_23_0, 160); //A2(0-3) A2(0-3) A3(0-3) 
A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) + + const __m512i lhs_mat_01_1_sp1 = _mm512_shuffle_epi32(lhs_mat_01_1, 160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) + const __m512i lhs_mat_23_1_sp1 = _mm512_shuffle_epi32(lhs_mat_23_1, 160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) + + const __m512i lhs_mat_01_2_sp1 = _mm512_shuffle_epi32(lhs_mat_01_2, 160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) + const __m512i lhs_mat_23_2_sp1 = _mm512_shuffle_epi32(lhs_mat_23_2, 160); //A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) + + const __m512i lhs_mat_01_3_sp1 = _mm512_shuffle_epi32(lhs_mat_01_3, 160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) + const __m512i lhs_mat_23_3_sp1 = _mm512_shuffle_epi32(lhs_mat_23_3, 160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) + + // Shuffle pattern two - left side input + + const __m512i lhs_mat_01_0_sp2 = _mm512_shuffle_epi32(lhs_mat_01_0, 245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) + const __m512i lhs_mat_23_0_sp2 = _mm512_shuffle_epi32(lhs_mat_23_0, 245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) + + const __m512i lhs_mat_01_1_sp2 
= _mm512_shuffle_epi32(lhs_mat_01_1, 245); //A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) + const __m512i lhs_mat_23_1_sp2 = _mm512_shuffle_epi32(lhs_mat_23_1, 245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) + + const __m512i lhs_mat_01_2_sp2 = _mm512_shuffle_epi32(lhs_mat_01_2, 245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) + const __m512i lhs_mat_23_2_sp2 = _mm512_shuffle_epi32(lhs_mat_23_2, 245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) + + const __m512i lhs_mat_01_3_sp2 = _mm512_shuffle_epi32(lhs_mat_01_3, 245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) + const __m512i lhs_mat_23_3_sp2 = _mm512_shuffle_epi32(lhs_mat_23_3, 245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) + + // The values arranged in shuffle patterns are operated with dot product operation within 32 bit lane i.e corresponding bytes and multiplied and added into 32 bit integers within 32 bit lane + // Resembles MMLAs into 2x2 matrices in ARM Version + __m512i iacc_mat_00_sp1 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_01_3_sp1, rhs_mat_014589CD_3_sp1), mul_sum_i8_pairs_int32x16(lhs_mat_01_2_sp1, rhs_mat_014589CD_2_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_01_1_sp1, rhs_mat_014589CD_1_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_01_0_sp1, 
rhs_mat_014589CD_0_sp1)); + __m512i iacc_mat_01_sp1 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_01_3_sp1, rhs_mat_2367ABEF_3_sp1), mul_sum_i8_pairs_int32x16(lhs_mat_01_2_sp1, rhs_mat_2367ABEF_2_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_01_1_sp1, rhs_mat_2367ABEF_1_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_01_0_sp1, rhs_mat_2367ABEF_0_sp1)); + __m512i iacc_mat_10_sp1 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_23_3_sp1, rhs_mat_014589CD_3_sp1), mul_sum_i8_pairs_int32x16(lhs_mat_23_2_sp1, rhs_mat_014589CD_2_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_23_1_sp1, rhs_mat_014589CD_1_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_23_0_sp1, rhs_mat_014589CD_0_sp1)); + __m512i iacc_mat_11_sp1 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_23_3_sp1, rhs_mat_2367ABEF_3_sp1), mul_sum_i8_pairs_int32x16(lhs_mat_23_2_sp1, rhs_mat_2367ABEF_2_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_23_1_sp1, rhs_mat_2367ABEF_1_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_23_0_sp1, rhs_mat_2367ABEF_0_sp1)); + __m512i iacc_mat_00_sp2 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_01_3_sp2, rhs_mat_014589CD_3_sp2), mul_sum_i8_pairs_int32x16(lhs_mat_01_2_sp2, rhs_mat_014589CD_2_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_01_1_sp2, rhs_mat_014589CD_1_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_01_0_sp2, rhs_mat_014589CD_0_sp2)); + __m512i iacc_mat_01_sp2 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_01_3_sp2, rhs_mat_2367ABEF_3_sp2), mul_sum_i8_pairs_int32x16(lhs_mat_01_2_sp2, rhs_mat_2367ABEF_2_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_01_1_sp2, rhs_mat_2367ABEF_1_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_01_0_sp2, rhs_mat_2367ABEF_0_sp2)); + __m512i iacc_mat_10_sp2 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_23_3_sp2, rhs_mat_014589CD_3_sp2), 
mul_sum_i8_pairs_int32x16(lhs_mat_23_2_sp2, rhs_mat_014589CD_2_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_23_1_sp2, rhs_mat_014589CD_1_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_23_0_sp2, rhs_mat_014589CD_0_sp2)); + __m512i iacc_mat_11_sp2 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_23_3_sp2, rhs_mat_2367ABEF_3_sp2), mul_sum_i8_pairs_int32x16(lhs_mat_23_2_sp2, rhs_mat_2367ABEF_2_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_23_1_sp2, rhs_mat_2367ABEF_1_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_23_0_sp2, rhs_mat_2367ABEF_0_sp2)); + + // Output of both shuffle patterns are added in order to sum dot product outputs of all 32 values in block + __m512i iacc_mat_00 = _mm512_add_epi32(iacc_mat_00_sp1, iacc_mat_00_sp2); + __m512i iacc_mat_01 = _mm512_add_epi32(iacc_mat_01_sp1, iacc_mat_01_sp2); + __m512i iacc_mat_10 = _mm512_add_epi32(iacc_mat_10_sp1, iacc_mat_10_sp2); + __m512i iacc_mat_11 = _mm512_add_epi32(iacc_mat_11_sp1, iacc_mat_11_sp2); + + + // Straighten out to make 4 row vectors + __m512i iacc_row_0 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_00, _mm512_shuffle_epi32(iacc_mat_01, 78)); + __m512i iacc_row_1 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_00, 78), iacc_mat_01); + __m512i iacc_row_2 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_10, _mm512_shuffle_epi32(iacc_mat_11, 78)); + __m512i iacc_row_3 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_10, 78), iacc_mat_11); + + // Load the scale(d) values for all the 4 Q8_0 blocks and repeat it across lanes + const __m128i row_scale_f16 = _mm_shuffle_epi32(_mm_maskload_epi32((int const*)(a_ptrs[rp][b].d), loadMask), 68); + const __m512 row_scale_f32 = GGML_F32Cx16_REPEAT_LOAD(row_scale_f16); + + // Multiply with appropiate scales and accumulate + acc_rows[rp * 4] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_0), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 0)), acc_rows[rp * 4]); + acc_rows[rp * 4 + 1] = 
_mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_1), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 85)), acc_rows[rp * 4 + 1]); + acc_rows[rp * 4 + 2] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_2), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 170)), acc_rows[rp * 4 + 2]); + acc_rows[rp * 4 + 3] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_3), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 255)), acc_rows[rp * 4 + 3]); + } + } + + // Store the accumulated values + for (int i = 0; i < 16; i++) { + _mm512_storeu_ps((float *)(s + ((y * 4 + i) * bs + x * 8)), acc_rows[i]); + } + } + } + // Take a block_q8_0x4 structures at each pass of the loop and perform dot product operation + for (; y < nr / 4; y ++) { + + const block_q8_0x4 * a_ptr = a_ptr_start + (y * nb); + + // Take group of two block_q4_0x8 structures at each pass of the loop and perform dot product operation + for (int64_t x = 0; x < anc / 8; x += 2) { + + const block_q4_0x8 * b_ptr_0 = b_ptr_start + ((x) * b_nb); + const block_q4_0x8 * b_ptr_1 = b_ptr_start + ((x + 1) * b_nb); + + // Master FP accumulators + __m512 acc_rows[4]; + for (int i = 0; i < 4; i++) { + acc_rows[i] = _mm512_setzero_ps(); + } + + for (int64_t b = 0; b < nb; b++) { + // Load the sixteen block_q4_0 quantized values interleaved with each other in chunks of eight - B0,B1 ....BE,BF + const __m256i rhs_raw_mat_0123_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs)); + const __m256i rhs_raw_mat_4567_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 32)); + const __m256i rhs_raw_mat_0123_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 64)); + const __m256i rhs_raw_mat_4567_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_0[b].qs + 96)); + + const __m256i rhs_raw_mat_89AB_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs)); + const __m256i rhs_raw_mat_CDEF_0 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 32)); + const 
__m256i rhs_raw_mat_89AB_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 64)); + const __m256i rhs_raw_mat_CDEF_1 = _mm256_loadu_si256((const __m256i *)(b_ptr_1[b].qs + 96)); + + // Save the values in the following vectors in the formats B0B1B4B5, B2B3B6B7 for further processing and storing of valuess + const __m256i rhs_raw_mat_0145_0 = _mm256_blend_epi32(rhs_raw_mat_0123_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_0, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_0, requiredOrder), rhs_raw_mat_4567_0, 240); + const __m256i rhs_raw_mat_0145_1 = _mm256_blend_epi32(rhs_raw_mat_0123_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_1, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_1, requiredOrder), rhs_raw_mat_4567_1, 240); + + const __m256i rhs_raw_mat_89CD_0 = _mm256_blend_epi32(rhs_raw_mat_89AB_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_CDEF_0, requiredOrder), 240); + const __m256i rhs_raw_mat_ABEF_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_89AB_0, requiredOrder), rhs_raw_mat_CDEF_0, 240); + const __m256i rhs_raw_mat_89CD_1 = _mm256_blend_epi32(rhs_raw_mat_89AB_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_CDEF_1, requiredOrder), 240); + const __m256i rhs_raw_mat_ABEF_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_89AB_1, requiredOrder), rhs_raw_mat_CDEF_1, 240); + + const __m512i rhs_raw_mat_014589CD_0 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_0145_0), rhs_raw_mat_89CD_0, 1); + const __m512i rhs_raw_mat_2367ABEF_0 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_2367_0), rhs_raw_mat_ABEF_0, 1); + const __m512i rhs_raw_mat_014589CD_1 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_0145_1), rhs_raw_mat_89CD_1, 1); + const __m512i rhs_raw_mat_2367ABEF_1 = _mm512_inserti32x8(_mm512_castsi256_si512(rhs_raw_mat_2367_1), rhs_raw_mat_ABEF_1, 1); + + 
// 4-bit -> 8-bit - Sign is maintained + const __m512i rhs_mat_014589CD_0 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_014589CD_0, m4bexpanded)); //B0(0-7) B1(0-7) B4(0-7) B5(0-7) B8(0-7) B9(0-7) BC(0-7) BD(0-7) + const __m512i rhs_mat_2367ABEF_0 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_2367ABEF_0, m4bexpanded)); //B2(0-7) B3(0-7) B6(0-7) B7(0-7) BA(0-7) BB(0-7) BE(0-7) BF(0-7) + + const __m512i rhs_mat_014589CD_1 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_014589CD_1, m4bexpanded)); //B0(8-15) B1(8-15) B4(8-15) B5(8-15) B8(8-15) B9(8-15) BC(8-15) BD(8-15) + const __m512i rhs_mat_2367ABEF_1 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(rhs_raw_mat_2367ABEF_1, m4bexpanded)); //B2(8-15) B3(8-15) B6(8-15) B7(8-15) BA(8-15) BB(8-15) BE(8-15) BF(8-15) + + const __m512i rhs_mat_014589CD_2 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_014589CD_0, 4), m4bexpanded)); //B0(16-23) B1(16-23) B4(16-23) B5(16-23) B8(16-23) B9(16-23) BC(16-23) BD(16-23) + const __m512i rhs_mat_2367ABEF_2 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_2367ABEF_0, 4), m4bexpanded)); //B2(16-23) B3(16-23) B6(16-23) B7(16-23) BA(16-23) BB(16-23) BE(16-23) BF(16-23) + + const __m512i rhs_mat_014589CD_3 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_014589CD_1, 4), m4bexpanded)); //B0(24-31) B1(24-31) B4(24-31) B5(24-31) B8(24-31) B9(24-31) BC(24-31) BD(24-31) + const __m512i rhs_mat_2367ABEF_3 = _mm512_shuffle_epi8(signextendlutexpanded, _mm512_and_si512(_mm512_srli_epi16(rhs_raw_mat_2367ABEF_1, 4), m4bexpanded)); //B2(24-31) B3(24-31) B6(24-31) B7(24-31) BA(24-31) BB(24-31) BE(24-31) BF(24-31) + + // Shuffle pattern one - right side input + const __m512i rhs_mat_014589CD_0_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, 136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) 
B4(0-3) B5(0-3) B4(0-3) B5(0-3) B8(0-3) B9(0-3) B8(0-3) B9(0-3) BC(0-3) BD(0-3) BC(0-3) BD(0-3) + const __m512i rhs_mat_2367ABEF_0_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, 136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) BA(0-3) BB(0-3) BA(0-3) BB(0-3) BE(0-3) BF(0-3) BE(0-3) BF(0-3) + + const __m512i rhs_mat_014589CD_1_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, 136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) B8(8-11) B9(8-11) B8(8-11) B9(8-11) BC(8-11) BD(8-11) BC(8-11) BD(8-11) + const __m512i rhs_mat_2367ABEF_1_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, 136); //B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) BA(8-11) BB(8-11) BA(8-11) BB(8-11) BE(8-11) BF(8-11) BE(8-11) BF(8-11) + + const __m512i rhs_mat_014589CD_2_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, 136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) B4(16-19) B5(16-19) B4(16-19) B5(16-19) B8(16-19) B9(16-19) B8(16-19) B9(16-19) BC(16-19) BD(16-19) BC(16-19) BD(16-19) + const __m512i rhs_mat_2367ABEF_2_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, 136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) BA(16-19) BB(16-19) BA(16-19) BB(16-19) BE(16-19) BF(16-19) BE(16-19) BF(16-19) + + const __m512i rhs_mat_014589CD_3_sp1 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, 136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) B8(24-27) B9(24-27) B8(24-27) B9(24-27) BC(24-27) BD(24-27) BC(24-27) BD(24-27) + const __m512i rhs_mat_2367ABEF_3_sp1 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, 136); //B2(24-27) B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) BA(24-27) BB(24-27) BA(24-27) BB(24-27) BE(24-27) BF(24-27) BE(24-27) BF(24-27) + + // Shuffle pattern two - right side input + + const __m512i rhs_mat_014589CD_0_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_0, 221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) B8(4-7) 
B9(4-7) B8(4-7) B9(4-7) BC(4-7) BD(4-7) BC(4-7) BD(4-7) + const __m512i rhs_mat_2367ABEF_0_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_0, 221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) BA(4-7) BB(4-7) BA(4-7) BB(4-7) BE(4-7) BF(4-7) BE(4-7) BF(4-7) + + const __m512i rhs_mat_014589CD_1_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_1, 221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) B8(12-15) B9(12-15) B8(12-15) B9(12-15) BC(12-15) BD(12-15) BC(12-15) BD(12-15) + const __m512i rhs_mat_2367ABEF_1_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_1, 221); //B2(12-15) B3(12-15) B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) BA(12-15) BB(12-15) BA(12-15) BB(12-15) BE(12-15) BF(12-15) BE(12-15) BF(12-15) + + const __m512i rhs_mat_014589CD_2_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_2, 221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) B8(20-23) B9(20-23) B8(20-23) B9(20-23) BC(20-23) BD(20-23) BC(20-23) BD(20-23) + const __m512i rhs_mat_2367ABEF_2_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_2, 221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) BA(20-23) BB(20-23) BA(20-23) BB(20-23) BE(20-23) BF(20-23) BE(20-23) BF(20-23) + + const __m512i rhs_mat_014589CD_3_sp2 = _mm512_shuffle_epi32(rhs_mat_014589CD_3, 221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) B8(28-31) B9(28-31) B8(28-31) B9(28-31) BC(28-31) BD(28-31) BC(28-31) BD(28-31) + const __m512i rhs_mat_2367ABEF_3_sp2 = _mm512_shuffle_epi32(rhs_mat_2367ABEF_3, 221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) BA(28-31) BB(28-31) BA(28-31) BB(28-31) BE(28-31) BF(28-31) BE(28-31) BF(28-31) + + + // Scale values - Load the weight scale values of two block_q4_0x8 + const __m512 col_scale_f32 = GGML_F32Cx8x2_LOAD(b_ptr_0[b].d, b_ptr_1[b].d); + + // Load the four block_q4_0 quantized values interleaved with 
each other in chunks of eight - A0,A1,A2,A3 + // Loaded as set of 128 bit vectors and repeated and stored into a 256 bit vector before again repeating into 512 bit vector + __m256i lhs_mat_ymm_0123_0 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs))); + __m256i lhs_mat_ymm_01_0 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_0, lhs_mat_ymm_0123_0, 0); + __m256i lhs_mat_ymm_23_0 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_0, lhs_mat_ymm_0123_0, 17); + __m256i lhs_mat_ymm_0123_1 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 32))); + __m256i lhs_mat_ymm_01_1 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_1, lhs_mat_ymm_0123_1, 0); + __m256i lhs_mat_ymm_23_1 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_1, lhs_mat_ymm_0123_1, 17); + __m256i lhs_mat_ymm_0123_2 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 64))); + __m256i lhs_mat_ymm_01_2 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_2, lhs_mat_ymm_0123_2, 0); + __m256i lhs_mat_ymm_23_2 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_2, lhs_mat_ymm_0123_2, 17); + __m256i lhs_mat_ymm_0123_3 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 96))); + __m256i lhs_mat_ymm_01_3 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_3, lhs_mat_ymm_0123_3, 0); + __m256i lhs_mat_ymm_23_3 = _mm256_permute2f128_si256(lhs_mat_ymm_0123_3, lhs_mat_ymm_0123_3, 17); + + __m512i lhs_mat_01_0 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_0), lhs_mat_ymm_01_0, 1); + __m512i lhs_mat_23_0 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_0), lhs_mat_ymm_23_0, 1); + __m512i lhs_mat_01_1 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_1), lhs_mat_ymm_01_1, 1); + __m512i lhs_mat_23_1 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_1), lhs_mat_ymm_23_1, 1); + __m512i lhs_mat_01_2 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_2), lhs_mat_ymm_01_2, 1); + __m512i lhs_mat_23_2 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_2), lhs_mat_ymm_23_2, 1); + __m512i lhs_mat_01_3 = 
_mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_01_3), lhs_mat_ymm_01_3, 1); + __m512i lhs_mat_23_3 = _mm512_inserti32x8(_mm512_castsi256_si512(lhs_mat_ymm_23_3), lhs_mat_ymm_23_3, 1); + + // Shuffle pattern one - left side input + + const __m512i lhs_mat_01_0_sp1 = _mm512_shuffle_epi32(lhs_mat_01_0, 160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) + const __m512i lhs_mat_23_0_sp1 = _mm512_shuffle_epi32(lhs_mat_23_0, 160); //A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) + + const __m512i lhs_mat_01_1_sp1 = _mm512_shuffle_epi32(lhs_mat_01_1, 160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) + const __m512i lhs_mat_23_1_sp1 = _mm512_shuffle_epi32(lhs_mat_23_1, 160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) + + const __m512i lhs_mat_01_2_sp1 = _mm512_shuffle_epi32(lhs_mat_01_2, 160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) + const __m512i lhs_mat_23_2_sp1 = _mm512_shuffle_epi32(lhs_mat_23_2, 160); //A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) + + const __m512i lhs_mat_01_3_sp1 = _mm512_shuffle_epi32(lhs_mat_01_3, 160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) + const __m512i lhs_mat_23_3_sp1 = _mm512_shuffle_epi32(lhs_mat_23_3, 160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) 
A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) + + // Shuffle pattern two - left side input + + const __m512i lhs_mat_01_0_sp2 = _mm512_shuffle_epi32(lhs_mat_01_0, 245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) + const __m512i lhs_mat_23_0_sp2 = _mm512_shuffle_epi32(lhs_mat_23_0, 245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) + + const __m512i lhs_mat_01_1_sp2 = _mm512_shuffle_epi32(lhs_mat_01_1, 245); //A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) + const __m512i lhs_mat_23_1_sp2 = _mm512_shuffle_epi32(lhs_mat_23_1, 245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) + + const __m512i lhs_mat_01_2_sp2 = _mm512_shuffle_epi32(lhs_mat_01_2, 245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) + const __m512i lhs_mat_23_2_sp2 = _mm512_shuffle_epi32(lhs_mat_23_2, 245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) + + const __m512i lhs_mat_01_3_sp2 = _mm512_shuffle_epi32(lhs_mat_01_3, 245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) + const __m512i lhs_mat_23_3_sp2 = _mm512_shuffle_epi32(lhs_mat_23_3, 245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) + + // The values arranged in shuffle patterns 
are operated with dot product operation within 32 bit lane i.e corresponding bytes and multiplied and added into 32 bit integers within 32 bit lane + // Resembles MMLAs into 2x2 matrices in ARM Version + __m512i iacc_mat_00_sp1 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_01_3_sp1, rhs_mat_014589CD_3_sp1), mul_sum_i8_pairs_int32x16(lhs_mat_01_2_sp1, rhs_mat_014589CD_2_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_01_1_sp1, rhs_mat_014589CD_1_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_01_0_sp1, rhs_mat_014589CD_0_sp1)); + __m512i iacc_mat_01_sp1 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_01_3_sp1, rhs_mat_2367ABEF_3_sp1), mul_sum_i8_pairs_int32x16(lhs_mat_01_2_sp1, rhs_mat_2367ABEF_2_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_01_1_sp1, rhs_mat_2367ABEF_1_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_01_0_sp1, rhs_mat_2367ABEF_0_sp1)); + __m512i iacc_mat_10_sp1 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_23_3_sp1, rhs_mat_014589CD_3_sp1), mul_sum_i8_pairs_int32x16(lhs_mat_23_2_sp1, rhs_mat_014589CD_2_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_23_1_sp1, rhs_mat_014589CD_1_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_23_0_sp1, rhs_mat_014589CD_0_sp1)); + __m512i iacc_mat_11_sp1 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_23_3_sp1, rhs_mat_2367ABEF_3_sp1), mul_sum_i8_pairs_int32x16(lhs_mat_23_2_sp1, rhs_mat_2367ABEF_2_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_23_1_sp1, rhs_mat_2367ABEF_1_sp1)), mul_sum_i8_pairs_int32x16(lhs_mat_23_0_sp1, rhs_mat_2367ABEF_0_sp1)); + __m512i iacc_mat_00_sp2 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_01_3_sp2, rhs_mat_014589CD_3_sp2), mul_sum_i8_pairs_int32x16(lhs_mat_01_2_sp2, rhs_mat_014589CD_2_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_01_1_sp2, rhs_mat_014589CD_1_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_01_0_sp2, 
rhs_mat_014589CD_0_sp2)); + __m512i iacc_mat_01_sp2 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_01_3_sp2, rhs_mat_2367ABEF_3_sp2), mul_sum_i8_pairs_int32x16(lhs_mat_01_2_sp2, rhs_mat_2367ABEF_2_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_01_1_sp2, rhs_mat_2367ABEF_1_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_01_0_sp2, rhs_mat_2367ABEF_0_sp2)); + __m512i iacc_mat_10_sp2 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_23_3_sp2, rhs_mat_014589CD_3_sp2), mul_sum_i8_pairs_int32x16(lhs_mat_23_2_sp2, rhs_mat_014589CD_2_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_23_1_sp2, rhs_mat_014589CD_1_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_23_0_sp2, rhs_mat_014589CD_0_sp2)); + __m512i iacc_mat_11_sp2 = + _mm512_add_epi32(_mm512_add_epi32(_mm512_add_epi32(mul_sum_i8_pairs_int32x16(lhs_mat_23_3_sp2, rhs_mat_2367ABEF_3_sp2), mul_sum_i8_pairs_int32x16(lhs_mat_23_2_sp2, rhs_mat_2367ABEF_2_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_23_1_sp2, rhs_mat_2367ABEF_1_sp2)), mul_sum_i8_pairs_int32x16(lhs_mat_23_0_sp2, rhs_mat_2367ABEF_0_sp2)); + + // Output of both shuffle patterns are added in order to sum dot product outputs of all 32 values in block + __m512i iacc_mat_00 = _mm512_add_epi32(iacc_mat_00_sp1, iacc_mat_00_sp2); + __m512i iacc_mat_01 = _mm512_add_epi32(iacc_mat_01_sp1, iacc_mat_01_sp2); + __m512i iacc_mat_10 = _mm512_add_epi32(iacc_mat_10_sp1, iacc_mat_10_sp2); + __m512i iacc_mat_11 = _mm512_add_epi32(iacc_mat_11_sp1, iacc_mat_11_sp2); + + + // Straighten out to make 4 row vectors + __m512i iacc_row_0 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_00, _mm512_shuffle_epi32(iacc_mat_01, 78)); + __m512i iacc_row_1 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_00, 78), iacc_mat_01); + __m512i iacc_row_2 = _mm512_mask_blend_epi32(0xCCCC, iacc_mat_10, _mm512_shuffle_epi32(iacc_mat_11, 78)); + __m512i iacc_row_3 = _mm512_mask_blend_epi32(0xCCCC, _mm512_shuffle_epi32(iacc_mat_10, 78), iacc_mat_11); 
+ + // Load the scale(d) values for all the 4 Q8_0 blocks and repeat it across lanes + const __m128i row_scale_f16 = _mm_shuffle_epi32(_mm_maskload_epi32((int const*)(a_ptr[b].d), loadMask), 68); + const __m512 row_scale_f32 = GGML_F32Cx16_REPEAT_LOAD(row_scale_f16); + + // Multiply with appropiate scales and accumulate + acc_rows[0] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_0), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 0)), acc_rows[0]); + acc_rows[1] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_1), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 85)), acc_rows[1]); + acc_rows[2] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_2), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 170)), acc_rows[2]); + acc_rows[3] = _mm512_fmadd_ps(_mm512_cvtepi32_ps(iacc_row_3), _mm512_mul_ps(col_scale_f32, _mm512_shuffle_ps(row_scale_f32, row_scale_f32, 255)), acc_rows[3]); + } + + // Store the accumulated values + for (int i = 0; i < 4; i++) { + _mm512_storeu_ps((float *)(s + ((y * 4 + i) * bs + x * 8)), acc_rows[i]); + } + } + } + if (anc != nc) { + xstart = anc/8; + y = 0; + } + #endif // __AVX512F__ + + // Take group of four block_q8_0x4 structures at each pass of the loop and perform dot product operation + + for (; y < anr / 4; y += 4) { + const block_q8_0x4 * a_ptrs[4]; + + a_ptrs[0] = a_ptr_start + (y * nb); + for (int i = 0; i < 3; ++i) { + a_ptrs[i + 1] = a_ptrs[i] + nb; + } + + // Take group of eight block_q4_0x8 structures at each pass of the loop and perform dot product operation + for (int64_t x = xstart; x < nc / 8; x++) { + + const block_q4_0x8 * b_ptr = b_ptr_start + (x * b_nb); + + // Master FP accumulators + __m256 acc_rows[16]; + for (int i = 0; i < 16; i++) { + acc_rows[i] = _mm256_setzero_ps(); + } + + for (int64_t b = 0; b < nb; b++) { + // Load the eight block_q4_0 quantized values interleaved with each other in chunks of eight - B0,B1 ....B6,B7 + const __m256i 
rhs_raw_mat_0123_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs)); + const __m256i rhs_raw_mat_4567_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 32)); + const __m256i rhs_raw_mat_0123_1 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 64)); + const __m256i rhs_raw_mat_4567_1 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 96)); + + // Save the values in the following vectors in the formats B0B1B4B5, B2B3B6B7 for further processing and storing of values + const __m256i rhs_raw_mat_0145_0 = _mm256_blend_epi32(rhs_raw_mat_0123_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_0, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_0, requiredOrder), rhs_raw_mat_4567_0, 240); + const __m256i rhs_raw_mat_0145_1 = _mm256_blend_epi32(rhs_raw_mat_0123_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_1, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_1, requiredOrder), rhs_raw_mat_4567_1, 240); + + // 4-bit -> 8-bit - Sign is maintained + const __m256i rhs_mat_0145_0 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_0145_0, m4b)); //B0(0-7) B1(0-7) B4(0-7) B5(0-7) + const __m256i rhs_mat_2367_0 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_2367_0, m4b)); //B2(0-7) B3(0-7) B6(0-7) B7(0-7) + + const __m256i rhs_mat_0145_1 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_0145_1, m4b)); //B0(8-15) B1(8-15) B4(8-15) B5(8-15) + const __m256i rhs_mat_2367_1 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_2367_1, m4b)); //B2(8-15) B3(8-15) B6(8-15) B7(8-15) + + const __m256i rhs_mat_0145_2 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_0145_0, 4), m4b)); //B0(16-23) B1(16-23) B4(16-23) B5(16-23) + const __m256i rhs_mat_2367_2 = _mm256_shuffle_epi8(signextendlut, 
_mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_2367_0, 4), m4b)); //B2(16-23) B3(16-23) B6(16-23) B7(16-23) + + const __m256i rhs_mat_0145_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_0145_1, 4), m4b)); //B0(24-31) B1(24-31) B4(24-31) B5(24-31) + const __m256i rhs_mat_2367_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_2367_1, 4), m4b)); //B2(24-31) B3(24-31) B6(24-31) B7(24-31) + + // Shuffle pattern one - right side input + const __m256i rhs_mat_0145_0_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_0, 136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) B4(0-3) B5(0-3) B4(0-3) B5(0-3) + const __m256i rhs_mat_2367_0_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_0, 136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) + + const __m256i rhs_mat_0145_1_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_1, 136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) + const __m256i rhs_mat_2367_1_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_1, 136); //B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) + + const __m256i rhs_mat_0145_2_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_2, 136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) B4(16-19) B5(16-19) B4(16-19) B5(16-19) + const __m256i rhs_mat_2367_2_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_2, 136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) + + const __m256i rhs_mat_0145_3_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_3, 136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) + const __m256i rhs_mat_2367_3_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_3, 136); //B2(24-27) B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) + + // Shuffle pattern two - right side input + + const __m256i rhs_mat_0145_0_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_0, 221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) + const __m256i rhs_mat_2367_0_sp2 = 
_mm256_shuffle_epi32(rhs_mat_2367_0, 221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) + + const __m256i rhs_mat_0145_1_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_1, 221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) + const __m256i rhs_mat_2367_1_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_1, 221); //B2(12-15) B3(12-15) B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) + + const __m256i rhs_mat_0145_2_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_2, 221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) + const __m256i rhs_mat_2367_2_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_2, 221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) + + const __m256i rhs_mat_0145_3_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_3, 221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) + const __m256i rhs_mat_2367_3_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_3, 221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) + + // Scale values - Load the wight scale values of block_q4_0x8 + const __m256 col_scale_f32 = GGML_F32Cx8_LOAD(b_ptr[b].d); + + // Process LHS in groups of four + for (int rp = 0; rp < 4; rp++) { + // Load the four block_q4_0 quantized values interleaved with each other in chunks of eight - A0,A1,A2,A3 + // Loaded as set of 128 bit vectors and repeated into a 256 bit vector + __m256i lhs_mat_0123_0 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs))); + __m256i lhs_mat_01_0 = _mm256_permute2f128_si256(lhs_mat_0123_0, lhs_mat_0123_0, 0); + __m256i lhs_mat_23_0 = _mm256_permute2f128_si256(lhs_mat_0123_0, lhs_mat_0123_0, 17); + __m256i lhs_mat_0123_1 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 32))); + __m256i lhs_mat_01_1 = _mm256_permute2f128_si256(lhs_mat_0123_1, lhs_mat_0123_1, 0); + __m256i lhs_mat_23_1 = _mm256_permute2f128_si256(lhs_mat_0123_1, lhs_mat_0123_1, 17); + 
__m256i lhs_mat_0123_2 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 64))); + __m256i lhs_mat_01_2 = _mm256_permute2f128_si256(lhs_mat_0123_2, lhs_mat_0123_2, 0); + __m256i lhs_mat_23_2 = _mm256_permute2f128_si256(lhs_mat_0123_2, lhs_mat_0123_2, 17); + __m256i lhs_mat_0123_3 = _mm256_loadu_si256((const __m256i *)((a_ptrs[rp][b].qs + 96))); + __m256i lhs_mat_01_3 = _mm256_permute2f128_si256(lhs_mat_0123_3, lhs_mat_0123_3, 0); + __m256i lhs_mat_23_3 = _mm256_permute2f128_si256(lhs_mat_0123_3, lhs_mat_0123_3, 17); + + // Shuffle pattern one - left side input + const __m256i lhs_mat_01_0_sp1 = _mm256_shuffle_epi32(lhs_mat_01_0, 160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) + const __m256i lhs_mat_23_0_sp1 = _mm256_shuffle_epi32(lhs_mat_23_0, 160); //A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) + + const __m256i lhs_mat_01_1_sp1 = _mm256_shuffle_epi32(lhs_mat_01_1, 160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) + const __m256i lhs_mat_23_1_sp1 = _mm256_shuffle_epi32(lhs_mat_23_1, 160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) + + const __m256i lhs_mat_01_2_sp1 = _mm256_shuffle_epi32(lhs_mat_01_2, 160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) + const __m256i lhs_mat_23_2_sp1 = _mm256_shuffle_epi32(lhs_mat_23_2, 160); //A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) + + const __m256i lhs_mat_01_3_sp1 = _mm256_shuffle_epi32(lhs_mat_01_3, 160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) + const __m256i lhs_mat_23_3_sp1 = _mm256_shuffle_epi32(lhs_mat_23_3, 160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) + + // Shuffle pattern two - left side input + const __m256i lhs_mat_01_0_sp2 = _mm256_shuffle_epi32(lhs_mat_01_0, 245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) + const __m256i 
lhs_mat_23_0_sp2 = _mm256_shuffle_epi32(lhs_mat_23_0, 245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) + + const __m256i lhs_mat_01_1_sp2 = _mm256_shuffle_epi32(lhs_mat_01_1, 245); //A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) + const __m256i lhs_mat_23_1_sp2 = _mm256_shuffle_epi32(lhs_mat_23_1, 245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) + + const __m256i lhs_mat_01_2_sp2 = _mm256_shuffle_epi32(lhs_mat_01_2, 245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) + const __m256i lhs_mat_23_2_sp2 = _mm256_shuffle_epi32(lhs_mat_23_2, 245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) + + const __m256i lhs_mat_01_3_sp2 = _mm256_shuffle_epi32(lhs_mat_01_3, 245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) + const __m256i lhs_mat_23_3_sp2 = _mm256_shuffle_epi32(lhs_mat_23_3, 245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) + + // The values arranged in shuffle patterns are operated with dot product operation within 32 bit lane i.e corresponding bytes and multiplied and added into 32 bit integers within 32 bit lane + // Resembles MMLAs into 2x2 matrices in ARM Version + __m256i iacc_mat_00_sp1 = + _mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_01_3_sp1, rhs_mat_0145_3_sp1), mul_sum_i8_pairs_int32x8(lhs_mat_01_2_sp1, rhs_mat_0145_2_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_01_1_sp1, rhs_mat_0145_1_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_01_0_sp1, rhs_mat_0145_0_sp1)); + __m256i iacc_mat_01_sp1 = + _mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_01_3_sp1, rhs_mat_2367_3_sp1), mul_sum_i8_pairs_int32x8(lhs_mat_01_2_sp1, rhs_mat_2367_2_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_01_1_sp1, rhs_mat_2367_1_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_01_0_sp1, 
rhs_mat_2367_0_sp1)); + __m256i iacc_mat_10_sp1 = + _mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_23_3_sp1, rhs_mat_0145_3_sp1), mul_sum_i8_pairs_int32x8(lhs_mat_23_2_sp1, rhs_mat_0145_2_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_23_1_sp1, rhs_mat_0145_1_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_23_0_sp1, rhs_mat_0145_0_sp1)); + __m256i iacc_mat_11_sp1 = + _mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_23_3_sp1, rhs_mat_2367_3_sp1), mul_sum_i8_pairs_int32x8(lhs_mat_23_2_sp1, rhs_mat_2367_2_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_23_1_sp1, rhs_mat_2367_1_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_23_0_sp1, rhs_mat_2367_0_sp1)); + __m256i iacc_mat_00_sp2 = + _mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_01_3_sp2, rhs_mat_0145_3_sp2), mul_sum_i8_pairs_int32x8(lhs_mat_01_2_sp2, rhs_mat_0145_2_sp2)), mul_sum_i8_pairs_int32x8(lhs_mat_01_1_sp2, rhs_mat_0145_1_sp2)), mul_sum_i8_pairs_int32x8(lhs_mat_01_0_sp2, rhs_mat_0145_0_sp2)); + __m256i iacc_mat_01_sp2 = + _mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_01_3_sp2, rhs_mat_2367_3_sp2), mul_sum_i8_pairs_int32x8(lhs_mat_01_2_sp2, rhs_mat_2367_2_sp2)), mul_sum_i8_pairs_int32x8(lhs_mat_01_1_sp2, rhs_mat_2367_1_sp2)), mul_sum_i8_pairs_int32x8(lhs_mat_01_0_sp2, rhs_mat_2367_0_sp2)); + __m256i iacc_mat_10_sp2 = + _mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_23_3_sp2, rhs_mat_0145_3_sp2), mul_sum_i8_pairs_int32x8(lhs_mat_23_2_sp2, rhs_mat_0145_2_sp2)), mul_sum_i8_pairs_int32x8(lhs_mat_23_1_sp2, rhs_mat_0145_1_sp2)), mul_sum_i8_pairs_int32x8(lhs_mat_23_0_sp2, rhs_mat_0145_0_sp2)); + __m256i iacc_mat_11_sp2 = + _mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_23_3_sp2, rhs_mat_2367_3_sp2), mul_sum_i8_pairs_int32x8(lhs_mat_23_2_sp2, rhs_mat_2367_2_sp2)), mul_sum_i8_pairs_int32x8(lhs_mat_23_1_sp2, rhs_mat_2367_1_sp2)), 
mul_sum_i8_pairs_int32x8(lhs_mat_23_0_sp2, rhs_mat_2367_0_sp2)); + + // Output of both shuffle patterns are added in order to sum dot product outputs of all 32 values in block + __m256i iacc_mat_00 = _mm256_add_epi32(iacc_mat_00_sp1, iacc_mat_00_sp2); + __m256i iacc_mat_01 = _mm256_add_epi32(iacc_mat_01_sp1, iacc_mat_01_sp2); + __m256i iacc_mat_10 = _mm256_add_epi32(iacc_mat_10_sp1, iacc_mat_10_sp2); + __m256i iacc_mat_11 = _mm256_add_epi32(iacc_mat_11_sp1, iacc_mat_11_sp2); + + // Straighten out to make 4 row vectors + __m256i iacc_row_0 = _mm256_blend_epi32(iacc_mat_00, _mm256_shuffle_epi32(iacc_mat_01, 78), 204); + __m256i iacc_row_1 = _mm256_blend_epi32(_mm256_shuffle_epi32(iacc_mat_00, 78), iacc_mat_01, 204); + __m256i iacc_row_2 = _mm256_blend_epi32(iacc_mat_10, _mm256_shuffle_epi32(iacc_mat_11, 78), 204); + __m256i iacc_row_3 = _mm256_blend_epi32(_mm256_shuffle_epi32(iacc_mat_10, 78), iacc_mat_11, 204); + + // Load the scale(d) values for all the 4 Q8_0 blocks and repeat it across lanes + const __m256 row_scale_f32 = GGML_F32Cx8_REPEAT_LOAD(a_ptrs[rp][b].d, loadMask); + + // Multiply with appropiate scales and accumulate + acc_rows[rp * 4] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_0), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 0)), acc_rows[rp * 4]); + acc_rows[rp * 4 + 1] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_1), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 85)), acc_rows[rp * 4 + 1]); + acc_rows[rp * 4 + 2] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_2), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 170)), acc_rows[rp * 4 + 2]); + acc_rows[rp * 4 + 3] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_3), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 255)), acc_rows[rp * 4 + 3]); + } + } + + // Store the accumulated values + for (int i = 0; i < 16; i++) { + _mm256_storeu_ps((float *)(s + ((y * 4 + i) * bs + x * 8)), 
acc_rows[i]); + } + } + } + + // Take a block_q8_0x4 structures at each pass of the loop and perform dot product operation + for (; y < nr / 4; y ++) { + + const block_q8_0x4 * a_ptr = a_ptr_start + (y * nb); + + // Load the eight block_q4_0 quantized values interleaved with each other in chunks of eight - B0,B1 ....B6,B7 + for (int64_t x = xstart; x < nc / 8; x++) { + + const block_q4_0x8 * b_ptr = b_ptr_start + (x * b_nb); + + // Master FP accumulators + __m256 acc_rows[4]; + for (int i = 0; i < 4; i++) { + acc_rows[i] = _mm256_setzero_ps(); + } + + for (int64_t b = 0; b < nb; b++) { + // Load the eight block_q8_0 quantized values interleaved with each other in chunks of eight - B0,B1 ....B6,B7 + const __m256i rhs_raw_mat_0123_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs)); + const __m256i rhs_raw_mat_4567_0 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 32)); + const __m256i rhs_raw_mat_0123_1 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 64)); + const __m256i rhs_raw_mat_4567_1 = _mm256_loadu_si256((const __m256i *)(b_ptr[b].qs + 96)); + + // Save the values in the following vectors in the formats B0B1B4B5, B2B3B6B7 for further processing and storing of valuess + const __m256i rhs_raw_mat_0145_0 = _mm256_blend_epi32(rhs_raw_mat_0123_0, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_0, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_0 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_0, requiredOrder), rhs_raw_mat_4567_0, 240); + const __m256i rhs_raw_mat_0145_1 = _mm256_blend_epi32(rhs_raw_mat_0123_1, _mm256_permutevar8x32_epi32(rhs_raw_mat_4567_1, requiredOrder), 240); + const __m256i rhs_raw_mat_2367_1 = _mm256_blend_epi32(_mm256_permutevar8x32_epi32(rhs_raw_mat_0123_1, requiredOrder), rhs_raw_mat_4567_1, 240); + + // 4-bit -> 8-bit - Sign is maintained + const __m256i rhs_mat_0145_0 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_0145_0, m4b)); //B0(0-7) B1(0-7) B4(0-7) B5(0-7) + const __m256i 
rhs_mat_2367_0 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_2367_0, m4b)); //B2(0-7) B3(0-7) B6(0-7) B7(0-7) + + const __m256i rhs_mat_0145_1 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_0145_1, m4b)); //B0(8-15) B1(8-15) B4(8-15) B5(8-15) + const __m256i rhs_mat_2367_1 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(rhs_raw_mat_2367_1, m4b)); //B2(8-15) B3(8-15) B6(8-15) B7(8-15) + + const __m256i rhs_mat_0145_2 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_0145_0, 4), m4b)); //B0(16-23) B1(16-23) B4(16-23) B5(16-23) + const __m256i rhs_mat_2367_2 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_2367_0, 4), m4b)); //B2(16-23) B3(16-23) B6(16-23) B7(16-23) + + const __m256i rhs_mat_0145_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_0145_1, 4), m4b)); //B0(24-31) B1(24-31) B4(24-31) B5(24-31) + const __m256i rhs_mat_2367_3 = _mm256_shuffle_epi8(signextendlut, _mm256_and_si256(_mm256_srli_epi16(rhs_raw_mat_2367_1, 4), m4b)); //B2(24-31) B3(24-31) B6(24-31) B7(24-31) + + // Shuffle pattern one - right side input + const __m256i rhs_mat_0145_0_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_0, 136); //B0(0-3) B1(0-3) B0(0-3) B1(0-3) B4(0-3) B5(0-3) B4(0-3) B5(0-3) + const __m256i rhs_mat_2367_0_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_0, 136); //B2(0-3) B3(0-3) B2(0-3) B3(0-3) B6(0-3) B7(0-3) B6(0-3) B7(0-3) + + const __m256i rhs_mat_0145_1_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_1, 136); //B0(8-11) B1(8-11) B0(8-11) B1(8-11) B4(8-11) B5(8-11) B4(8-11) B5(8-11) + const __m256i rhs_mat_2367_1_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_1, 136); //B2(8-11) B3(8-11) B2(8-11) B3(8-11) B6(8-11) B7(8-11) B6(8-11) B7(8-11) + + const __m256i rhs_mat_0145_2_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_2, 136); //B0(16-19) B1(16-19) B0(16-19) B1(16-19) B4(16-19) B5(16-19) B4(16-19) B5(16-19) + const __m256i rhs_mat_2367_2_sp1 = 
_mm256_shuffle_epi32(rhs_mat_2367_2, 136); //B2(16-19) B3(16-19) B2(16-19) B3(16-19) B6(16-19) B7(16-19) B6(16-19) B7(16-19) + + const __m256i rhs_mat_0145_3_sp1 = _mm256_shuffle_epi32(rhs_mat_0145_3, 136); //B0(24-27) B1(24-27) B0(24-27) B1(24-27) B4(24-27) B5(24-27) B4(24-27) B5(24-27) + const __m256i rhs_mat_2367_3_sp1 = _mm256_shuffle_epi32(rhs_mat_2367_3, 136); //B2(24-27) B3(24-27) B2(24-27) B3(24-27) B6(24-27) B7(24-27) B6(24-27) B7(24-27) + + // Shuffle pattern two - right side input + + const __m256i rhs_mat_0145_0_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_0, 221); //B0(4-7) B1(4-7) B0(4-7) B1(4-7) B4(4-7) B5(4-7) B4(4-7) B5(4-7) + const __m256i rhs_mat_2367_0_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_0, 221); //B2(4-7) B3(4-7) B2(4-7) B3(4-7) B6(4-7) B7(4-7) B6(4-7) B7(4-7) + + const __m256i rhs_mat_0145_1_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_1, 221); //B0(12-15) B1(12-15) B0(12-15) B1(12-15) B4(12-15) B5(12-15) B4(12-15) B5(12-15) + const __m256i rhs_mat_2367_1_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_1, 221); //B2(12-15) B3(12-15) B2(12-15) B3(12-15) B6(12-15) B7(12-15) B6(12-15) B7(12-15) + + const __m256i rhs_mat_0145_2_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_2, 221); //B0(20-23) B1(20-23) B0(20-23) B1(20-23) B4(20-23) B5(20-23) B4(20-23) B5(20-23) + const __m256i rhs_mat_2367_2_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_2, 221); //B2(20-23) B3(20-23) B2(20-23) B3(20-23) B6(20-23) B7(20-23) B6(20-23) B7(20-23) + + const __m256i rhs_mat_0145_3_sp2 = _mm256_shuffle_epi32(rhs_mat_0145_3, 221); //B0(28-31) B1(28-31) B0(28-31) B1(28-31) B4(28-31) B5(28-31) B4(28-31) B5(28-31) + const __m256i rhs_mat_2367_3_sp2 = _mm256_shuffle_epi32(rhs_mat_2367_3, 221); //B2(28-31) B3(28-31) B2(28-31) B3(28-31) B6(28-31) B7(28-31) B6(28-31) B7(28-31) + + // Scale values - Load the wight scale values of block_q4_0x8 + const __m256 col_scale_f32 = GGML_F32Cx8_LOAD(b_ptr[b].d); + + // Load the four block_q4_0 quantized values interleaved with each other in chunks of eight - 
A0,A1,A2,A3 + // Loaded as set of 128 bit vectors and repeated into a 256 bit vector + __m256i lhs_mat_0123_0 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs))); + __m256i lhs_mat_01_0 = _mm256_permute2f128_si256(lhs_mat_0123_0, lhs_mat_0123_0, 0); + __m256i lhs_mat_23_0 = _mm256_permute2f128_si256(lhs_mat_0123_0, lhs_mat_0123_0, 17); + __m256i lhs_mat_0123_1 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 32))); + __m256i lhs_mat_01_1 = _mm256_permute2f128_si256(lhs_mat_0123_1, lhs_mat_0123_1, 0); + __m256i lhs_mat_23_1 = _mm256_permute2f128_si256(lhs_mat_0123_1, lhs_mat_0123_1, 17); + __m256i lhs_mat_0123_2 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 64))); + __m256i lhs_mat_01_2 = _mm256_permute2f128_si256(lhs_mat_0123_2, lhs_mat_0123_2, 0); + __m256i lhs_mat_23_2 = _mm256_permute2f128_si256(lhs_mat_0123_2, lhs_mat_0123_2, 17); + __m256i lhs_mat_0123_3 = _mm256_loadu_si256((const __m256i *)((a_ptr[b].qs + 96))); + __m256i lhs_mat_01_3 = _mm256_permute2f128_si256(lhs_mat_0123_3, lhs_mat_0123_3, 0); + __m256i lhs_mat_23_3 = _mm256_permute2f128_si256(lhs_mat_0123_3, lhs_mat_0123_3, 17); + + // Shuffle pattern one - left side input + + const __m256i lhs_mat_01_0_sp1 = _mm256_shuffle_epi32(lhs_mat_01_0, 160); //A0(0-3) A0(0-3) A1(0-3) A1(0-3) A0(0-3) A0(0-3) A1(0-3) A1(0-3) + const __m256i lhs_mat_23_0_sp1 = _mm256_shuffle_epi32(lhs_mat_23_0, 160); //A2(0-3) A2(0-3) A3(0-3) A3(0-3) A2(0-3) A2(0-3) A3(0-3) A3(0-3) + + const __m256i lhs_mat_01_1_sp1 = _mm256_shuffle_epi32(lhs_mat_01_1, 160); //A0(8-11) A0(8-11) A1(8-11) A1(8-11) A0(8-11) A0(8-11) A1(8-11) A1(8-11) + const __m256i lhs_mat_23_1_sp1 = _mm256_shuffle_epi32(lhs_mat_23_1, 160); //A2(8-11) A2(8-11) A3(8-11) A3(8-11) A2(8-11) A2(8-11) A3(8-11) A3(8-11) + + const __m256i lhs_mat_01_2_sp1 = _mm256_shuffle_epi32(lhs_mat_01_2, 160); //A0(16-19) A0(16-19) A1(16-19) A1(16-19) A0(16-19) A0(16-19) A1(16-19) A1(16-19) + const __m256i lhs_mat_23_2_sp1 = _mm256_shuffle_epi32(lhs_mat_23_2, 160); 
//A2(16-19) A2(16-19) A3(16-19) A3(16-19) A2(16-19) A2(16-19) A3(16-19) A3(16-19) + + const __m256i lhs_mat_01_3_sp1 = _mm256_shuffle_epi32(lhs_mat_01_3, 160); //A0(24-27) A0(24-27) A1(24-27) A1(24-27) A0(24-27) A0(24-27) A1(24-27) A1(24-27) + const __m256i lhs_mat_23_3_sp1 = _mm256_shuffle_epi32(lhs_mat_23_3, 160); //A2(24-27) A2(24-27) A3(24-27) A3(24-27) A2(24-27) A2(24-27) A3(24-27) A3(24-27) + + // Shuffle pattern two - left side input + + const __m256i lhs_mat_01_0_sp2 = _mm256_shuffle_epi32(lhs_mat_01_0, 245); //A0(4-7) A0(4-7) A1(4-7) A1(4-7) A0(4-7) A0(4-7) A1(4-7) A1(4-7) + const __m256i lhs_mat_23_0_sp2 = _mm256_shuffle_epi32(lhs_mat_23_0, 245); //A2(4-7) A2(4-7) A3(4-7) A3(4-7) A2(4-7) A2(4-7) A3(4-7) A3(4-7) + + const __m256i lhs_mat_01_1_sp2 = _mm256_shuffle_epi32(lhs_mat_01_1, 245); //A0(12-15) A0(12-15) A1(12-15) A1(12-15) A0(12-15) A0(12-15) A1(12-15) A1(12-15) + const __m256i lhs_mat_23_1_sp2 = _mm256_shuffle_epi32(lhs_mat_23_1, 245); //A2(12-15) A2(12-15) A3(12-15) A3(12-15) A2(12-15) A2(12-15) A3(12-15) A3(12-15) + + const __m256i lhs_mat_01_2_sp2 = _mm256_shuffle_epi32(lhs_mat_01_2, 245); //A0(20-23) A0(20-23) A1(20-23) A1(20-23) A0(20-23) A0(20-23) A1(20-23) A1(20-23) + const __m256i lhs_mat_23_2_sp2 = _mm256_shuffle_epi32(lhs_mat_23_2, 245); //A2(20-23) A2(20-23) A3(20-23) A3(20-23) A2(20-23) A2(20-23) A3(20-23) A3(20-23) + + const __m256i lhs_mat_01_3_sp2 = _mm256_shuffle_epi32(lhs_mat_01_3, 245); //A0(28-31) A0(28-31) A1(28-31) A1(28-31) A0(28-31) A0(28-31) A1(28-31) A1(28-31) + const __m256i lhs_mat_23_3_sp2 = _mm256_shuffle_epi32(lhs_mat_23_3, 245); //A2(28-31) A2(28-31) A3(28-31) A3(28-31) A2(28-31) A2(28-31) A3(28-31) A3(28-31) + + // The values arranged in shuffle patterns are operated with dot product operation within 32 bit lane i.e corresponding bytes and multiplied and added into 32 bit integers within 32 bit lane + // Resembles MMLAs into 2x2 matrices in ARM Version + __m256i iacc_mat_00_sp1 = + 
_mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_01_3_sp1, rhs_mat_0145_3_sp1), mul_sum_i8_pairs_int32x8(lhs_mat_01_2_sp1, rhs_mat_0145_2_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_01_1_sp1, rhs_mat_0145_1_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_01_0_sp1, rhs_mat_0145_0_sp1)); + __m256i iacc_mat_01_sp1 = + _mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_01_3_sp1, rhs_mat_2367_3_sp1), mul_sum_i8_pairs_int32x8(lhs_mat_01_2_sp1, rhs_mat_2367_2_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_01_1_sp1, rhs_mat_2367_1_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_01_0_sp1, rhs_mat_2367_0_sp1)); + __m256i iacc_mat_10_sp1 = + _mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_23_3_sp1, rhs_mat_0145_3_sp1), mul_sum_i8_pairs_int32x8(lhs_mat_23_2_sp1, rhs_mat_0145_2_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_23_1_sp1, rhs_mat_0145_1_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_23_0_sp1, rhs_mat_0145_0_sp1)); + __m256i iacc_mat_11_sp1 = + _mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_23_3_sp1, rhs_mat_2367_3_sp1), mul_sum_i8_pairs_int32x8(lhs_mat_23_2_sp1, rhs_mat_2367_2_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_23_1_sp1, rhs_mat_2367_1_sp1)), mul_sum_i8_pairs_int32x8(lhs_mat_23_0_sp1, rhs_mat_2367_0_sp1)); + __m256i iacc_mat_00_sp2 = + _mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_01_3_sp2, rhs_mat_0145_3_sp2), mul_sum_i8_pairs_int32x8(lhs_mat_01_2_sp2, rhs_mat_0145_2_sp2)), mul_sum_i8_pairs_int32x8(lhs_mat_01_1_sp2, rhs_mat_0145_1_sp2)), mul_sum_i8_pairs_int32x8(lhs_mat_01_0_sp2, rhs_mat_0145_0_sp2)); + __m256i iacc_mat_01_sp2 = + _mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_01_3_sp2, rhs_mat_2367_3_sp2), mul_sum_i8_pairs_int32x8(lhs_mat_01_2_sp2, rhs_mat_2367_2_sp2)), mul_sum_i8_pairs_int32x8(lhs_mat_01_1_sp2, rhs_mat_2367_1_sp2)), mul_sum_i8_pairs_int32x8(lhs_mat_01_0_sp2, 
rhs_mat_2367_0_sp2)); + __m256i iacc_mat_10_sp2 = + _mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_23_3_sp2, rhs_mat_0145_3_sp2), mul_sum_i8_pairs_int32x8(lhs_mat_23_2_sp2, rhs_mat_0145_2_sp2)), mul_sum_i8_pairs_int32x8(lhs_mat_23_1_sp2, rhs_mat_0145_1_sp2)), mul_sum_i8_pairs_int32x8(lhs_mat_23_0_sp2, rhs_mat_0145_0_sp2)); + __m256i iacc_mat_11_sp2 = + _mm256_add_epi32(_mm256_add_epi32(_mm256_add_epi32(mul_sum_i8_pairs_int32x8(lhs_mat_23_3_sp2, rhs_mat_2367_3_sp2), mul_sum_i8_pairs_int32x8(lhs_mat_23_2_sp2, rhs_mat_2367_2_sp2)), mul_sum_i8_pairs_int32x8(lhs_mat_23_1_sp2, rhs_mat_2367_1_sp2)), mul_sum_i8_pairs_int32x8(lhs_mat_23_0_sp2, rhs_mat_2367_0_sp2)); + + // Output of both shuffle patterns are added in order to sum dot product outputs of all 32 values in block + __m256i iacc_mat_00 = _mm256_add_epi32(iacc_mat_00_sp1, iacc_mat_00_sp2); + __m256i iacc_mat_01 = _mm256_add_epi32(iacc_mat_01_sp1, iacc_mat_01_sp2); + __m256i iacc_mat_10 = _mm256_add_epi32(iacc_mat_10_sp1, iacc_mat_10_sp2); + __m256i iacc_mat_11 = _mm256_add_epi32(iacc_mat_11_sp1, iacc_mat_11_sp2); + + + // Straighten out to make 4 row vectors + __m256i iacc_row_0 = _mm256_blend_epi32(iacc_mat_00, _mm256_shuffle_epi32(iacc_mat_01, 78), 204); + __m256i iacc_row_1 = _mm256_blend_epi32(_mm256_shuffle_epi32(iacc_mat_00, 78), iacc_mat_01, 204); + __m256i iacc_row_2 = _mm256_blend_epi32(iacc_mat_10, _mm256_shuffle_epi32(iacc_mat_11, 78), 204); + __m256i iacc_row_3 = _mm256_blend_epi32(_mm256_shuffle_epi32(iacc_mat_10, 78), iacc_mat_11, 204); + + // Load the scale(d) values for all the 4 Q8_0 blocks and repeat it across lanes + const __m256 row_scale_f32 = GGML_F32Cx8_REPEAT_LOAD(a_ptr[b].d, loadMask); + + // Multiply with appropiate scales and accumulate + acc_rows[0] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_0), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 0)), acc_rows[0]); + acc_rows[1] = 
_mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_1), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 85)), acc_rows[1]); + acc_rows[2] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_2), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 170)), acc_rows[2]); + acc_rows[3] = _mm256_fmadd_ps(_mm256_cvtepi32_ps(iacc_row_3), _mm256_mul_ps(col_scale_f32, _mm256_shuffle_ps(row_scale_f32, row_scale_f32, 255)), acc_rows[3]); + } + + // Store the accumulated values + for (int i = 0; i < 4; i++) { + _mm256_storeu_ps((float *)(s + ((y * 4 + i) * bs + x * 8)), acc_rows[i]); + } + } + } + return; + } +#endif // #if ! ((defined(_MSC_VER)) && ! defined(__clang__)) && defined(__aarch64__) + float sumf[4][8]; + int sumi; + + for (int y = 0; y < nr / 4; y++) { + const block_q8_0x4 * a_ptr = (const block_q8_0x4 *) vy + (y * nb); + for (int x = 0; x < nc / ncols_interleaved; x++) { + const block_q4_0x8 * b_ptr = (const block_q4_0x8 *) vx + (x * nb); + for (int m = 0; m < 4; m++) { + for (int j = 0; j < ncols_interleaved; j++) sumf[m][j] = 0.0; + } + for (int l = 0; l < nb; l++) { + for (int k = 0; k < (qk / (2 * blocklen)); k++) { + for (int m = 0; m < 4; m++) { + for (int j = 0; j < ncols_interleaved; j++) { + sumi = 0; + for (int i = 0; i < blocklen; ++i) { + const int v0 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] << 4); + const int v1 = (int8_t) (b_ptr[l].qs[k * ncols_interleaved * blocklen + j * blocklen + i] & 0xF0); + sumi += ((v0 * a_ptr[l].qs[k * 4 * blocklen + m * blocklen + i]) + + (v1 * a_ptr[l].qs[k * 4 * blocklen + m * blocklen + i + qk / 2 * 4])) >> 4; + } + sumf[m][j] += sumi * GGML_FP16_TO_FP32(b_ptr[l].d[j]) * GGML_FP16_TO_FP32(a_ptr[l].d[m]); + } + } + } + } + for (int m = 0; m < 4; m++) { + for (int j = 0; j < ncols_interleaved; j++) + s[(y * 4 + m) * bs + x * ncols_interleaved + j] = sumf[m][j]; + } + } + } +} diff --git a/ml/backend/ggml/ggml-aarch64.h 
b/ml/backend/ggml/ggml-aarch64.h new file mode 100644 index 000000000..385c6879a --- /dev/null +++ b/ml/backend/ggml/ggml-aarch64.h @@ -0,0 +1,65 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +// SPDX-FileCopyrightText: Copyright 2024 Arm Ltd. 
+#pragma once + +#define GGML_COMMON_DECL_C +#include "ggml-common.h" + +#include "ggml.h" + +// GGML internal header + +#ifdef __cplusplus +extern "C" { +#endif + +// Quantization +void quantize_q8_0_4x4(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_q8_0_4x8(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); + +void quantize_mat_q8_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t nrows, int64_t n_per_row, int64_t blck_size_interleave); + +// Quantization utilizing an importance matrix (a.k.a. "Activation aWare Quantization") +size_t quantize_q4_0_4x4(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_q4_0_4x8(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_q4_0_8x8(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); + +// GEMV +void ggml_gemv_q4_0_4x4_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); +void ggml_gemv_q4_0_4x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); +void ggml_gemv_q4_0_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); + +// GEMM +void ggml_gemm_q4_0_4x4_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); +void ggml_gemm_q4_0_4x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); +void ggml_gemm_q4_0_8x8_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy, int nr, int nc); + +#ifdef __cplusplus +} +#endif + 
diff --git a/ml/backend/ggml/ggml-alloc.c b/ml/backend/ggml/ggml-alloc.c new file mode 100644 index 000000000..32bf41ecf --- /dev/null +++ b/ml/backend/ggml/ggml-alloc.c @@ -0,0 +1,1068 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +#include "ggml-alloc.h" +#include "ggml-backend-impl.h" +#include "ggml.h" +#include "ggml-impl.h" +#include +#include +#include +#include +#include +#include + +#define MAX(a, b) ((a) > (b) ? (a) : (b)) +#define MAX_FREE_BLOCKS 256 + +//#define GGML_ALLOCATOR_DEBUG + +//#define AT_PRINTF(...) fprintf(stderr, __VA_ARGS__) +#define AT_PRINTF(...) 
+ + +static bool ggml_is_view(const struct ggml_tensor * t) { + return t->view_src != NULL; +} + +static bool ggml_are_same_layout(const struct ggml_tensor * a, const struct ggml_tensor * b) { + if (a->type != b->type) { + return false; + } + for (int i = 0; i < GGML_MAX_DIMS; i++) { + if (a->ne[i] != b->ne[i]) { + return false; + } + if (a->nb[i] != b->nb[i]) { + return false; + } + } + return true; +} + +static bool ggml_op_can_inplace(enum ggml_op op) { + switch (op) { + case GGML_OP_SCALE: + case GGML_OP_DIAG_MASK_ZERO: + case GGML_OP_DIAG_MASK_INF: + case GGML_OP_ADD: + case GGML_OP_ADD1: + case GGML_OP_SUB: + case GGML_OP_MUL: + case GGML_OP_DIV: + case GGML_OP_SQR: + case GGML_OP_SQRT: + case GGML_OP_LOG: + case GGML_OP_UNARY: + case GGML_OP_ROPE: + case GGML_OP_RMS_NORM: + case GGML_OP_SOFT_MAX: + return true; + + default: + return false; + } +} + +static size_t aligned_offset(const void * buffer, size_t offset, size_t alignment) { + assert(alignment && !(alignment & (alignment - 1))); // power of 2 + size_t align = (alignment - (((uintptr_t)buffer + offset) % alignment)) % alignment; + return offset + align; +} + +// tallocr + +struct ggml_tallocr ggml_tallocr_new(ggml_backend_buffer_t buffer) { + void * base = ggml_backend_buffer_get_base(buffer); + size_t align = ggml_backend_buffer_get_alignment(buffer); + + assert(align && !(align & (align - 1))); // power of 2 + + struct ggml_tallocr talloc = (struct ggml_tallocr) { + /*.buffer = */ buffer, + /*.base = */ base, + /*.alignment = */ align, + /*.offset = */ aligned_offset(base, 0, align), + }; + return talloc; +} + +void ggml_tallocr_alloc(struct ggml_tallocr * talloc, struct ggml_tensor * tensor) { + size_t size = ggml_backend_buffer_get_alloc_size(talloc->buffer, tensor); + size = GGML_PAD(size, talloc->alignment); + + if (talloc->offset + size > ggml_backend_buffer_get_size(talloc->buffer)) { + fprintf(stderr, "%s: not enough space in the buffer to allocate %s (needed %zu, available %zu)\n", + 
__func__, tensor->name, size, ggml_backend_buffer_get_size(talloc->buffer) - talloc->offset); + GGML_ABORT("not enough space in the buffer"); + } + + void * addr = (char *)ggml_backend_buffer_get_base(talloc->buffer) + talloc->offset; + talloc->offset += size; + + assert(((uintptr_t)addr % talloc->alignment) == 0); + + ggml_backend_tensor_alloc(talloc->buffer, tensor, addr); +} + +// dynamic tensor allocator + +struct free_block { + size_t offset; + size_t size; +}; + +struct ggml_dyn_tallocr { + size_t alignment; + int n_free_blocks; + struct free_block free_blocks[MAX_FREE_BLOCKS]; + size_t max_size; + +#ifdef GGML_ALLOCATOR_DEBUG + struct { + const struct ggml_tensor * tensor; + size_t offset; + } allocated_tensors[1024]; +#endif +}; + +#ifdef GGML_ALLOCATOR_DEBUG +static void add_allocated_tensor(struct ggml_dyn_tallocr * alloc, size_t offset, const struct ggml_tensor * tensor) { + for (int i = 0; i < 1024; i++) { + if (alloc->allocated_tensors[i].tensor == NULL) { + alloc->allocated_tensors[i].tensor = tensor; + alloc->allocated_tensors[i].offset = offset; + return; + } + } + GGML_ABORT("out of allocated_tensors"); +} +static void remove_allocated_tensor(struct ggml_dyn_tallocr * alloc, size_t offset, const struct ggml_tensor * tensor) { + for (int i = 0; i < 1024; i++) { + if (alloc->allocated_tensors[i].offset == offset) { + alloc->allocated_tensors[i].tensor = NULL; + return; + } + } + GGML_ABORT("tried to free tensor %s not found\n", tensor->name); +} +#endif + +static size_t ggml_dyn_tallocr_alloc(struct ggml_dyn_tallocr * alloc, size_t size, const struct ggml_tensor * tensor) { + size = aligned_offset(NULL, size, alloc->alignment); + + AT_PRINTF("%s: allocating %s (%zu bytes) - ", __func__, tensor->name, size); + + size_t max_avail = 0; + + // find the best fitting free block besides the last block + int best_fit_block = -1; + size_t best_fit_size = SIZE_MAX; + for (int i = 0; i < alloc->n_free_blocks - 1; i++) { + struct free_block * block = 
&alloc->free_blocks[i]; + max_avail = MAX(max_avail, block->size); + if (block->size >= size && block->size <= best_fit_size) { + best_fit_block = i; + best_fit_size = block->size; + } + } + + if (best_fit_block == -1) { + // the last block is our last resort + struct free_block * block = &alloc->free_blocks[alloc->n_free_blocks - 1]; + max_avail = MAX(max_avail, block->size); + if (block->size >= size) { + best_fit_block = alloc->n_free_blocks - 1; + } else { + // this should never happen + fprintf(stderr, "%s: not enough space in the buffer to allocate %zu bytes, largest block available %zu bytes\n", + __func__, size, max_avail); + GGML_ABORT("not enough space in the buffer"); + } + } + + struct free_block * block = &alloc->free_blocks[best_fit_block]; + size_t offset = block->offset; + block->offset = offset + size; + block->size -= size; + if (block->size == 0) { + // remove block if empty + alloc->n_free_blocks--; + for (int j = best_fit_block; j < alloc->n_free_blocks; j++) { + alloc->free_blocks[j] = alloc->free_blocks[j+1]; + } + } + + AT_PRINTF("block %d, offset %zu\n", best_fit_block, offset); + +#ifdef GGML_ALLOCATOR_DEBUG + add_allocated_tensor(alloc, offset, tensor); + size_t cur_max = offset + size; + if (cur_max > alloc->max_size) { + // sort allocated_tensors by offset + for (int i = 0; i < 1024; i++) { + for (int j = i + 1; j < 1024; j++) { + if (alloc->allocated_tensors[i].offset > alloc->allocated_tensors[j].offset) { + const struct ggml_tensor * tmp_tensor = alloc->allocated_tensors[i].tensor; + size_t tmp_offset = alloc->allocated_tensors[i].offset; + alloc->allocated_tensors[i].tensor = alloc->allocated_tensors[j].tensor; + alloc->allocated_tensors[i].offset = alloc->allocated_tensors[j].offset; + alloc->allocated_tensors[j].tensor = tmp_tensor; + alloc->allocated_tensors[j].offset = tmp_offset; + } + } + } + fprintf(stderr, "max_size = %.2f MB: tensors: ", cur_max / 1024.0 / 1024.0); + for (int i = 0; i < 1024; i++) { + if 
(alloc->allocated_tensors[i].tensor) { + fprintf(stderr, "%s [%zx-%zx] (%.2f MB) ", alloc->allocated_tensors[i].tensor->name, + alloc->allocated_tensors[i].offset, + alloc->allocated_tensors[i].offset + ggml_nbytes(alloc->allocated_tensors[i].tensor), + ggml_nbytes(alloc->allocated_tensors[i].tensor) / 1024.0 / 1024.0); + } + } + fprintf(stderr, "\n"); + } +#endif + + alloc->max_size = MAX(alloc->max_size, offset + size); + + return offset; + + GGML_UNUSED(tensor); +} + +// this is a very naive implementation, but for our case the number of free blocks should be very small +static void ggml_dyn_tallocr_free_tensor(struct ggml_dyn_tallocr * alloc, size_t offset, size_t size, const struct ggml_tensor * tensor) { + size = aligned_offset(NULL, size, alloc->alignment); + + AT_PRINTF("%s: freeing %s at %zu (%zu bytes) - n_free_blocks = %d\n", __func__, tensor->name, offset, size, alloc->n_free_blocks); + +#ifdef GGML_ALLOCATOR_DEBUG + remove_allocated_tensor(alloc, offset, tensor); +#endif + + // see if we can merge with an existing block + for (int i = 0; i < alloc->n_free_blocks; i++) { + struct free_block * block = &alloc->free_blocks[i]; + // check if ptr is at the end of the block + if (block->offset + block->size == offset) { + block->size += size; + // check if we can merge with the next block + if (i < alloc->n_free_blocks - 1 && block->offset + block->size == alloc->free_blocks[i+1].offset) { + block->size += alloc->free_blocks[i+1].size; + alloc->n_free_blocks--; + for (int j = i+1; j < alloc->n_free_blocks; j++) { + alloc->free_blocks[j] = alloc->free_blocks[j+1]; + } + } + return; + } + // check if ptr is at the beginning of the block + if (offset + size == block->offset) { + block->offset = offset; + block->size += size; + // check if we can merge with the previous block + if (i > 0 && alloc->free_blocks[i-1].offset + alloc->free_blocks[i-1].size == block->offset) { + alloc->free_blocks[i-1].size += block->size; + alloc->n_free_blocks--; + for (int j = i; j 
< alloc->n_free_blocks; j++) { + alloc->free_blocks[j] = alloc->free_blocks[j+1]; + } + } + return; + } + } + // otherwise, add a new block + GGML_ASSERT(alloc->n_free_blocks < MAX_FREE_BLOCKS && "out of free blocks"); + // insert the new block in the correct position to keep the array sorted by address (to make merging blocks faster) + int insert_pos = 0; + while (insert_pos < alloc->n_free_blocks && alloc->free_blocks[insert_pos].offset < offset) { + insert_pos++; + } + // shift all blocks from insert_pos onward to make room for the new block + for (int i = alloc->n_free_blocks; i > insert_pos; i--) { + alloc->free_blocks[i] = alloc->free_blocks[i-1]; + } + // insert the new block + alloc->free_blocks[insert_pos].offset = offset; + alloc->free_blocks[insert_pos].size = size; + alloc->n_free_blocks++; + + GGML_UNUSED(tensor); +} + +static void ggml_dyn_tallocr_reset(struct ggml_dyn_tallocr * alloc) { + alloc->n_free_blocks = 1; + alloc->free_blocks[0].offset = 0; + alloc->free_blocks[0].size = SIZE_MAX/2; // restrict maximum size of a measure allocator to half size_t max to avoid overflows + alloc->max_size = 0; + +#ifdef GGML_ALLOCATOR_DEBUG + for (int i = 0; i < 1024; i++) { + alloc->allocated_tensors[i].tensor = NULL; + } +#endif +} + +static struct ggml_dyn_tallocr * ggml_dyn_tallocr_new(size_t alignment) { + struct ggml_dyn_tallocr * alloc = (struct ggml_dyn_tallocr *)malloc(sizeof(struct ggml_dyn_tallocr)); + + *alloc = (struct ggml_dyn_tallocr) { + /*.alignment = */ alignment, + /*.n_free_blocks = */ 0, + /*.free_blocks = */ {{0}}, + /*.max_size = */ 0, +#ifdef GGML_ALLOCATOR_DEBUG + /*.allocated_tensors = */ {{0}}, +#endif + }; + + ggml_dyn_tallocr_reset(alloc); + + return alloc; +} + +static void ggml_dyn_tallocr_free(struct ggml_dyn_tallocr * alloc) { + free(alloc); +} + +static size_t ggml_dyn_tallocr_max_size(struct ggml_dyn_tallocr * alloc) { + return alloc->max_size; +} + + +///////////////////////////////////// + +// graph allocator + +struct 
hash_node { + int n_children; + int n_views; + int buffer_id; + size_t offset; // offset within the buffer + bool allocated; +}; + +struct tensor_alloc { + int buffer_id; + size_t offset; + size_t size_max; // 0 = pre-allocated, unused, or view +}; + +struct leaf_alloc { + int buffer_id; + struct tensor_alloc leaf; +}; + +struct node_alloc { + struct tensor_alloc dst; + struct tensor_alloc src[GGML_MAX_SRC]; +}; + +struct ggml_gallocr { + ggml_backend_buffer_type_t * bufts; // [n_buffers] + ggml_backend_buffer_t * buffers; // [n_buffers] + struct ggml_dyn_tallocr ** buf_tallocs; // [n_buffers] + int n_buffers; + + struct ggml_hash_set hash_set; + struct hash_node * hash_values; // [hash_set.size] + + struct node_alloc * node_allocs; // [n_nodes] + int n_nodes; + + struct leaf_alloc * leaf_allocs; // [n_leafs] + int n_leafs; +}; + +ggml_gallocr_t ggml_gallocr_new_n(ggml_backend_buffer_type_t * bufts, int n_bufs) { + ggml_gallocr_t galloc = (ggml_gallocr_t)calloc(1, sizeof(struct ggml_gallocr)); + GGML_ASSERT(galloc != NULL); + + galloc->bufts = calloc(n_bufs, sizeof(ggml_backend_buffer_type_t)); + GGML_ASSERT(galloc->bufts != NULL); + + galloc->buffers = calloc(n_bufs, sizeof(ggml_backend_buffer_t)); + GGML_ASSERT(galloc->buffers != NULL); + + galloc->buf_tallocs = calloc(n_bufs, sizeof(struct ggml_dyn_tallocr *)); + GGML_ASSERT(galloc->buf_tallocs != NULL); + + for (int i = 0; i < n_bufs; i++) { + galloc->bufts[i] = bufts[i]; + galloc->buffers[i] = NULL; + + // check if the same buffer type is used multiple times and reuse the same allocator + for (int j = 0; j < i; j++) { + if (bufts[i] == bufts[j]) { + galloc->buf_tallocs[i] = galloc->buf_tallocs[j]; + break; + } + } + + if (galloc->buf_tallocs[i] == NULL) { + size_t alignment = ggml_backend_buft_get_alignment(bufts[i]); + galloc->buf_tallocs[i] = ggml_dyn_tallocr_new(alignment); + } + } + galloc->n_buffers = n_bufs; + + return galloc; +} + +ggml_gallocr_t ggml_gallocr_new(ggml_backend_buffer_type_t buft) { + 
return ggml_gallocr_new_n(&buft, 1); +} + +void ggml_gallocr_free(ggml_gallocr_t galloc) { + if (galloc == NULL) { + return; + } + + for (int i = 0; i < galloc->n_buffers; i++) { + if (galloc->buffers != NULL) { + // skip if already freed + bool freed = false; + for (int j = 0; j < i; j++) { + if (galloc->buffers[j] == galloc->buffers[i]) { + freed = true; + break; + } + } + if (!freed) { + ggml_backend_buffer_free(galloc->buffers[i]); + } + } + if (galloc->buf_tallocs != NULL) { + // skip if already freed + bool freed = false; + for (int j = 0; j < i; j++) { + if (galloc->buf_tallocs[j] == galloc->buf_tallocs[i]) { + freed = true; + break; + } + } + if (!freed) { + ggml_dyn_tallocr_free(galloc->buf_tallocs[i]); + } + } + } + + ggml_hash_set_free(&galloc->hash_set); + free(galloc->hash_values); + free(galloc->bufts); + free(galloc->buffers); + free(galloc->buf_tallocs); + free(galloc->node_allocs); + free(galloc->leaf_allocs); + free(galloc); +} + +typedef struct ggml_gallocr * ggml_gallocr_t; + +static struct hash_node * ggml_gallocr_hash_get(ggml_gallocr_t galloc, struct ggml_tensor * t) { + size_t i = ggml_hash_find_or_insert(&galloc->hash_set, t); + return &galloc->hash_values[i]; +} + +static bool ggml_gallocr_is_own(ggml_gallocr_t galloc, struct ggml_tensor * t) { + return ggml_gallocr_hash_get(galloc, t)->allocated; +} + +static void ggml_gallocr_set_node_offset(ggml_gallocr_t galloc, struct ggml_tensor * node, int buffer_id, size_t offset) { + struct hash_node * hn = ggml_gallocr_hash_get(galloc, node); + hn->buffer_id = buffer_id; + hn->offset = offset; + hn->allocated = true; +} + +static bool ggml_gallocr_is_allocated(ggml_gallocr_t galloc, struct ggml_tensor * t) { + return t->data != NULL || ggml_gallocr_hash_get(galloc, t)->allocated; +} + +static void ggml_gallocr_allocate_node(ggml_gallocr_t galloc, struct ggml_tensor * node, int buffer_id) { + struct hash_node * hn = ggml_gallocr_hash_get(galloc, node); + + if (!ggml_gallocr_is_allocated(galloc, 
node) && !ggml_is_view(node)) { + hn->allocated = true; + assert(hn->offset == 0); + + // try to reuse a parent's buffer (inplace) + if (ggml_op_can_inplace(node->op)) { + for (int i = 0; i < GGML_MAX_SRC; i++) { + struct ggml_tensor * parent = node->src[i]; + if (parent == NULL) { + continue; + } + + // if the node's data is external, then we cannot re-use it + if (!ggml_gallocr_is_own(galloc, parent)) { + AT_PRINTF("not reusing parent %s for %s as %p is external\n", parent->name, node->name, parent->data); + continue; + } + + // outputs cannot be reused + if (parent->flags & GGML_TENSOR_FLAG_OUTPUT || (parent->view_src != NULL && parent->view_src->flags & GGML_TENSOR_FLAG_OUTPUT)) { + AT_PRINTF("not reusing parent %s for %s as it is an output\n", parent->name, node->name); + continue; + } + + if (!ggml_are_same_layout(node, parent)) { + AT_PRINTF("not reusing parent %s for %s as layouts are different\n", parent->name, node->name); + continue; + } + + struct hash_node * p_hn = ggml_gallocr_hash_get(galloc, parent); + if (p_hn->n_children == 1 && p_hn->n_views == 0) { + if (ggml_is_view(parent)) { + struct ggml_tensor * view_src = parent->view_src; + struct hash_node * view_src_hn = ggml_gallocr_hash_get(galloc, view_src); + if (view_src_hn->n_views == 1 && view_src_hn->n_children == 0 && view_src->data == parent->data) { + AT_PRINTF("reusing view parent %s (%s) for %s\n", parent->name, view_src->name, node->name); + assert(view_src_hn->offset == p_hn->offset); + hn->buffer_id = p_hn->buffer_id; + hn->offset = p_hn->offset; + p_hn->allocated = false; // avoid freeing the parent + view_src_hn->allocated = false; + return; + } + } else { + AT_PRINTF("reusing parent %s for %s\n", parent->name, node->name); + hn->buffer_id = p_hn->buffer_id; + hn->offset = p_hn->offset; + p_hn->allocated = false; // avoid freeing the parent + return; + } + } + } + } + // allocate tensor from the buffer + struct ggml_dyn_tallocr * alloc = galloc->buf_tallocs[buffer_id]; + 
ggml_backend_buffer_type_t buft = galloc->bufts[buffer_id]; + size_t size = ggml_backend_buft_get_alloc_size(buft, node); + size_t offset = ggml_dyn_tallocr_alloc(alloc, size, node); + hn->buffer_id = buffer_id; + hn->offset = offset; + return; + } +} + +static void ggml_gallocr_free_node(ggml_gallocr_t galloc, struct ggml_tensor * node) { + // graph outputs are never freed + if (node->flags & GGML_TENSOR_FLAG_OUTPUT) { + AT_PRINTF("not freeing output %s\n", node->name); + return; + } + + struct hash_node * hn = ggml_gallocr_hash_get(galloc, node); + size_t offset = hn->offset; + int buffer_id = hn->buffer_id; + struct ggml_dyn_tallocr * alloc = galloc->buf_tallocs[buffer_id]; + ggml_backend_buffer_type_t buft = galloc->bufts[buffer_id]; + size_t size = ggml_backend_buft_get_alloc_size(buft, node); + ggml_dyn_tallocr_free_tensor(alloc, offset, size, node); + hn->allocated = false; +} + +static int get_node_buffer_id(const int * node_buffer_ids, int i) { + return node_buffer_ids ? node_buffer_ids[i] : 0; +} + +static void ggml_gallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgraph * graph, const int * node_buffer_ids, const int * leaf_buffer_ids) { + // clear hash tables + ggml_hash_set_reset(&galloc->hash_set); + memset(galloc->hash_values, 0, sizeof(struct hash_node) * galloc->hash_set.size); + + // allocate leafs + // these may be tensors that the application is not using in the graph, but may still want to allocate for other purposes + for (int i = 0; i < graph->n_leafs; i++) { + struct ggml_tensor * leaf = graph->leafs[i]; + ggml_gallocr_allocate_node(galloc, leaf, get_node_buffer_id(leaf_buffer_ids, i)); + } + + // count number of children and views + // allocate other graph inputs and leafs first to avoid overwriting them + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + + // TODO: better way to add external dependencies + // GGML_OP_NONE does not appear normally in the graph nodes, but is used by 
ggml-backend to add dependencies to + // control when some tensors are allocated and freed. in this case, the dependencies are in `src`, but the node + // itself is never used and should not be considered a dependency + if (ggml_is_view(node) && node->op != GGML_OP_NONE) { + struct ggml_tensor * view_src = node->view_src; + ggml_gallocr_hash_get(galloc, view_src)->n_views += 1; + } + + if (node->flags & GGML_TENSOR_FLAG_INPUT) { + ggml_gallocr_allocate_node(galloc, graph->nodes[i], get_node_buffer_id(node_buffer_ids, i)); + } + + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + continue; + } + + ggml_gallocr_hash_get(galloc, src)->n_children += 1; + + // allocate explicit inputs + if (src->flags & GGML_TENSOR_FLAG_INPUT) { + ggml_gallocr_allocate_node(galloc, src, get_node_buffer_id(node_buffer_ids, i)); + } + } + } + + // allocate tensors + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + int buffer_id = get_node_buffer_id(node_buffer_ids, i); + + // allocate parents (only leafs need to be allocated at this point) + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * parent = node->src[j]; + if (parent == NULL) { + continue; + } + ggml_gallocr_allocate_node(galloc, parent, buffer_id); + } + + // allocate node + ggml_gallocr_allocate_node(galloc, node, buffer_id); + + AT_PRINTF("exec: %s (%s) <= ", ggml_op_desc(node), node->name); + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * parent = node->src[j]; + if (parent == NULL) { + continue; + } + AT_PRINTF("%s", parent->name); + if (j < GGML_MAX_SRC - 1 && node->src[j + 1] != NULL) { + AT_PRINTF(", "); + } + } + AT_PRINTF("\n"); + + // update parents + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * parent = node->src[j]; + if (parent == NULL) { + continue; + } + struct hash_node * p_hn = ggml_gallocr_hash_get(galloc, parent); + p_hn->n_children -= 1; + + AT_PRINTF("parent 
%s: %d children, %d views, allocated: %d\n", + parent->name, p_hn->n_children, p_hn->n_views, p_hn->allocated); + + if (p_hn->n_children == 0 && p_hn->n_views == 0) { + if (ggml_is_view(parent)) { + struct ggml_tensor * view_src = parent->view_src; + struct hash_node * view_src_hn = ggml_gallocr_hash_get(galloc, view_src); + view_src_hn->n_views -= 1; + AT_PRINTF("view_src %s: %d children, %d views\n", + view_src->name, view_src_hn->n_children, view_src_hn->n_views); + if (view_src_hn->n_views == 0 && view_src_hn->n_children == 0 && view_src_hn->allocated) { + ggml_gallocr_free_node(galloc, view_src); + } + } + else if (p_hn->allocated) { + ggml_gallocr_free_node(galloc, parent); + } + } + AT_PRINTF("\n"); + } + } +} + +bool ggml_gallocr_reserve_n(ggml_gallocr_t galloc, struct ggml_cgraph * graph, const int * node_buffer_ids, const int * leaf_buffer_ids) { + size_t min_hash_size = graph->n_nodes + graph->n_leafs; + // add 25% margin to avoid hash collisions + min_hash_size += min_hash_size / 4; + + // initialize hash table + if (galloc->hash_set.size < min_hash_size) { + ggml_hash_set_free(&galloc->hash_set); + galloc->hash_set = ggml_hash_set_new(min_hash_size); + GGML_ASSERT(galloc->hash_set.keys != NULL); + + free(galloc->hash_values); + galloc->hash_values = malloc(sizeof(struct hash_node) * galloc->hash_set.size); + GGML_ASSERT(galloc->hash_values != NULL); + } + + // reset allocators + for (int i = 0; i < galloc->n_buffers; i++) { + ggml_dyn_tallocr_reset(galloc->buf_tallocs[i]); + } + + // allocate in hash table + ggml_gallocr_alloc_graph_impl(galloc, graph, node_buffer_ids, leaf_buffer_ids); + + // set the node_allocs from the hash table + if (galloc->n_nodes < graph->n_nodes) { + free(galloc->node_allocs); + galloc->node_allocs = calloc(graph->n_nodes, sizeof(struct node_alloc)); + GGML_ASSERT(galloc->node_allocs != NULL); + } + galloc->n_nodes = graph->n_nodes; + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + 
struct node_alloc * node_alloc = &galloc->node_allocs[i]; + if (node->view_src || node->data) { + node_alloc->dst.buffer_id = -1; + node_alloc->dst.offset = SIZE_MAX; + node_alloc->dst.size_max = 0; + } else { + struct hash_node * hn = ggml_gallocr_hash_get(galloc, node); + node_alloc->dst.buffer_id = hn->buffer_id; + node_alloc->dst.offset = hn->offset; + node_alloc->dst.size_max = ggml_backend_buft_get_alloc_size(galloc->bufts[hn->buffer_id], node); + } + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (!src || src->view_src || src->data) { + node_alloc->src[j].buffer_id = -1; + node_alloc->src[j].offset = SIZE_MAX; + node_alloc->src[j].size_max = 0; + } else { + struct hash_node * hn = ggml_gallocr_hash_get(galloc, src); + node_alloc->src[j].buffer_id = hn->buffer_id; + node_alloc->src[j].offset = hn->offset; + node_alloc->src[j].size_max = ggml_backend_buft_get_alloc_size(galloc->bufts[hn->buffer_id], src); + } + } + } + if (galloc->n_leafs < graph->n_leafs) { + free(galloc->leaf_allocs); + galloc->leaf_allocs = calloc(graph->n_leafs, sizeof(galloc->leaf_allocs[0])); + GGML_ASSERT(galloc->leaf_allocs != NULL); + } + galloc->n_leafs = graph->n_leafs; + for (int i = 0; i < graph->n_leafs; i++) { + struct ggml_tensor * leaf = graph->leafs[i]; + struct hash_node * hn = ggml_gallocr_hash_get(galloc, leaf); + galloc->leaf_allocs[i].buffer_id = hn->buffer_id; + if (leaf->view_src || leaf->data) { + galloc->leaf_allocs[i].leaf.buffer_id = -1; + galloc->leaf_allocs[i].leaf.offset = SIZE_MAX; + galloc->leaf_allocs[i].leaf.size_max = 0; + } else { + galloc->leaf_allocs[i].leaf.buffer_id = hn->buffer_id; + galloc->leaf_allocs[i].leaf.offset = hn->offset; + galloc->leaf_allocs[i].leaf.size_max = ggml_backend_buft_get_alloc_size(galloc->bufts[hn->buffer_id], leaf); + } + } + + // reallocate buffers if needed + for (int i = 0; i < galloc->n_buffers; i++) { + // if the buffer type is used multiple times, we reuse the same buffer + for 
(int j = 0; j < i; j++) { + if (galloc->buf_tallocs[j] == galloc->buf_tallocs[i]) { + galloc->buffers[i] = galloc->buffers[j]; + break; + } + } + + size_t cur_size = galloc->buffers[i] ? ggml_backend_buffer_get_size(galloc->buffers[i]) : 0; + size_t new_size = ggml_dyn_tallocr_max_size(galloc->buf_tallocs[i]); + + // even if there are no tensors allocated in this buffer, we still need to allocate it to initialize views + if (new_size > cur_size || galloc->buffers[i] == NULL) { +#ifndef NDEBUG + fprintf(stderr, "%s: reallocating %s buffer from size %.02f MiB to %.02f MiB\n", __func__, ggml_backend_buft_name(galloc->bufts[i]), cur_size / 1024.0 / 1024.0, new_size / 1024.0 / 1024.0); +#endif + + ggml_backend_buffer_free(galloc->buffers[i]); + galloc->buffers[i] = ggml_backend_buft_alloc_buffer(galloc->bufts[i], new_size); + if (galloc->buffers[i] == NULL) { + fprintf(stderr, "%s: failed to allocate %s buffer of size %zu\n", __func__, ggml_backend_buft_name(galloc->bufts[i]), new_size); + return false; + } + ggml_backend_buffer_set_usage(galloc->buffers[i], GGML_BACKEND_BUFFER_USAGE_COMPUTE); + } + } + + return true; +} + +bool ggml_gallocr_reserve(ggml_gallocr_t galloc, struct ggml_cgraph *graph) { + return ggml_gallocr_reserve_n(galloc, graph, NULL, NULL); +} + +static void ggml_gallocr_init_tensor(ggml_gallocr_t galloc, struct ggml_tensor * tensor, struct tensor_alloc * tensor_alloc) { + int buffer_id = tensor_alloc->buffer_id; + assert(tensor->data || tensor->view_src || ggml_backend_buffer_get_alloc_size(galloc->buffers[buffer_id], tensor) <= tensor_alloc->size_max); + + if (tensor->view_src != NULL) { + if (tensor->buffer == NULL) { + assert(tensor_alloc->offset == SIZE_MAX); + if (tensor->view_src->buffer == NULL) { + // this tensor was allocated without ggml-backend + return; + } + ggml_backend_view_init(tensor); + } + } else { + if (tensor->data == NULL) { + assert(tensor_alloc->offset != SIZE_MAX); + 
assert(ggml_backend_buffer_get_alloc_size(galloc->buffers[buffer_id], tensor) <= tensor_alloc->size_max); + void * base = ggml_backend_buffer_get_base(galloc->buffers[buffer_id]); + void * addr = (char *)base + tensor_alloc->offset; + ggml_backend_tensor_alloc(galloc->buffers[buffer_id], tensor, addr); + } else { + if (tensor->buffer == NULL) { + // this tensor was allocated without ggml-backend + return; + } + } + } +} + +static bool ggml_gallocr_node_needs_realloc(ggml_gallocr_t galloc, struct ggml_tensor * node, struct tensor_alloc * talloc) { + size_t node_size = (node->data || node->view_src) ? 0 : ggml_backend_buft_get_alloc_size(galloc->bufts[talloc->buffer_id], node); + return talloc->size_max >= node_size; +} + +static bool ggml_gallocr_needs_realloc(ggml_gallocr_t galloc, struct ggml_cgraph * graph) { + if (galloc->n_nodes != graph->n_nodes) { +#ifndef NDEBUG + fprintf(stderr, "%s: graph has different number of nodes\n", __func__); +#endif + return true; + } + + if (galloc->n_leafs != graph->n_leafs) { +#ifndef NDEBUG + fprintf(stderr, "%s: graph has different number of leafs\n", __func__); +#endif + return true; + } + + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + struct node_alloc * node_alloc = &galloc->node_allocs[i]; + + if (!ggml_gallocr_node_needs_realloc(galloc, node, &node_alloc->dst)) { +#ifndef NDEBUG + fprintf(stderr, "%s: node %s is not valid\n", __func__, node->name); +#endif + return true; + } + + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + continue; + } + if (!ggml_gallocr_node_needs_realloc(galloc, src, &node_alloc->src[j])) { +#ifndef NDEBUG + fprintf(stderr, "%s: src %d (%s) of node %s is not valid\n", __func__, j, src->name, node->name); +#endif + return true; + } + } + } + + return false; +} + +bool ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, struct ggml_cgraph * graph) { + if (ggml_gallocr_needs_realloc(galloc, graph)) { 
+ if (galloc->n_buffers == 1) { +#ifndef NDEBUG + fprintf(stderr, "%s: reallocating buffers automatically\n", __func__); +#endif + if (!ggml_gallocr_reserve(galloc, graph)) { + return false; + } + } else { +#ifndef NDEBUG + fprintf(stderr, "%s: cannot reallocate multi buffer graph automatically, call reserve\n", __func__); +#endif + return false; + } + } + + // reset buffers + for (int i = 0; i < galloc->n_buffers; i++) { + if (galloc->buffers[i] != NULL) { + ggml_backend_buffer_reset(galloc->buffers[i]); + } + } + + // allocate the graph tensors from the previous assignments + // leafs + for (int i = 0; i < graph->n_leafs; i++) { + struct ggml_tensor * leaf = graph->leafs[i]; + struct leaf_alloc * leaf_alloc = &galloc->leaf_allocs[i]; + ggml_gallocr_init_tensor(galloc, leaf, &leaf_alloc->leaf); + } + // nodes + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + struct node_alloc * node_alloc = &galloc->node_allocs[i]; + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + continue; + } + ggml_gallocr_init_tensor(galloc, src, &node_alloc->src[j]); + } + ggml_gallocr_init_tensor(galloc, node, &node_alloc->dst); + } + + return true; +} + +size_t ggml_gallocr_get_buffer_size(ggml_gallocr_t galloc, int buffer_id) { + GGML_ASSERT(buffer_id >= 0 && buffer_id < galloc->n_buffers); + + if (galloc->buffers[buffer_id] == NULL) { + return 0; + } + + for (int i = 0; i < buffer_id; i++) { + if (galloc->buffers[i] == galloc->buffers[buffer_id]) { + // this buffer is the same as a previous one due to the same buffer type being used multiple times + // only return the buffer size the first time it appears to avoid double counting + return 0; + } + } + + return ggml_backend_buffer_get_size(galloc->buffers[buffer_id]); +} + +// utils + +static bool alloc_tensor_range(struct ggml_context * ctx, + struct ggml_tensor * first, struct ggml_tensor * last, + ggml_backend_buffer_type_t buft, 
size_t size, + ggml_backend_buffer_t ** buffers, size_t * n_buffers) { + ggml_backend_buffer_t buffer = ggml_backend_buft_alloc_buffer(buft, size); + if (buffer == NULL) { +#ifndef NDEBUG + fprintf(stderr, "%s: failed to allocate %s buffer of size %zu\n", __func__, ggml_backend_buft_name(buft), size); +#endif + for (size_t i = 0; i < *n_buffers; i++) { + ggml_backend_buffer_free((*buffers)[i]); + } + free(*buffers); + return false; + } + + struct ggml_tallocr tallocr = ggml_tallocr_new(buffer); + + for (struct ggml_tensor * t = first; t != last; t = ggml_get_next_tensor(ctx, t)) { + if (t->data == NULL) { + if (t->view_src == NULL) { + ggml_tallocr_alloc(&tallocr, t); + } else if (t->buffer == NULL) { + ggml_backend_view_init(t); + } + } else { + if (t->view_src != NULL && t->buffer == NULL) { + // view of a pre-allocated tensor + ggml_backend_view_init(t); + } + } + } + + *buffers = realloc(*buffers, sizeof(ggml_backend_buffer_t) * (*n_buffers + 1)); + (*buffers)[(*n_buffers)++] = buffer; + + return true; +} + +ggml_backend_buffer_t ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_context * ctx, ggml_backend_buffer_type_t buft) { + GGML_ASSERT(ggml_get_no_alloc(ctx) == true); + + size_t alignment = ggml_backend_buft_get_alignment(buft); + size_t max_size = ggml_backend_buft_get_max_size(buft); + + ggml_backend_buffer_t * buffers = NULL; + size_t n_buffers = 0; + + size_t cur_buf_size = 0; + struct ggml_tensor * first = ggml_get_first_tensor(ctx); + for (struct ggml_tensor * t = first; t != NULL; t = ggml_get_next_tensor(ctx, t)) { + size_t this_size = 0; + if (t->data == NULL && t->view_src == NULL) { + this_size = GGML_PAD(ggml_backend_buft_get_alloc_size(buft, t), alignment); + } + + if (this_size > max_size) { + fprintf(stderr, "%s: tensor %s is too large to fit in a %s buffer (tensor size: %zu, max buffer size: %zu)\n", + __func__, t->name, + ggml_backend_buft_name(buft), + this_size, max_size); + for (size_t i = 0; i < n_buffers; i++) { + 
ggml_backend_buffer_free(buffers[i]); + } + free(buffers); + return NULL; + } + + if ((cur_buf_size + this_size) > max_size) { + // allocate tensors in the current buffer + if (!alloc_tensor_range(ctx, first, t, buft, cur_buf_size, &buffers, &n_buffers)) { + return NULL; + } + first = t; + cur_buf_size = this_size; + } else { + cur_buf_size += this_size; + } + } + + // allocate remaining tensors + if (cur_buf_size > 0) { + if (!alloc_tensor_range(ctx, first, NULL, buft, cur_buf_size, &buffers, &n_buffers)) { + return NULL; + } + } + + if (n_buffers == 0) { +#ifndef NDEBUG + fprintf(stderr, "%s: all tensors in the context are already allocated\n", __func__); +#endif + return NULL; + } + + ggml_backend_buffer_t buffer; + if (n_buffers == 1) { + buffer = buffers[0]; + } else { + buffer = ggml_backend_multi_buffer_alloc_buffer(buffers, n_buffers); + } + free(buffers); + return buffer; +} + +ggml_backend_buffer_t ggml_backend_alloc_ctx_tensors(struct ggml_context * ctx, ggml_backend_t backend) { + return ggml_backend_alloc_ctx_tensors_from_buft(ctx, ggml_backend_get_default_buffer_type(backend)); +} diff --git a/ml/backend/ggml/ggml-alloc.h b/ml/backend/ggml/ggml-alloc.h new file mode 100644 index 000000000..c44864b81 --- /dev/null +++ b/ml/backend/ggml/ggml-alloc.h @@ -0,0 +1,102 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be 
included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +#pragma once + +#include "ggml.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct ggml_backend_buffer_type * ggml_backend_buffer_type_t; +typedef struct ggml_backend_buffer * ggml_backend_buffer_t; +typedef struct ggml_backend * ggml_backend_t; + +// Tensor allocator +struct ggml_tallocr { + ggml_backend_buffer_t buffer; + void * base; + size_t alignment; + size_t offset; +}; + +GGML_API struct ggml_tallocr ggml_tallocr_new(ggml_backend_buffer_t buffer); +GGML_API void ggml_tallocr_alloc(struct ggml_tallocr * talloc, struct ggml_tensor * tensor); + +// Graph allocator +/* + Example usage: + ggml_gallocr_t galloc = ggml_gallocr_new(ggml_bacckend_cpu_buffer_type()); + + // optional: create a worst-case graph and reserve the buffers to avoid reallocations + ggml_gallocr_reserve(galloc, build_graph(max_batch)); + + // allocate the graph + struct ggml_cgraph * graph = build_graph(batch); + ggml_gallocr_alloc_graph(galloc, graph); + + printf("compute buffer size: %zu bytes\n", ggml_gallocr_get_buffer_size(galloc, 0)); + + // evaluate the graph + ggml_backend_graph_compute(backend, graph); +*/ + +// special tensor flags for use with the graph allocator: +// ggml_set_input(): all input tensors are allocated at the beginning of the graph in non-overlapping addresses +// ggml_set_output(): output tensors are never freed and never overwritten + +typedef struct ggml_gallocr * ggml_gallocr_t; + +GGML_API 
ggml_gallocr_t ggml_gallocr_new(ggml_backend_buffer_type_t buft); +GGML_API ggml_gallocr_t ggml_gallocr_new_n(ggml_backend_buffer_type_t * bufts, int n_bufs); +GGML_API void ggml_gallocr_free(ggml_gallocr_t galloc); + +// pre-allocate buffers from a measure graph - does not allocate or modify the graph +// call with a worst-case graph to avoid buffer reallocations +// not strictly required for single buffer usage: ggml_gallocr_alloc_graph will reallocate the buffers automatically if needed +// returns false if the buffer allocation failed +GGML_API bool ggml_gallocr_reserve(ggml_gallocr_t galloc, struct ggml_cgraph * graph); +GGML_API bool ggml_gallocr_reserve_n( + ggml_gallocr_t galloc, + struct ggml_cgraph * graph, + const int * node_buffer_ids, + const int * leaf_buffer_ids); + +// automatic reallocation if the topology changes when using a single buffer +// returns false if using multiple buffers and a re-allocation is needed (call ggml_gallocr_reserve_n first to set the node buffers) +GGML_API bool ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, struct ggml_cgraph * graph); + +GGML_API size_t ggml_gallocr_get_buffer_size(ggml_gallocr_t galloc, int buffer_id); + +// Utils +// Create a buffer and allocate all the tensors in a ggml_context +GGML_API struct ggml_backend_buffer * ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_context * ctx, ggml_backend_buffer_type_t buft); +GGML_API struct ggml_backend_buffer * ggml_backend_alloc_ctx_tensors(struct ggml_context * ctx, ggml_backend_t backend); + +#ifdef __cplusplus +} +#endif diff --git a/ml/backend/ggml/ggml-backend-impl.h b/ml/backend/ggml/ggml-backend-impl.h new file mode 100644 index 000000000..7223974fd --- /dev/null +++ b/ml/backend/ggml/ggml-backend-impl.h @@ -0,0 +1,180 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person 
obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +#pragma once + +// ggml-backend internal header + +#include "ggml-backend.h" + +#ifdef __cplusplus +extern "C" { +#endif + + // + // Backend buffer + // + + // buffer type + typedef void * ggml_backend_buffer_type_context_t; + + struct ggml_backend_buffer_type_i { + const char * (*GGML_CALL get_name) (ggml_backend_buffer_type_t buft); + // allocate a buffer of this type + ggml_backend_buffer_t (*GGML_CALL alloc_buffer) (ggml_backend_buffer_type_t buft, size_t size); + // tensor alignment + size_t (*GGML_CALL get_alignment) (ggml_backend_buffer_type_t buft); + // max buffer size that can be allocated + size_t (*GGML_CALL get_max_size) (ggml_backend_buffer_type_t buft); + // data size needed to allocate the tensor, including padding + size_t (*GGML_CALL get_alloc_size) (ggml_backend_buffer_type_t buft, const struct ggml_tensor * tensor); + // check if tensor data is in host memory + bool (*GGML_CALL is_host) (ggml_backend_buffer_type_t buft); + }; + + struct ggml_backend_buffer_type { + struct ggml_backend_buffer_type_i iface; + ggml_backend_buffer_type_context_t context; + }; + + // buffer + typedef void * ggml_backend_buffer_context_t; + + struct ggml_backend_buffer_i { + const char * (*GGML_CALL get_name) (ggml_backend_buffer_t buffer); + void (*GGML_CALL free_buffer) (ggml_backend_buffer_t buffer); + void * (*GGML_CALL get_base) (ggml_backend_buffer_t buffer); + void (*GGML_CALL init_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + void (*GGML_CALL memset_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, uint8_t value, size_t offset, size_t size); + void (*GGML_CALL set_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); + void (*GGML_CALL get_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + bool (*GGML_CALL cpy_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * src, 
struct ggml_tensor * dst); // dst is in the buffer, src may be in any buffer + void (*GGML_CALL clear) (ggml_backend_buffer_t buffer, uint8_t value); + void (*GGML_CALL reset) (ggml_backend_buffer_t buffer); // reset any internal state due to tensor initialization, such as tensor extras + }; + + struct ggml_backend_buffer { + struct ggml_backend_buffer_i iface; + ggml_backend_buffer_type_t buft; + ggml_backend_buffer_context_t context; + size_t size; + enum ggml_backend_buffer_usage usage; + }; + + GGML_CALL ggml_backend_buffer_t ggml_backend_buffer_init( + ggml_backend_buffer_type_t buft, + struct ggml_backend_buffer_i iface, + ggml_backend_buffer_context_t context, + size_t size); + + // do not use directly, use ggml_backend_tensor_copy instead + bool ggml_backend_buffer_copy_tensor(const struct ggml_tensor * src, struct ggml_tensor * dst); + + // buffer that contains a collection of buffers + GGML_CALL ggml_backend_buffer_t ggml_backend_multi_buffer_alloc_buffer(ggml_backend_buffer_t * buffers, size_t n_buffers); + GGML_CALL bool ggml_backend_buffer_is_multi_buffer(ggml_backend_buffer_t buffer); + GGML_CALL void ggml_backend_multi_buffer_set_usage(ggml_backend_buffer_t buffer, enum ggml_backend_buffer_usage usage); + + // + // Backend + // + + typedef void * ggml_backend_context_t; + + struct ggml_backend_i { + const char * (*GGML_CALL get_name)(ggml_backend_t backend); + + void (*GGML_CALL free)(ggml_backend_t backend); + + // buffer allocation + ggml_backend_buffer_type_t (*GGML_CALL get_default_buffer_type)(ggml_backend_t backend); + + // (optional) asynchronous tensor data access + void (*GGML_CALL set_tensor_async)(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); + void (*GGML_CALL get_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + bool (*GGML_CALL cpy_tensor_async)(ggml_backend_t backend_src, ggml_backend_t backend_dst, const struct 
ggml_tensor * src, struct ggml_tensor * dst); + + // (optional) complete all pending operations + void (*GGML_CALL synchronize)(ggml_backend_t backend); + + // compute graph with a plan (not used currently) + // create a new plan for a graph + ggml_backend_graph_plan_t (*GGML_CALL graph_plan_create) (ggml_backend_t backend, const struct ggml_cgraph * cgraph); + void (*GGML_CALL graph_plan_free) (ggml_backend_t backend, ggml_backend_graph_plan_t plan); + // update the plan with a new graph - this should be faster than creating a new plan when the graph has the same topology + void (*GGML_CALL graph_plan_update) (ggml_backend_t backend, ggml_backend_graph_plan_t plan, const struct ggml_cgraph * cgraph); + // compute the graph with the plan + enum ggml_status (*GGML_CALL graph_plan_compute)(ggml_backend_t backend, ggml_backend_graph_plan_t plan); + + // compute graph without a plan (async) + enum ggml_status (*GGML_CALL graph_compute) (ggml_backend_t backend, struct ggml_cgraph * cgraph); + + // check if the backend can compute an operation + bool (*GGML_CALL supports_op)(ggml_backend_t backend, const struct ggml_tensor * op); + + // check if the backend can use tensors allocated in a buffer type + bool (*GGML_CALL supports_buft)(ggml_backend_t backend, ggml_backend_buffer_type_t buft); + + // check if the backend wants to run an operation, even if the weights are allocated in a CPU buffer + // these should be expensive operations with large batch sizes that may benefit from running on this backend + // even if the weight has to be copied from the CPU temporarily + bool (*GGML_CALL offload_op)(ggml_backend_t backend, const struct ggml_tensor * op); + + // (optional) event synchronization + // create a new event that can record events on this backend instance + ggml_backend_event_t (*GGML_CALL event_new) (ggml_backend_t backend); + void (*GGML_CALL event_free) (ggml_backend_event_t event); + // record an event on the backend instance that created it + void (*GGML_CALL 
event_record) (ggml_backend_event_t event); + // wait for an event on on a different backend instance + void (*GGML_CALL event_wait) (ggml_backend_t backend, ggml_backend_event_t event); + // block until an event is recorded + void (*GGML_CALL event_synchronize) (ggml_backend_event_t event); + }; + + struct ggml_backend { + ggml_guid_t guid; + + struct ggml_backend_i iface; + ggml_backend_context_t context; + }; + + struct ggml_backend_event { + ggml_backend_t backend; + void * context; + }; + + // + // Backend registry + // + + typedef ggml_backend_t (*GGML_CALL ggml_backend_init_fn)(const char * params, void * user_data); + + GGML_CALL void ggml_backend_register(const char * name, ggml_backend_init_fn init_fn, ggml_backend_buffer_type_t default_buffer_type, void * user_data); + +#ifdef __cplusplus +} +#endif diff --git a/ml/backend/ggml/ggml-backend.c b/ml/backend/ggml/ggml-backend.c new file mode 100644 index 000000000..49be166ce --- /dev/null +++ b/ml/backend/ggml/ggml-backend.c @@ -0,0 +1,2325 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +#include "ggml-backend-impl.h" +#include "ggml-alloc.h" +#include "ggml-impl.h" + +#include <assert.h> +#include <limits.h> +#include <stdarg.h> +#include <stdio.h> +#include <stdlib.h> +#include <string.h> + + +#define MAX(a, b) ((a) > (b) ? (a) : (b)) + +// backend buffer type + +const char * ggml_backend_buft_name(ggml_backend_buffer_type_t buft) { + return buft->iface.get_name(buft); +} + +GGML_CALL ggml_backend_buffer_t ggml_backend_buft_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + return buft->iface.alloc_buffer(buft, size); +} + +size_t ggml_backend_buft_get_alignment(ggml_backend_buffer_type_t buft) { + return buft->iface.get_alignment(buft); +} + +size_t ggml_backend_buft_get_max_size(ggml_backend_buffer_type_t buft) { + // get_max_size is optional, defaults to SIZE_MAX + if (buft->iface.get_max_size) { + return buft->iface.get_max_size(buft); + } + return SIZE_MAX; +} + +GGML_CALL size_t ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor) { + // get_alloc_size is optional, defaults to ggml_nbytes + if (buft->iface.get_alloc_size) { + size_t size = buft->iface.get_alloc_size(buft, tensor); + assert(size >= ggml_nbytes(tensor)); + return size; + } + return ggml_nbytes(tensor); +} + +bool ggml_backend_buft_is_host(ggml_backend_buffer_type_t buft) { + if (buft->iface.is_host) { + return buft->iface.is_host(buft); + } + return false; +} + +// backend buffer + +GGML_CALL ggml_backend_buffer_t ggml_backend_buffer_init( + ggml_backend_buffer_type_t buft, + struct ggml_backend_buffer_i iface, + ggml_backend_buffer_context_t context, + size_t size) { + ggml_backend_buffer_t buffer = malloc(sizeof(struct ggml_backend_buffer)); + + (*buffer) = (struct ggml_backend_buffer) { + /* .interface = 
*/ iface, + /* .buft = */ buft, + /* .context = */ context, + /* .size = */ size, + /* .usage = */ GGML_BACKEND_BUFFER_USAGE_ANY + }; + + return buffer; +} + +const char * ggml_backend_buffer_name(ggml_backend_buffer_t buffer) { + return buffer->iface.get_name(buffer); +} + +void ggml_backend_buffer_free(ggml_backend_buffer_t buffer) { + if (buffer == NULL) { + return; + } + + if (buffer->iface.free_buffer != NULL) { + buffer->iface.free_buffer(buffer); + } + +// TODO: this needs to be freed in cuda and hipblas backends because +// the cuda backend implementation compiled with msvc +#if !defined(GGML_USE_CUDA) && !defined(GGML_USE_HIPBLAS) + free(buffer); +#endif +} + +size_t ggml_backend_buffer_get_size(ggml_backend_buffer_t buffer) { + return buffer->size; +} + +void * ggml_backend_buffer_get_base(ggml_backend_buffer_t buffer) { + void * base = buffer->iface.get_base(buffer); + + GGML_ASSERT(base != NULL && "backend buffer base cannot be NULL"); + + return base; +} + +GGML_CALL void ggml_backend_buffer_init_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { + // init_tensor is optional + if (buffer->iface.init_tensor) { + buffer->iface.init_tensor(buffer, tensor); + } +} + +size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer) { + return ggml_backend_buft_get_alignment(ggml_backend_buffer_get_type(buffer)); +} + +size_t ggml_backend_buffer_get_max_size(ggml_backend_buffer_t buffer) { + return ggml_backend_buft_get_max_size(ggml_backend_buffer_get_type(buffer)); +} + +size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { + return ggml_backend_buft_get_alloc_size(ggml_backend_buffer_get_type(buffer), tensor); +} + +void ggml_backend_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + buffer->iface.clear(buffer, value); +} + +bool ggml_backend_buffer_is_host(ggml_backend_buffer_t buffer) { + return ggml_backend_buft_is_host(ggml_backend_buffer_get_type(buffer)); +} + +void 
ggml_backend_buffer_set_usage(ggml_backend_buffer_t buffer, enum ggml_backend_buffer_usage usage) { + buffer->usage = usage; + + // FIXME: add a generic callback to the buffer interface + if (ggml_backend_buffer_is_multi_buffer(buffer)) { + ggml_backend_multi_buffer_set_usage(buffer, usage); + } +} + +enum ggml_backend_buffer_usage ggml_backend_buffer_get_usage(ggml_backend_buffer_t buffer) { + return buffer->usage; +} + +ggml_backend_buffer_type_t ggml_backend_buffer_get_type(ggml_backend_buffer_t buffer) { + return buffer->buft; +} + +void ggml_backend_buffer_reset(ggml_backend_buffer_t buffer) { + if (buffer->iface.reset) { + buffer->iface.reset(buffer); + } +} + +bool ggml_backend_buffer_copy_tensor(const struct ggml_tensor * src, struct ggml_tensor * dst) { + ggml_backend_buffer_t dst_buf = dst->view_src ? dst->view_src->buffer : dst->buffer; + if (dst_buf->iface.cpy_tensor) { + return dst_buf->iface.cpy_tensor(dst_buf, src, dst); + } + return false; +} + +// backend + +ggml_guid_t ggml_backend_guid(ggml_backend_t backend) { + if (backend == NULL) { + return NULL; + } + return backend->guid; +} + +const char * ggml_backend_name(ggml_backend_t backend) { + if (backend == NULL) { + return "NULL"; + } + return backend->iface.get_name(backend); +} + +void ggml_backend_free(ggml_backend_t backend) { + if (backend == NULL) { + return; + } + + backend->iface.free(backend); +} + +ggml_backend_buffer_type_t ggml_backend_get_default_buffer_type(ggml_backend_t backend) { + return backend->iface.get_default_buffer_type(backend); +} + +ggml_backend_buffer_t ggml_backend_alloc_buffer(ggml_backend_t backend, size_t size) { + return ggml_backend_buft_alloc_buffer(ggml_backend_get_default_buffer_type(backend), size); +} + +size_t ggml_backend_get_alignment(ggml_backend_t backend) { + return ggml_backend_buft_get_alignment(ggml_backend_get_default_buffer_type(backend)); +} + +size_t ggml_backend_get_max_size(ggml_backend_t backend) { + return 
ggml_backend_buft_get_max_size(ggml_backend_get_default_buffer_type(backend)); +} + +void ggml_backend_tensor_set_async(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); + GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); + + if (backend->iface.set_tensor_async == NULL) { + ggml_backend_tensor_set(tensor, data, offset, size); + } else { + backend->iface.set_tensor_async(backend, tensor, data, offset, size); + } +} + +void ggml_backend_tensor_get_async(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); + GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); + + if (backend->iface.get_tensor_async == NULL) { + ggml_backend_tensor_get(tensor, data, offset, size); + } else { + backend->iface.get_tensor_async(backend, tensor, data, offset, size); + } +} + +GGML_CALL void ggml_backend_tensor_set(struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + ggml_backend_buffer_t buf = tensor->view_src ? tensor->view_src->buffer : tensor->buffer; + + GGML_ASSERT(buf != NULL && "tensor buffer not set"); + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); + GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); + + if (!size) { + return; + } + + buf->iface.set_tensor(buf, tensor, data, offset, size); +} + +GGML_CALL void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { + ggml_backend_buffer_t buf = tensor->view_src ? 
tensor->view_src->buffer : tensor->buffer; + + GGML_ASSERT(buf != NULL && "tensor buffer not set"); + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); + GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); + + if (!size) { + return; + } + + buf->iface.get_tensor(buf, tensor, data, offset, size); +} + +GGML_API GGML_CALL void ggml_backend_tensor_memset(struct ggml_tensor * tensor, uint8_t value, size_t offset, size_t size) { + ggml_backend_buffer_t buf = tensor->view_src ? tensor->view_src->buffer : tensor->buffer; + + GGML_ASSERT(buf != NULL && "tensor buffer not set"); + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); + GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); + + if (!size) { + return; + } + + GGML_ASSERT(buf->iface.memset_tensor != NULL && "memset not supported by backend buffer"); + + buf->iface.memset_tensor(buf, tensor, value, offset, size); +} + +void ggml_backend_synchronize(ggml_backend_t backend) { + if (backend->iface.synchronize == NULL) { + return; + } + + backend->iface.synchronize(backend); +} + +ggml_backend_graph_plan_t ggml_backend_graph_plan_create(ggml_backend_t backend, struct ggml_cgraph * cgraph) { + GGML_ASSERT(backend->iface.graph_plan_create != NULL); + + return backend->iface.graph_plan_create(backend, cgraph); +} + +void ggml_backend_graph_plan_free(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { + GGML_ASSERT(backend->iface.graph_plan_free != NULL); + + backend->iface.graph_plan_free(backend, plan); +} + +enum ggml_status ggml_backend_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { + GGML_ASSERT(backend->iface.graph_plan_compute != NULL); + + return backend->iface.graph_plan_compute(backend, plan); +} + +enum ggml_status ggml_backend_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { + enum ggml_status err = ggml_backend_graph_compute_async(backend, cgraph); + 
ggml_backend_synchronize(backend); + return err; +} + +enum ggml_status ggml_backend_graph_compute_async(ggml_backend_t backend, struct ggml_cgraph * cgraph) { + return backend->iface.graph_compute(backend, cgraph); +} + +bool ggml_backend_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { + return backend->iface.supports_op(backend, op); +} + +bool ggml_backend_supports_buft(ggml_backend_t backend, ggml_backend_buffer_type_t buft) { + return backend->iface.supports_buft(backend, buft); +} + +bool ggml_backend_offload_op(ggml_backend_t backend, const struct ggml_tensor * op) { + if (backend->iface.offload_op != NULL) { + return backend->iface.offload_op(backend, op); + } + return false; +} + +// backend copy + +static bool ggml_are_same_layout(const struct ggml_tensor * a, const struct ggml_tensor * b) { + if (a->type != b->type) { + return false; + } + for (int i = 0; i < GGML_MAX_DIMS; i++) { + if (a->ne[i] != b->ne[i]) { + return false; + } + if (a->nb[i] != b->nb[i]) { + return false; + } + } + return true; +} + +void ggml_backend_tensor_copy(struct ggml_tensor * src, struct ggml_tensor * dst) { + GGML_ASSERT(ggml_are_same_layout(src, dst) && "cannot copy tensors with different layouts"); + + if (src == dst) { + return; + } + + if (ggml_backend_buffer_is_host(src->buffer)) { + ggml_backend_tensor_set(dst, src->data, 0, ggml_nbytes(src)); + } else if (ggml_backend_buffer_is_host(dst->buffer)) { + ggml_backend_tensor_get(src, dst->data, 0, ggml_nbytes(src)); + } else if (!ggml_backend_buffer_copy_tensor(src, dst)) { +#ifndef NDEBUG + fprintf(stderr, "%s: warning: slow copy from %s to %s\n", __func__, ggml_backend_buffer_name(src->buffer), ggml_backend_buffer_name(dst->buffer)); +#endif + size_t nbytes = ggml_nbytes(src); + void * data = malloc(nbytes); + ggml_backend_tensor_get(src, data, 0, nbytes); + ggml_backend_tensor_set(dst, data, 0, nbytes); + free(data); + } +} + +void ggml_backend_tensor_copy_async(ggml_backend_t backend_src, 
ggml_backend_t backend_dst, struct ggml_tensor * src, struct ggml_tensor * dst) { + GGML_ASSERT(ggml_are_same_layout(src, dst) && "cannot copy tensors with different layouts"); + + if (src == dst) { + return; + } + + if (backend_dst->iface.cpy_tensor_async != NULL) { + if (backend_dst->iface.cpy_tensor_async(backend_src, backend_dst, src, dst)) { + return; + } + } + + // an async copy would normally happen after all the queued operations on both backends are completed + // to simulate the same behavior, we need to synchronize both backends first, and do a blocking copy + ggml_backend_synchronize(backend_src); + ggml_backend_synchronize(backend_dst); + ggml_backend_tensor_copy(src, dst); +} + +// events + +ggml_backend_event_t ggml_backend_event_new(ggml_backend_t backend) { + if (backend->iface.event_new == NULL) { + return NULL; + } + return backend->iface.event_new(backend); +} + +void ggml_backend_event_free(ggml_backend_event_t event) { + if (event == NULL) { + return; + } + event->backend->iface.event_free(event); +} + +void ggml_backend_event_record(ggml_backend_event_t event) { + GGML_ASSERT(event->backend->iface.event_record != NULL); + + event->backend->iface.event_record(event); +} + +void ggml_backend_event_synchronize(ggml_backend_event_t event) { + GGML_ASSERT(event->backend->iface.event_synchronize != NULL); + + event->backend->iface.event_synchronize(event); +} + +void ggml_backend_event_wait(ggml_backend_t backend, ggml_backend_event_t event) { + GGML_ASSERT(backend->iface.event_wait != NULL); + + backend->iface.event_wait(backend, event); +} + +// backend registry + +#define GGML_REG_MAX_BACKENDS 64 + +struct ggml_backend_reg { + char name[128]; + ggml_backend_init_fn init_fn; + ggml_backend_buffer_type_t default_buffer_type; + void * user_data; +}; + +static struct ggml_backend_reg ggml_backend_registry[GGML_REG_MAX_BACKENDS]; +static size_t ggml_backend_registry_count = 0; + +GGML_CALL static ggml_backend_t ggml_backend_reg_cpu_init(const char * 
params, void * user_data); + +GGML_CALL static void ggml_backend_registry_init(void) { + static bool initialized = false; + + if (initialized) { + return; + } + + initialized = true; + + ggml_backend_register("CPU", ggml_backend_reg_cpu_init, ggml_backend_cpu_buffer_type(), NULL); + + // add forward decls here to avoid including the backend headers +#ifdef GGML_USE_CUDA + extern GGML_CALL void ggml_backend_cuda_reg_devices(void); + ggml_backend_cuda_reg_devices(); +#endif + +#ifdef GGML_USE_SYCL + extern void ggml_backend_sycl_reg_devices(void); + ggml_backend_sycl_reg_devices(); +#endif + +#ifdef GGML_USE_METAL + extern GGML_CALL ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data); + extern GGML_CALL ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void); + ggml_backend_register("Metal", ggml_backend_reg_metal_init, ggml_backend_metal_buffer_type(), NULL); +#endif + +#ifdef GGML_USE_VULKAN + extern GGML_CALL int ggml_backend_vk_reg_devices(void); + ggml_backend_vk_reg_devices(); +#endif + +#ifdef GGML_USE_KOMPUTE + extern GGML_CALL void ggml_backend_kompute_reg_devices(void); + ggml_backend_kompute_reg_devices(); +#endif + +#ifdef GGML_USE_CANN + extern GGML_CALL int ggml_backend_cann_reg_devices(void); + ggml_backend_cann_reg_devices(); +#endif +} + +GGML_CALL void ggml_backend_register(const char * name, ggml_backend_init_fn init_fn, ggml_backend_buffer_type_t default_buffer_type, void * user_data) { + GGML_ASSERT(ggml_backend_registry_count < GGML_REG_MAX_BACKENDS); + + size_t id = ggml_backend_registry_count; + + ggml_backend_registry[id] = (struct ggml_backend_reg) { + /* .name = */ {0}, + /* .fn = */ init_fn, + /* .default_buffer_type = */ default_buffer_type, + /* .user_data = */ user_data, + }; + + snprintf(ggml_backend_registry[id].name, sizeof(ggml_backend_registry[id].name), "%s", name); + +#ifndef NDEBUG + fprintf(stderr, "%s: registered backend %s\n", __func__, name); +#endif + + ggml_backend_registry_count++; +} 
+ +size_t ggml_backend_reg_get_count(void) { + ggml_backend_registry_init(); + + return ggml_backend_registry_count; +} + +size_t ggml_backend_reg_find_by_name(const char * name) { + ggml_backend_registry_init(); + + for (size_t i = 0; i < ggml_backend_registry_count; i++) { + // TODO: case insensitive in a portable way + if (strcmp(ggml_backend_registry[i].name, name) == 0) { + return i; + } + } + + // not found + return SIZE_MAX; +} + +// init from backend:params string +ggml_backend_t ggml_backend_reg_init_backend_from_str(const char * backend_str) { + ggml_backend_registry_init(); + + const char * params = strchr(backend_str, ':'); + char backend_name[128]; + if (params == NULL) { + snprintf(backend_name, sizeof(backend_name), "%s", backend_str); + params = ""; + } else { + snprintf(backend_name, sizeof(backend_name), "%.*s", (int)(params - backend_str), backend_str); + params++; + } + + size_t backend_i = ggml_backend_reg_find_by_name(backend_name); + + if (backend_i == SIZE_MAX) { + fprintf(stderr, "%s: backend %s not found\n", __func__, backend_name); + return NULL; + } + + return ggml_backend_reg_init_backend(backend_i, params); +} + +const char * ggml_backend_reg_get_name(size_t i) { + ggml_backend_registry_init(); + + GGML_ASSERT(i < ggml_backend_registry_count); + return ggml_backend_registry[i].name; +} + +ggml_backend_t ggml_backend_reg_init_backend(size_t i, const char * params) { + ggml_backend_registry_init(); + + GGML_ASSERT(i < ggml_backend_registry_count); + return ggml_backend_registry[i].init_fn(params, ggml_backend_registry[i].user_data); +} + +ggml_backend_buffer_type_t ggml_backend_reg_get_default_buffer_type(size_t i) { + ggml_backend_registry_init(); + + GGML_ASSERT(i < ggml_backend_registry_count); + return ggml_backend_registry[i].default_buffer_type; +} + +ggml_backend_buffer_t ggml_backend_reg_alloc_buffer(size_t i, size_t size) { + ggml_backend_registry_init(); + + GGML_ASSERT(i < ggml_backend_registry_count); + return 
ggml_backend_buft_alloc_buffer(ggml_backend_registry[i].default_buffer_type, size); +} + +// backend CPU + +static const size_t TENSOR_ALIGNMENT = 32; // required for mmap as gguf only guarantees 32-byte alignment + +GGML_CALL static const char * ggml_backend_cpu_buffer_name(ggml_backend_buffer_t buffer) { + return "CPU"; + + GGML_UNUSED(buffer); +} + +GGML_CALL static void * ggml_backend_cpu_buffer_get_base(ggml_backend_buffer_t buffer) { + uintptr_t data = (uintptr_t)buffer->context; + + // align the buffer + if (data % TENSOR_ALIGNMENT != 0) { + data = GGML_PAD(data, TENSOR_ALIGNMENT); + } + + return (void *)data; +} + +GGML_CALL static void ggml_backend_cpu_buffer_free_buffer(ggml_backend_buffer_t buffer) { + free(buffer->context); +} + +GGML_CALL static void ggml_backend_cpu_buffer_memset_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, uint8_t value, size_t offset, size_t size) { + memset((char *)tensor->data + offset, value, size); + + GGML_UNUSED(buffer); +} + +GGML_CALL static void ggml_backend_cpu_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + memcpy((char *)tensor->data + offset, data, size); + + GGML_UNUSED(buffer); +} + +GGML_CALL static void ggml_backend_cpu_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { + memcpy(data, (const char *)tensor->data + offset, size); + + GGML_UNUSED(buffer); +} + +GGML_CALL static bool ggml_backend_cpu_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { + if (ggml_backend_buffer_is_host(src->buffer)) { + memcpy(dst->data, src->data, ggml_nbytes(src)); + return true; + } + return false; + + GGML_UNUSED(buffer); +} + +GGML_CALL static void ggml_backend_cpu_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + memset(buffer->context, value, buffer->size); +} + +static struct 
ggml_backend_buffer_i cpu_backend_buffer_i = { + /* .get_name = */ ggml_backend_cpu_buffer_name, + /* .free_buffer = */ ggml_backend_cpu_buffer_free_buffer, + /* .get_base = */ ggml_backend_cpu_buffer_get_base, + /* .init_tensor = */ NULL, // no initialization required + /* .memset_tensor = */ ggml_backend_cpu_buffer_memset_tensor, + /* .set_tensor = */ ggml_backend_cpu_buffer_set_tensor, + /* .get_tensor = */ ggml_backend_cpu_buffer_get_tensor, + /* .cpy_tensor = */ ggml_backend_cpu_buffer_cpy_tensor, + /* .clear = */ ggml_backend_cpu_buffer_clear, + /* .reset = */ NULL, +}; + +// for buffers from ptr, free is not called +static struct ggml_backend_buffer_i cpu_backend_buffer_i_from_ptr = { + /* .get_name = */ ggml_backend_cpu_buffer_name, + /* .free_buffer = */ NULL, // ptr is not owned by the buffer, so it does not need to be freed + /* .get_base = */ ggml_backend_cpu_buffer_get_base, + /* .init_tensor = */ NULL, // no initialization required + /* .memset_tensor = */ ggml_backend_cpu_buffer_memset_tensor, + /* .set_tensor = */ ggml_backend_cpu_buffer_set_tensor, + /* .get_tensor = */ ggml_backend_cpu_buffer_get_tensor, + /* .cpy_tensor = */ ggml_backend_cpu_buffer_cpy_tensor, + /* .clear = */ ggml_backend_cpu_buffer_clear, + /* .reset = */ NULL, +}; + +GGML_CALL static const char * ggml_backend_cpu_buffer_type_get_name(ggml_backend_buffer_type_t buft) { + return "CPU"; + + GGML_UNUSED(buft); +} + +GGML_CALL static ggml_backend_buffer_t ggml_backend_cpu_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + size += TENSOR_ALIGNMENT; // malloc may return an address that is not aligned + void * data = malloc(size); // TODO: use GGML_ALIGNED_MALLOC (move to ggml-impl.h) + if (data == NULL) { + fprintf(stderr, "%s: failed to allocate buffer of size %zu\n", __func__, size); + return NULL; + } + + return ggml_backend_buffer_init(buft, cpu_backend_buffer_i, data, size); +} + +GGML_CALL static size_t 
ggml_backend_cpu_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { + return TENSOR_ALIGNMENT; + + GGML_UNUSED(buft); +} + +GGML_CALL static bool ggml_backend_cpu_buffer_type_is_host(ggml_backend_buffer_type_t buft) { + return true; + + GGML_UNUSED(buft); +} + +GGML_CALL ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void) { + static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type = { + /* .iface = */ { + /* .get_name = */ ggml_backend_cpu_buffer_type_get_name, + /* .alloc_buffer = */ ggml_backend_cpu_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_cpu_buffer_type_get_alignment, + /* .get_max_size = */ NULL, // defaults to SIZE_MAX + /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes + /* .is_host = */ ggml_backend_cpu_buffer_type_is_host, + }, + /* .context = */ NULL, + }; + + return &ggml_backend_cpu_buffer_type; +} + +#ifdef GGML_USE_CPU_HBM + +// buffer type HBM + +#include <hbwmalloc.h> + +GGML_CALL static const char * ggml_backend_cpu_hbm_buffer_type_get_name(ggml_backend_buffer_type_t buft) { + return "CPU_HBM"; + + GGML_UNUSED(buft); +} + +GGML_CALL static const char * ggml_backend_cpu_hbm_buffer_get_name(ggml_backend_buffer_t buf) { + return "CPU_HBM"; + + GGML_UNUSED(buf); +} + +GGML_CALL static void ggml_backend_cpu_hbm_buffer_free_buffer(ggml_backend_buffer_t buffer) { + hbw_free(buffer->context); +} + +GGML_CALL static ggml_backend_buffer_t ggml_backend_cpu_hbm_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + //void * ptr = hbw_malloc(size); + void * ptr; + int result = hbw_posix_memalign(&ptr, ggml_backend_cpu_buffer_type_get_alignment(buft), size); + if (result != 0) { + fprintf(stderr, "failed to allocate HBM buffer of size %zu\n", size); + return NULL; + } + + ggml_backend_buffer_t buffer = ggml_backend_cpu_buffer_from_ptr(ptr, size); + buffer->buft = buft; + buffer->iface.get_name = ggml_backend_cpu_hbm_buffer_get_name; + buffer->iface.free_buffer = 
ggml_backend_cpu_hbm_buffer_free_buffer; + + return buffer; +} + +ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type(void) { + static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type_hbm = { + /* .iface = */ { + /* .get_name = */ ggml_backend_cpu_hbm_buffer_type_get_name, + /* .alloc_buffer = */ ggml_backend_cpu_hbm_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_cpu_buffer_type_get_alignment, + /* .get_max_size = */ NULL, // defaults to SIZE_MAX + /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes + /* .is_host = */ ggml_backend_cpu_buffer_type_is_host, + }, + /* .context = */ NULL, + }; + + return &ggml_backend_cpu_buffer_type_hbm; +} +#endif + +struct ggml_backend_cpu_context { + int n_threads; + ggml_threadpool_t threadpool; + + void * work_data; + size_t work_size; + + ggml_abort_callback abort_callback; + void * abort_callback_data; +}; + +GGML_CALL static const char * ggml_backend_cpu_name(ggml_backend_t backend) { + return "CPU"; + + GGML_UNUSED(backend); +} + +GGML_CALL static void ggml_backend_cpu_free(ggml_backend_t backend) { + struct ggml_backend_cpu_context * cpu_ctx = (struct ggml_backend_cpu_context *)backend->context; + free(cpu_ctx->work_data); + free(cpu_ctx); + free(backend); +} + +GGML_CALL static ggml_backend_buffer_type_t ggml_backend_cpu_get_default_buffer_type(ggml_backend_t backend) { + return ggml_backend_cpu_buffer_type(); + + GGML_UNUSED(backend); +} + +struct ggml_backend_plan_cpu { + struct ggml_cplan cplan; + struct ggml_cgraph cgraph; +}; + +GGML_CALL static ggml_backend_graph_plan_t ggml_backend_cpu_graph_plan_create(ggml_backend_t backend, const struct ggml_cgraph * cgraph) { + struct ggml_backend_cpu_context * cpu_ctx = (struct ggml_backend_cpu_context *)backend->context; + + struct ggml_backend_plan_cpu * cpu_plan = malloc(sizeof(struct ggml_backend_plan_cpu)); + + cpu_plan->cplan = ggml_graph_plan(cgraph, cpu_ctx->n_threads, cpu_ctx->threadpool); + cpu_plan->cgraph = *cgraph; // FIXME: 
deep copy + + if (cpu_plan->cplan.work_size > 0) { + cpu_plan->cplan.work_data = malloc(cpu_plan->cplan.work_size); + if (cpu_plan->cplan.work_data == NULL) { + free(cpu_plan); + return NULL; + } + } + + cpu_plan->cplan.abort_callback = cpu_ctx->abort_callback; + cpu_plan->cplan.abort_callback_data = cpu_ctx->abort_callback_data; + + return cpu_plan; +} + +GGML_CALL static void ggml_backend_cpu_graph_plan_free(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { + struct ggml_backend_plan_cpu * cpu_plan = (struct ggml_backend_plan_cpu *)plan; + + free(cpu_plan->cplan.work_data); + free(cpu_plan); + + GGML_UNUSED(backend); +} + +GGML_CALL static enum ggml_status ggml_backend_cpu_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { + struct ggml_backend_plan_cpu * cpu_plan = (struct ggml_backend_plan_cpu *)plan; + + return ggml_graph_compute(&cpu_plan->cgraph, &cpu_plan->cplan); + + GGML_UNUSED(backend); +} + +GGML_CALL static enum ggml_status ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { + struct ggml_backend_cpu_context * cpu_ctx = (struct ggml_backend_cpu_context *)backend->context; + + struct ggml_cplan cplan = ggml_graph_plan(cgraph, cpu_ctx->n_threads, cpu_ctx->threadpool); + + if (cpu_ctx->work_size < cplan.work_size) { + free(cpu_ctx->work_data); + cpu_ctx->work_data = malloc(cplan.work_size); + if (cpu_ctx->work_data == NULL) { + cpu_ctx->work_size = 0; + return GGML_STATUS_ALLOC_FAILED; + } + cpu_ctx->work_size = cplan.work_size; + } + cplan.work_data = cpu_ctx->work_data; + + cplan.abort_callback = cpu_ctx->abort_callback; + cplan.abort_callback_data = cpu_ctx->abort_callback_data; + + return ggml_graph_compute(cgraph, &cplan); +} + +GGML_CALL static bool ggml_backend_cpu_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { + switch (op->op) { + case GGML_OP_CPY: + return + op->type != GGML_TYPE_IQ2_XXS && + op->type != GGML_TYPE_IQ2_XS && + op->type != GGML_TYPE_IQ1_S && 
+ op->type != GGML_TYPE_IQ1_M; // missing type_traits.from_float + case GGML_OP_MUL_MAT: + return op->src[1]->type == GGML_TYPE_F32 || op->src[1]->type == ggml_internal_get_type_traits(op->src[0]->type).vec_dot_type; + case GGML_OP_ROPE_BACK: + return op->src[2] == NULL && (op->op_params[2] & 4) == 0; + case GGML_OP_IM2COL_BACK: + return op->src[0]->type == GGML_TYPE_F32 && op->src[1]->type == GGML_TYPE_F32; + default: + return true; + } + + GGML_UNUSED(backend); +} + +GGML_CALL static bool ggml_backend_cpu_supports_buft(ggml_backend_t backend, ggml_backend_buffer_type_t buft) { + return ggml_backend_buft_is_host(buft); + + GGML_UNUSED(backend); +} + +static struct ggml_backend_i cpu_backend_i = { + /* .get_name = */ ggml_backend_cpu_name, + /* .free = */ ggml_backend_cpu_free, + /* .get_default_buffer_type = */ ggml_backend_cpu_get_default_buffer_type, + /* .set_tensor_async = */ NULL, + /* .get_tensor_async = */ NULL, + /* .cpy_tensor_async = */ NULL, + /* .synchronize = */ NULL, + /* .graph_plan_create = */ ggml_backend_cpu_graph_plan_create, + /* .graph_plan_free = */ ggml_backend_cpu_graph_plan_free, + /* .graph_plan_update = */ NULL, + /* .graph_plan_compute = */ ggml_backend_cpu_graph_plan_compute, + /* .graph_compute = */ ggml_backend_cpu_graph_compute, + /* .supports_op = */ ggml_backend_cpu_supports_op, + /* .supports_buft = */ ggml_backend_cpu_supports_buft, + /* .offload_op = */ NULL, + /* .event_new = */ NULL, + /* .event_free = */ NULL, + /* .event_record = */ NULL, + /* .event_wait = */ NULL, + /* .event_synchronize = */ NULL, +}; + +static ggml_guid_t ggml_backend_cpu_guid(void) { + static ggml_guid guid = { 0xaa, 0x67, 0xc7, 0x43, 0x96, 0xe6, 0xa3, 0x8a, 0xe3, 0xaf, 0xea, 0x92, 0x36, 0xbc, 0xfc, 0x89 }; + return &guid; +} + +ggml_backend_t ggml_backend_cpu_init(void) { + struct ggml_backend_cpu_context * ctx = malloc(sizeof(struct ggml_backend_cpu_context)); + if (ctx == NULL) { + return NULL; + } + + ctx->n_threads = GGML_DEFAULT_N_THREADS; + 
ctx->threadpool = NULL; + ctx->work_data = NULL; + ctx->work_size = 0; + ctx->abort_callback = NULL; + ctx->abort_callback_data = NULL; + + ggml_backend_t cpu_backend = malloc(sizeof(struct ggml_backend)); + if (cpu_backend == NULL) { + free(ctx); + return NULL; + } + + *cpu_backend = (struct ggml_backend) { + /* .guid = */ ggml_backend_cpu_guid(), + /* .interface = */ cpu_backend_i, + /* .context = */ ctx + }; + return cpu_backend; +} + +GGML_CALL bool ggml_backend_is_cpu(ggml_backend_t backend) { + return backend != NULL && ggml_guid_matches(backend->guid, ggml_backend_cpu_guid()); +} + +void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads) { + GGML_ASSERT(ggml_backend_is_cpu(backend_cpu)); + + struct ggml_backend_cpu_context * ctx = (struct ggml_backend_cpu_context *)backend_cpu->context; + ctx->n_threads = n_threads; +} + +void ggml_backend_cpu_set_threadpool(ggml_backend_t backend_cpu, ggml_threadpool_t threadpool) { + GGML_ASSERT(ggml_backend_is_cpu(backend_cpu)); + + struct ggml_backend_cpu_context * ctx = (struct ggml_backend_cpu_context *)backend_cpu->context; + + if (ctx->threadpool && ctx->threadpool != threadpool) { + // already had a different threadpool, pause/suspend it before switching + ggml_threadpool_pause(ctx->threadpool); + } + ctx->threadpool = threadpool; +} + +void ggml_backend_cpu_set_abort_callback(ggml_backend_t backend_cpu, ggml_abort_callback abort_callback, void * abort_callback_data) { + GGML_ASSERT(ggml_backend_is_cpu(backend_cpu)); + + struct ggml_backend_cpu_context * ctx = (struct ggml_backend_cpu_context *)backend_cpu->context; + ctx->abort_callback = abort_callback; + ctx->abort_callback_data = abort_callback_data; +} + +GGML_CALL ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size) { + GGML_ASSERT((uintptr_t)ptr % TENSOR_ALIGNMENT == 0 && "buffer pointer must be aligned"); + return ggml_backend_buffer_init(ggml_backend_cpu_buffer_type(), cpu_backend_buffer_i_from_ptr, ptr, 
size); +} + +GGML_CALL static ggml_backend_t ggml_backend_reg_cpu_init(const char * params, void * user_data) { + return ggml_backend_cpu_init(); + + GGML_UNUSED(params); + GGML_UNUSED(user_data); +} + +// multi-buffer buffer + +struct ggml_backend_multi_buffer_context { + ggml_backend_buffer_t * buffers; + size_t n_buffers; +}; + +typedef struct ggml_backend_multi_buffer_context * ggml_backend_multi_buffer_context_t; + +GGML_CALL static const char * ggml_backend_multi_buffer_get_name(ggml_backend_buffer_t buffer) { + ggml_backend_multi_buffer_context_t ctx = (ggml_backend_multi_buffer_context_t) buffer->context; + + return ctx->buffers[0]->iface.get_name(ctx->buffers[0]); +} + +GGML_CALL static void ggml_backend_multi_buffer_free_buffer(ggml_backend_buffer_t buffer) { + ggml_backend_multi_buffer_context_t ctx = (ggml_backend_multi_buffer_context_t) buffer->context; + for (size_t i = 0; i < ctx->n_buffers; i++) { + ggml_backend_buffer_free(ctx->buffers[i]); + } + + free(ctx->buffers); + free(ctx); +} + +GGML_CALL static void ggml_backend_multi_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + ggml_backend_multi_buffer_context_t ctx = (ggml_backend_multi_buffer_context_t) buffer->context; + for (size_t i = 0; i < ctx->n_buffers; i++) { + ggml_backend_buffer_clear(ctx->buffers[i], value); + } +} + +static struct ggml_backend_buffer_i ggml_backend_multi_buffer_context_interface(void) { + static struct ggml_backend_buffer_i multi_backend_buffer_i = { + /* .get_name = */ ggml_backend_multi_buffer_get_name, + /* .free_buffer = */ ggml_backend_multi_buffer_free_buffer, + /* .get_base = */ NULL, + /* .init_tensor = */ NULL, + /* .memset_tensor = */ NULL, + /* .set_tensor = */ NULL, + /* .get_tensor = */ NULL, + /* .cpy_tensor = */ NULL, + /* .clear = */ ggml_backend_multi_buffer_clear, + /* .reset = */ NULL, + }; + + return multi_backend_buffer_i; +} + +GGML_CALL ggml_backend_buffer_t ggml_backend_multi_buffer_alloc_buffer(ggml_backend_buffer_t * buffers, 
size_t n_buffers) { + ggml_backend_multi_buffer_context_t ctx = (ggml_backend_multi_buffer_context_t) malloc(sizeof(struct ggml_backend_multi_buffer_context)); + ctx->n_buffers = n_buffers; + ctx->buffers = (ggml_backend_buffer_t *) malloc(n_buffers * sizeof(ggml_backend_buffer_t)); + + GGML_ASSERT(ctx->buffers != NULL); + + size_t total_size = 0; + for (size_t i = 0; i < n_buffers; i++) { + ctx->buffers[i] = buffers[i]; + total_size += ggml_backend_buffer_get_size(buffers[i]); + } + + return ggml_backend_buffer_init(buffers[0]->buft, ggml_backend_multi_buffer_context_interface(), ctx, total_size); +} + +GGML_CALL bool ggml_backend_buffer_is_multi_buffer(ggml_backend_buffer_t buffer) { + return buffer->iface.get_name == ggml_backend_multi_buffer_get_name; +} + +GGML_CALL void ggml_backend_multi_buffer_set_usage(ggml_backend_buffer_t buffer, enum ggml_backend_buffer_usage usage) { + GGML_ASSERT(ggml_backend_buffer_is_multi_buffer(buffer)); + ggml_backend_multi_buffer_context_t ctx = (ggml_backend_multi_buffer_context_t) buffer->context; + for (size_t i = 0; i < ctx->n_buffers; i++) { + ggml_backend_buffer_set_usage(ctx->buffers[i], usage); + } +} + +// creates a copy of the tensor with the same memory layout +static struct ggml_tensor * ggml_dup_tensor_layout(struct ggml_context * ctx, const struct ggml_tensor * tensor) { + struct ggml_tensor * dup = ggml_dup_tensor(ctx, tensor); + for (int i = 0; i < GGML_MAX_DIMS; i++) { + dup->nb[i] = tensor->nb[i]; + } + return dup; +} + +static bool ggml_is_view_op(enum ggml_op op) { + return op == GGML_OP_VIEW || op == GGML_OP_RESHAPE || op == GGML_OP_PERMUTE || op == GGML_OP_TRANSPOSE; +} + +// scheduler + +#ifndef GGML_SCHED_MAX_BACKENDS +#define GGML_SCHED_MAX_BACKENDS 16 +#endif + +#ifndef GGML_SCHED_MAX_SPLIT_INPUTS +#define GGML_SCHED_MAX_SPLIT_INPUTS GGML_MAX_SRC +#endif + +#ifndef GGML_SCHED_MAX_COPIES +#define GGML_SCHED_MAX_COPIES 4 +#endif + +struct ggml_backend_sched_split { + int backend_id; + int i_start; + int 
i_end; + struct ggml_tensor * inputs[GGML_SCHED_MAX_SPLIT_INPUTS]; + int n_inputs; + // graph view of this split + struct ggml_cgraph graph; +}; + +struct ggml_backend_sched { + bool is_reset; // true if the scheduler has been reset since the last graph split + bool is_alloc; + + int n_backends; + + ggml_backend_t backends[GGML_SCHED_MAX_BACKENDS]; + ggml_backend_buffer_type_t bufts[GGML_SCHED_MAX_BACKENDS]; + ggml_gallocr_t galloc; + + // hash map of the nodes in the graph + struct ggml_hash_set hash_set; + int * hv_tensor_backend_ids; // [hash_set.size] + struct ggml_tensor ** hv_tensor_copies; // [hash_set.size][n_backends][n_copies] + + int * node_backend_ids; // [graph_size] + int * leaf_backend_ids; // [graph_size] + + int * prev_node_backend_ids; // [graph_size] + int * prev_leaf_backend_ids; // [graph_size] + + // copy of the graph with modified inputs + struct ggml_cgraph graph; + + // graph splits + struct ggml_backend_sched_split * splits; + int n_splits; + int splits_capacity; + + // pipeline parallelism support + int n_copies; + int cur_copy; + ggml_backend_event_t events[GGML_SCHED_MAX_BACKENDS][GGML_SCHED_MAX_COPIES]; + struct ggml_tensor * graph_inputs[GGML_SCHED_MAX_SPLIT_INPUTS]; + int n_graph_inputs; + + struct ggml_context * ctx; + + ggml_backend_sched_eval_callback callback_eval; + void * callback_eval_user_data; + + char * context_buffer; + size_t context_buffer_size; + + bool debug; +}; + +#define hash_id(tensor) ggml_hash_find_or_insert(&sched->hash_set, tensor) +#define tensor_backend_id(tensor) sched->hv_tensor_backend_ids[hash_id(tensor)] +#define tensor_id_copy(id, backend_id, copy_id) sched->hv_tensor_copies[(id) * sched->n_backends * sched->n_copies + (backend_id) * sched->n_copies + (copy_id)] +#define tensor_copy(tensor, backend_id, copy_id) tensor_id_copy(hash_id(tensor), backend_id, copy_id) + +// returns the priority of the backend, lower id is higher priority +static int ggml_backend_sched_backend_id(ggml_backend_sched_t sched, 
ggml_backend_t backend) { + for (int i = 0; i < sched->n_backends; i++) { + if (sched->backends[i] == backend) { + return i; + } + } + return -1; +} + +static int ggml_backend_sched_backend_from_buffer(ggml_backend_sched_t sched, const struct ggml_tensor * tensor, const struct ggml_tensor * op) { + ggml_backend_buffer_t buffer = tensor->buffer; + if (buffer == NULL) { + return -1; + } + + // find highest prio backend that supports the buffer type and the op + for (int i = 0; i < sched->n_backends; i++) { + if (ggml_backend_supports_buft(sched->backends[i], buffer->buft) && + ggml_backend_supports_op(sched->backends[i], op)) { + return i; + } + } + +#ifndef NDEBUG + fprintf(stderr, "%s: warning: no backend supports op %s with a weight with buffer type %s used in tensor %s, the weight will need to be copied\n", + __func__, ggml_op_desc(tensor), ggml_backend_buffer_name(buffer), tensor->name); +#endif + + return -1; +} + +#if 0 +#define GGML_SCHED_MAX_SPLITS_DEBUG 4096 +static char causes[GGML_DEFAULT_GRAPH_SIZE*16 + GGML_SCHED_MAX_SPLITS_DEBUG*GGML_SCHED_MAX_SPLIT_INPUTS][128]; // debug only +#define SET_CAUSE(node, ...) sprintf(causes[hash_id(node)], __VA_ARGS__) +#define GET_CAUSE(node) causes[hash_id(node)] +#else +#define SET_CAUSE(node, ...) 
+#define GET_CAUSE(node) "" +#endif + +// returns the backend that should be used for the node based on the current locations +static int ggml_backend_sched_backend_id_from_cur(ggml_backend_sched_t sched, struct ggml_tensor * tensor) { + // TODO: use supports_op to check if the backend supports the op + + // assign pre-allocated nodes to their backend + int cur_backend_id = ggml_backend_sched_backend_from_buffer(sched, tensor, tensor); + if (cur_backend_id != -1) { + SET_CAUSE(tensor, "1.dst"); + return cur_backend_id; + } + + // view_src + if (tensor->view_src != NULL) { + cur_backend_id = ggml_backend_sched_backend_from_buffer(sched, tensor->view_src, tensor); + if (cur_backend_id != -1) { + SET_CAUSE(tensor, "1.vsrc"); + return cur_backend_id; + } + } + + if (tensor->buffer || (tensor->view_src && tensor->view_src->buffer)) { + // since the tensor is pre-allocated, it cannot be moved to another backend + GGML_ABORT("pre-allocated tensor in a backend that cannot run the operation"); + } + + // graph input + if (tensor->flags & GGML_TENSOR_FLAG_INPUT) { + cur_backend_id = sched->n_backends - 1; // last backend (assumed CPU) + SET_CAUSE(tensor, "1.inp"); + return cur_backend_id; + } + + // operations with weights are preferably run on the same backend as the weights + for (int i = 0; i < GGML_MAX_SRC; i++) { + const struct ggml_tensor * src = tensor->src[i]; + if (src == NULL) { + continue; + } + if (src->buffer != NULL && src->buffer->usage == GGML_BACKEND_BUFFER_USAGE_WEIGHTS) { + int src_backend_id = ggml_backend_sched_backend_from_buffer(sched, src, tensor); + // check if a backend with higher prio wants to offload the op + if (src_backend_id == sched->n_backends - 1) { + for (int b = 0; b < src_backend_id; b++) { + if (ggml_backend_supports_op(sched->backends[b], tensor) && ggml_backend_offload_op(sched->backends[b], tensor)) { + SET_CAUSE(tensor, "1.off"); + return b; + } + } + } + SET_CAUSE(tensor, "1.wgt%d", i); + return src_backend_id; + } + } + + return 
-1; +} + +static char * fmt_size(size_t size) { + static char buffer[128]; + if (size >= 1024*1024) { + snprintf(buffer, sizeof(buffer), "%zuM", size/1024/1024); + } else { + snprintf(buffer, sizeof(buffer), "%zuK", size/1024); + } + return buffer; +} + +static void ggml_backend_sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { + int cur_split = 0; + for (int i = 0; i < graph->n_nodes; i++) { + if (cur_split < sched->n_splits && i == sched->splits[cur_split].i_start) { + ggml_backend_t split_backend = sched->backends[sched->splits[cur_split].backend_id]; + fprintf(stderr, "\n## SPLIT #%d: %s # %d inputs: ", cur_split, ggml_backend_name(split_backend), + sched->splits[cur_split].n_inputs); + for (int j = 0; j < sched->splits[cur_split].n_inputs; j++) { + fprintf(stderr, "[%s (%5.5s)] ", sched->splits[cur_split].inputs[j]->name, + fmt_size(ggml_nbytes(sched->splits[cur_split].inputs[j]))); + } + fprintf(stderr, "\n"); + cur_split++; + } + struct ggml_tensor * node = graph->nodes[i]; + if (ggml_is_view_op(node->op)) { + continue; + } + ggml_backend_t tensor_backend = ggml_backend_sched_get_tensor_backend(sched, node); + fprintf(stderr, "node #%3d (%10.10s): %20.20s (%5.5s) [%5.5s %8.8s]:", i, ggml_op_name(node->op), node->name, + fmt_size(ggml_nbytes(node)), tensor_backend ? ggml_backend_name(tensor_backend) : "NULL", GET_CAUSE(node)); + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + continue; + } + ggml_backend_t src_backend = ggml_backend_sched_get_tensor_backend(sched, src); + fprintf(stderr, " %20.20s (%5.5s) [%5.5s %8.8s]", src->name, + fmt_size(ggml_nbytes(src)), src_backend ? ggml_backend_name(src_backend) : "NULL", GET_CAUSE(src)); + } + fprintf(stderr, "\n"); + } +} + +static bool ggml_backend_sched_buffer_supported(ggml_backend_sched_t sched, struct ggml_tensor * t, int backend_id) { + ggml_backend_buffer_t buf = t->view_src ? 
t->view_src->buffer : t->buffer; + ggml_backend_buffer_type_t buft = NULL; + + if (buf) { + // the tensor is already allocated + buft = buf->buft; + } else { + // see if the tensor already has a backend assigned, and use the buffer type of that backend + int tensor_backend_id = tensor_backend_id(t); + if (tensor_backend_id == -1 && t->view_src) { + tensor_backend_id = tensor_backend_id(t->view_src); + } + if (tensor_backend_id != -1) { + buft = sched->bufts[tensor_backend_id]; + } + } + + return buft != NULL && ggml_backend_supports_buft(sched->backends[backend_id], buft); +} + +static void ggml_backend_sched_set_if_supported(ggml_backend_sched_t sched, struct ggml_tensor * node, int cur_backend_id, int * node_backend_id) { + if (ggml_backend_supports_op(sched->backends[cur_backend_id], node)) { + *node_backend_id = cur_backend_id; + SET_CAUSE(node, "2.sup"); + } +} + +// assigns backends to ops and splits the graph into subgraphs that can be computed on the same backend +static void ggml_backend_sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { + // reset splits + sched->n_splits = 0; + sched->n_graph_inputs = 0; + sched->is_reset = false; + + struct ggml_init_params params = { + /* .mem_size = */ sched->context_buffer_size, + /* .mem_buffer = */ sched->context_buffer, + /* .no_alloc = */ true + }; + + ggml_free(sched->ctx); + + sched->ctx = ggml_init(params); + if (sched->ctx == NULL) { + GGML_ABORT("%s: failed to initialize context\n", __func__); + } + + // pass 1: assign backends to ops with pre-allocated inputs + for (int i = 0; i < graph->n_leafs; i++) { + struct ggml_tensor * leaf = graph->leafs[i]; + int * leaf_backend_id = &tensor_backend_id(leaf); + // do not overwrite user assignments + if (*leaf_backend_id == -1) { + *leaf_backend_id = ggml_backend_sched_backend_id_from_cur(sched, leaf); + } + } + + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + int * node_backend_id = 
&tensor_backend_id(node); + // do not overwrite user assignments + if (*node_backend_id == -1) { + *node_backend_id = ggml_backend_sched_backend_id_from_cur(sched, node); + +#if 0 + // src + if (node->op == GGML_OP_NONE) { + continue; + } + + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + continue; + } + int * src_backend_id = &tensor_backend_id(src); + if (*src_backend_id == -1) { + *src_backend_id = ggml_backend_sched_backend_id_from_cur(sched, src); + } + } +#endif + } + } + + // pass 2: expand current backend assignments + // assign the same backend to adjacent nodes + // expand gpu backends (i.e. non last prio) up and down, ignoring cpu (the lowest priority backend) + // thus, cpu will never be used unless weights are on cpu, or there are no gpu ops between cpu ops + // ops unsupported by the backend being expanded will be left unassigned so that they can be assigned later when the locations of its inputs are known + // expand gpu down + { + int cur_backend_id = -1; + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + if (ggml_is_view_op(node->op)) { + continue; + } + int * node_backend_id = &tensor_backend_id(node); + if (*node_backend_id != -1) { + if (*node_backend_id == sched->n_backends - 1) { + // skip cpu (lowest prio backend) + cur_backend_id = -1; + } else { + cur_backend_id = *node_backend_id; + } + } else if (cur_backend_id != -1) { + ggml_backend_sched_set_if_supported(sched, node, cur_backend_id, node_backend_id); + } + } + } + // expand gpu up + { + int cur_backend_id = -1; + for (int i = graph->n_nodes - 1; i >= 0; i--) { + struct ggml_tensor * node = graph->nodes[i]; + if (ggml_is_view_op(node->op)) { + continue; + } + int * node_backend_id = &tensor_backend_id(node); + if (*node_backend_id != -1) { + if (*node_backend_id == sched->n_backends - 1) { + // skip cpu (lowest prio backend) + cur_backend_id = -1; + } else { + cur_backend_id = 
*node_backend_id; + } + } else if (cur_backend_id != -1) { + ggml_backend_sched_set_if_supported(sched, node, cur_backend_id, node_backend_id); + } + } + } + // expand rest down + { + int cur_backend_id = -1; + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + if (ggml_is_view_op(node->op)) { + continue; + } + int * node_backend_id = &tensor_backend_id(node); + if (*node_backend_id != -1) { + cur_backend_id = *node_backend_id; + } else if (cur_backend_id != -1) { + ggml_backend_sched_set_if_supported(sched, node, cur_backend_id, node_backend_id); + } + } + } + // expand rest up + { + int cur_backend_id = -1; + for (int i = graph->n_nodes - 1; i >= 0; i--) { + struct ggml_tensor * node = graph->nodes[i]; + if (ggml_is_view_op(node->op)) { + continue; + } + int * node_backend_id = &tensor_backend_id(node); + if (*node_backend_id != -1) { + cur_backend_id = *node_backend_id; + } else if (cur_backend_id != -1) { + ggml_backend_sched_set_if_supported(sched, node, cur_backend_id, node_backend_id); + } + } + } + + // pass 3: upgrade nodes to higher prio backends with compatible buffer types + // if the tensor is already in the same buffer type (*) as another higher priority backend, we should move it there + // however, we also need to verify that the sources are in compatible buffer types + // (*) the actual requirement is more relaxed, the buffer type of the backend should be supported by all the users of this tensor further down the graph + // however, this is slow to verify, so we have a more strict requirement that the buffer type is the same + // this is not uncommon since multiple backends can use host memory, with the same buffer type (eg. 
BLAS and CPU) + // additionally, set remaining unassigned nodes to the backend with the most supported inputs + // only nodes that could not be assigned during expansion due to the backend not supporting the op should be unassigned at this point + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + if (ggml_is_view_op(node->op)) { + continue; + } + int * node_backend_id = &tensor_backend_id(node); + if (*node_backend_id == -1) { + // unassigned node: find the backend with the most supported inputs + int n_supported_best = -1; + for (int b = 0; b < sched->n_backends; b++) { + if (ggml_backend_supports_op(sched->backends[b], node)) { + int n_supported = 0; + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + continue; + } + if ((tensor_backend_id(src) != -1 || tensor_backend_id(src->view_src) != -1) && ggml_backend_sched_buffer_supported(sched, src, b)) { + n_supported++; + } + } + if (n_supported > n_supported_best) { + n_supported_best = n_supported; + *node_backend_id = b; + SET_CAUSE(node, "3.best"); + } + } + } + } else { + // assigned node: upgrade to higher prio backend if possible + for (int b = 0; b < *node_backend_id; b++) { + if (sched->bufts[b] == sched->bufts[*node_backend_id] && ggml_backend_supports_op(sched->backends[b], node)) { + bool supported = true; + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + continue; + } + if (!ggml_backend_sched_buffer_supported(sched, src, b)) { + supported = false; + break; + } + } + if (supported) { + *node_backend_id = b; + SET_CAUSE(node, "3.upg"); + break; + } + } + } + } + } + + // pass 4: assign backends to remaining src from dst and view_src + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + int * cur_backend_id = &tensor_backend_id(node); + if (node->view_src != NULL && *cur_backend_id == -1) { + *cur_backend_id = 
tensor_backend_id(node->view_src); + SET_CAUSE(node, "4.vsrc"); + } + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + continue; + } + int * src_backend_id = &tensor_backend_id(src); + if (*src_backend_id == -1) { + if (src->view_src != NULL) { + // views are always on the same backend as the source + *src_backend_id = tensor_backend_id(src->view_src); + SET_CAUSE(src, "4.vsrc"); + } else { + *src_backend_id = *cur_backend_id; + SET_CAUSE(src, "4.cur"); + } + } + } + } + + // pass 5: split graph, find tensors that need to be copied + { + int i_split = 0; + struct ggml_backend_sched_split * split = &sched->splits[0]; + // find the backend of the first split, skipping view ops + int i = 0; + for (; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + if (!ggml_is_view_op(node->op)) { + split->backend_id = tensor_backend_id(node); + break; + } + } + split->i_start = 0; + split->n_inputs = 0; + int cur_backend_id = split->backend_id; + for (; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + + if (ggml_is_view_op(node->op)) { + continue; + } + + const int node_backend_id = tensor_backend_id(node); + + assert(node_backend_id != -1); // all nodes should be assigned by now + + // check if we should start a new split based on the sources of the current node + bool need_new_split = false; + if (node_backend_id == cur_backend_id && split->n_inputs > 0) { + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + continue; + } + // check if a weight is on a different backend + // by starting a new split, the memory of the previously offloaded weights can be reused + if (src->buffer != NULL && src->buffer->usage == GGML_BACKEND_BUFFER_USAGE_WEIGHTS) { + int src_backend_id = tensor_backend_id(src); + if (src_backend_id != cur_backend_id) { + need_new_split = true; + break; + } + } + // check if the split has too many inputs 
+ // FIXME: count the number of inputs instead of only checking when full + if (split->n_inputs == GGML_SCHED_MAX_SPLIT_INPUTS) { + const size_t id = hash_id(src); + int src_backend_id = sched->hv_tensor_backend_ids[id]; + bool supported = ggml_backend_sched_buffer_supported(sched, src, cur_backend_id); + if (src_backend_id != cur_backend_id && tensor_id_copy(id, cur_backend_id, 0) == NULL && !supported) { + //printf("starting new split because of too many inputs: node %s, input %s\n", node->name, src->name); + need_new_split = true; + break; + } + } + } + } + + if (node_backend_id != cur_backend_id || need_new_split) { + split->i_end = i; + i_split++; + if (i_split >= sched->splits_capacity) { + sched->splits_capacity *= 2; + sched->splits = realloc(sched->splits, sched->splits_capacity * sizeof(struct ggml_backend_sched_split)); + GGML_ASSERT(sched->splits != NULL); + } + split = &sched->splits[i_split]; + split->backend_id = node_backend_id; + split->i_start = i; + split->n_inputs = 0; + cur_backend_id = node_backend_id; + } + + // find inputs that are not on the same backend + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + continue; + } + + size_t src_id = hash_id(src); + const int src_backend_id = sched->hv_tensor_backend_ids[src_id]; + assert(src_backend_id != -1); // all inputs should be assigned by now + + if (src->flags & GGML_TENSOR_FLAG_INPUT && sched->n_copies > 1) { + if (tensor_id_copy(src_id, src_backend_id, 0) == NULL) { + ggml_backend_t backend = sched->backends[src_backend_id]; + for (int c = 0; c < sched->n_copies; c++) { + struct ggml_tensor * tensor_copy; + if (c == sched->cur_copy) { + tensor_copy = src; // use the original tensor as the current copy + } else { + tensor_copy = ggml_dup_tensor_layout(sched->ctx, src); + ggml_format_name(tensor_copy, "%s#%s#%d", ggml_backend_name(backend), src->name, c); + } + if (sched->n_copies > 1) { + ggml_set_input(tensor_copy); + 
ggml_set_output(tensor_copy); // prevent ggml-alloc from overwriting the tensor + } + tensor_id_copy(src_id, src_backend_id, c) = tensor_copy; + SET_CAUSE(tensor_copy, "4.cpy"); + } + int n_graph_inputs = sched->n_graph_inputs++; + GGML_ASSERT(n_graph_inputs < GGML_SCHED_MAX_SPLIT_INPUTS); + sched->graph_inputs[n_graph_inputs] = src; + } + } + + if (src_backend_id != cur_backend_id && !ggml_backend_sched_buffer_supported(sched, src, cur_backend_id)) { + // create a copy of the input in the split's backend + if (tensor_id_copy(src_id, cur_backend_id, 0) == NULL) { + ggml_backend_t backend = sched->backends[cur_backend_id]; + for (int c = 0; c < sched->n_copies; c++) { + struct ggml_tensor * tensor_copy = ggml_dup_tensor_layout(sched->ctx, src); + ggml_format_name(tensor_copy, "%s#%s#%d", ggml_backend_name(backend), src->name, c); + if (sched->n_copies > 1) { + ggml_set_input(tensor_copy); + ggml_set_output(tensor_copy); // prevent ggml-alloc from overwriting the tensor + } + tensor_id_copy(src_id, cur_backend_id, c) = tensor_copy; + SET_CAUSE(tensor_copy, "4.cpy"); + } + int n_inputs = split->n_inputs++; + GGML_ASSERT(n_inputs < GGML_SCHED_MAX_SPLIT_INPUTS); + split->inputs[n_inputs] = src; + } + node->src[j] = tensor_id_copy(src_id, cur_backend_id, sched->cur_copy); + } + } + } + split->i_end = graph->n_nodes; + sched->n_splits = i_split + 1; + } + + if (sched->debug) { + ggml_backend_sched_print_assignments(sched, graph); + } + + // swap node_backend_ids and leaf _backend_ids with prevs + { + int * tmp = sched->node_backend_ids; + sched->node_backend_ids = sched->prev_node_backend_ids; + sched->prev_node_backend_ids = tmp; + + tmp = sched->leaf_backend_ids; + sched->leaf_backend_ids = sched->prev_leaf_backend_ids; + sched->prev_leaf_backend_ids = tmp; + } + + int graph_size = MAX(graph->n_nodes, graph->n_leafs) + sched->n_splits*GGML_SCHED_MAX_SPLIT_INPUTS*2*sched->n_copies; + if (sched->graph.size < graph_size) { + sched->graph.size = graph_size; + 
sched->graph.nodes = realloc(sched->graph.nodes, graph_size * sizeof(struct ggml_tensor *)); + sched->graph.leafs = realloc(sched->graph.leafs, graph_size * sizeof(struct ggml_tensor *)); + GGML_ASSERT(sched->graph.nodes != NULL); + GGML_ASSERT(sched->graph.leafs != NULL); + } + sched->graph.n_nodes = 0; + sched->graph.n_leafs = 0; + + struct ggml_cgraph * graph_copy = &sched->graph; + + for (int i = 0; i < sched->n_splits; i++) { + struct ggml_backend_sched_split * split = &sched->splits[i]; + split->graph = ggml_graph_view(graph, split->i_start, split->i_end); + + // add inputs to the graph copy so that they are allocated by ggml-alloc at the start of the split + for (int j = 0; j < split->n_inputs; j++) { + assert(graph_copy->size > (graph_copy->n_nodes + 1)); + + struct ggml_tensor * input = split->inputs[j]; + const size_t input_id = hash_id(input); + struct ggml_tensor * input_cpy = tensor_id_copy(input_id, split->backend_id, sched->cur_copy); + + // add a dependency to the input source so that it is not freed before the copy is done + struct ggml_tensor * input_dep = ggml_view_tensor(sched->ctx, input); + input_dep->src[0] = input; + sched->node_backend_ids[graph_copy->n_nodes] = sched->hv_tensor_backend_ids[input_id]; + graph_copy->nodes[graph_copy->n_nodes++] = input_dep; + + // add a dependency to the input copy so that it is allocated at the start of the split + sched->node_backend_ids[graph_copy->n_nodes] = split->backend_id; + graph_copy->nodes[graph_copy->n_nodes++] = input_cpy; + } + + for (int j = split->i_start; j < split->i_end; j++) { + assert(graph_copy->size > graph_copy->n_nodes); + sched->node_backend_ids[graph_copy->n_nodes] = tensor_backend_id(graph->nodes[j]); + graph_copy->nodes[graph_copy->n_nodes++] = graph->nodes[j]; + } + } + + if (sched->n_copies > 1) { + // add input copies as leafs so that they are allocated first + for (int i = 0; i < sched->n_graph_inputs; i++) { + struct ggml_tensor * input = sched->graph_inputs[i]; + size_t id 
= hash_id(input); + int backend_id = tensor_backend_id(input); + for (int c = 0; c < sched->n_copies; c++) { + struct ggml_tensor * input_cpy = tensor_id_copy(id, backend_id, c); + sched->leaf_backend_ids[graph_copy->n_leafs] = backend_id; + assert(graph_copy->size > graph_copy->n_leafs); + graph_copy->leafs[graph_copy->n_leafs++] = input_cpy; + } + } + + for (int i = 0; i < sched->n_splits; i++) { + struct ggml_backend_sched_split * split = &sched->splits[i]; + int backend_id = split->backend_id; + for (int j = 0; j < split->n_inputs; j++) { + struct ggml_tensor * input = split->inputs[j]; + size_t id = hash_id(input); + for (int c = 0; c < sched->n_copies; c++) { + struct ggml_tensor * input_cpy = tensor_id_copy(id, backend_id, c); + sched->leaf_backend_ids[graph_copy->n_leafs] = backend_id; + assert(graph_copy->size > graph_copy->n_leafs); + graph_copy->leafs[graph_copy->n_leafs++] = input_cpy; + } + } + } + } + + // add leafs from the original graph + for (int i = 0; i < graph->n_leafs; i++) { + struct ggml_tensor * leaf = graph->leafs[i]; + sched->leaf_backend_ids[graph_copy->n_leafs] = tensor_backend_id(leaf); + assert(graph_copy->size > graph_copy->n_leafs); + graph_copy->leafs[graph_copy->n_leafs++] = leaf; + } +} + +static bool ggml_backend_sched_alloc_splits(ggml_backend_sched_t sched) { + bool backend_ids_changed = false; + for (int i = 0; i < sched->graph.n_nodes; i++) { + if (sched->node_backend_ids[i] != sched->prev_node_backend_ids[i] && + sched->bufts[sched->node_backend_ids[i]] != sched->bufts[sched->prev_node_backend_ids[i]]) { + backend_ids_changed = true; + break; + } + } + if (!backend_ids_changed) { + for (int i = 0; i < sched->graph.n_leafs; i++) { + if (sched->leaf_backend_ids[i] != sched->prev_leaf_backend_ids[i] && + sched->bufts[sched->leaf_backend_ids[i]] != sched->bufts[sched->prev_leaf_backend_ids[i]]) { + backend_ids_changed = true; + break; + } + } + } + + // allocate graph + if (backend_ids_changed || 
!ggml_gallocr_alloc_graph(sched->galloc, &sched->graph)) { + // the re-allocation may cause the split inputs to be moved to a different address + ggml_backend_sched_synchronize(sched); +#ifndef NDEBUG + fprintf(stderr, "%s: failed to allocate graph, reserving (backend_ids_changed = %d)\n", __func__, backend_ids_changed); +#endif + ggml_gallocr_reserve_n(sched->galloc, &sched->graph, sched->node_backend_ids, sched->leaf_backend_ids); + if (!ggml_gallocr_alloc_graph(sched->galloc, &sched->graph)) { + fprintf(stderr, "%s: failed to allocate graph\n", __func__); + return false; + } + } + + return true; +} + +static enum ggml_status ggml_backend_sched_compute_splits(ggml_backend_sched_t sched) { + struct ggml_backend_sched_split * splits = sched->splits; + + for (int i = 0; i < sched->n_splits; i++) { + struct ggml_backend_sched_split * split = &splits[i]; + int split_backend_id = split->backend_id; + ggml_backend_t split_backend = sched->backends[split_backend_id]; + + // copy the input tensors to the split backend + for (int j = 0; j < split->n_inputs; j++) { + ggml_backend_t input_backend = ggml_backend_sched_get_tensor_backend(sched, split->inputs[j]); + struct ggml_tensor * input = split->inputs[j]; + struct ggml_tensor * input_cpy = tensor_copy(input, split_backend_id, sched->cur_copy); + + if (input->flags & GGML_TENSOR_FLAG_INPUT) { + // inputs from the user must be copied immediately to prevent the user overwriting the data before the copy is done + if (sched->events[split_backend_id][sched->cur_copy] != NULL) { + ggml_backend_event_synchronize(sched->events[split_backend_id][sched->cur_copy]); + } else { + ggml_backend_synchronize(split_backend); + } + ggml_backend_tensor_copy(input, input_cpy); + } else { + // wait for the split backend to finish using the input before overwriting it + if (sched->events[split_backend_id][sched->cur_copy] != NULL) { + ggml_backend_event_wait(split_backend, sched->events[split_backend_id][sched->cur_copy]); + } else { + 
ggml_backend_synchronize(split_backend); + } + // try async copy, but if not possible, we can still use a sync copy without synchronizing the dst backend, since we handle the synchronization here with multiple copies and events + // TODO: add public function to facilitate this, since applications do not have direct access to the backend interface + if (!split_backend->iface.cpy_tensor_async || !split_backend->iface.cpy_tensor_async(input_backend, split_backend, input, input_cpy)) { + ggml_backend_synchronize(input_backend); + if (sched->events[split_backend_id][sched->cur_copy] != NULL) { + ggml_backend_event_synchronize(sched->events[split_backend_id][sched->cur_copy]); + } else { + ggml_backend_synchronize(split_backend); + } + ggml_backend_tensor_copy(input, input_cpy); + } + } + } + + if (!sched->callback_eval) { + enum ggml_status ec = ggml_backend_graph_compute_async(split_backend, &split->graph); + if (ec != GGML_STATUS_SUCCESS) { + return ec; + } + } else { + // similar to ggml_backend_compare_graph_backend + for (int j0 = 0; j0 < split->graph.n_nodes; j0++) { + struct ggml_tensor * t = split->graph.nodes[j0]; + + // check if the user needs data from this node + bool need = sched->callback_eval(t, true, sched->callback_eval_user_data); + + int j1 = j0; + + // determine the range [j0, j1] of nodes that can be computed together + while (!need && j1 < split->graph.n_nodes - 1) { + t = split->graph.nodes[++j1]; + need = sched->callback_eval(t, true, sched->callback_eval_user_data); + } + + struct ggml_cgraph gv = ggml_graph_view(&split->graph, j0, j1 + 1); + + enum ggml_status ec = ggml_backend_graph_compute_async(split_backend, &gv); + if (ec != GGML_STATUS_SUCCESS) { + return ec; + } + + // TODO: pass backend to the callback, then the user can decide if they want to synchronize + ggml_backend_synchronize(split_backend); + + if (need && !sched->callback_eval(t, false, sched->callback_eval_user_data)) { + break; + } + + j0 = j1; + } + } + + // record the event 
of this copy + if (split->n_inputs > 0) { + if (sched->events[split_backend_id][sched->cur_copy] != NULL) { + ggml_backend_event_record(sched->events[split_backend_id][sched->cur_copy]); + } + } + } + + sched->cur_copy = (sched->cur_copy + 1) % sched->n_copies; + + return GGML_STATUS_SUCCESS; +} + +ggml_backend_sched_t ggml_backend_sched_new( + ggml_backend_t * backends, + ggml_backend_buffer_type_t * bufts, + int n_backends, + size_t graph_size, + bool parallel) { + GGML_ASSERT(n_backends > 0); + GGML_ASSERT(n_backends <= GGML_SCHED_MAX_BACKENDS); + GGML_ASSERT(ggml_backend_is_cpu(backends[n_backends - 1])); // last backend must be CPU + + struct ggml_backend_sched * sched = calloc(1, sizeof(struct ggml_backend_sched)); + + sched->debug = getenv("GGML_SCHED_DEBUG") != NULL; + sched->n_backends = n_backends; + sched->n_copies = parallel ? GGML_SCHED_MAX_COPIES : 1; + + // initialize hash table + // FIXME: needs to be size*2 to account for leafs (do it in graph_split instead) + sched->hash_set = ggml_hash_set_new(graph_size); + sched->hv_tensor_backend_ids = malloc(sched->hash_set.size * sizeof(sched->hv_tensor_backend_ids[0])); + sched->hv_tensor_copies = malloc(sched->hash_set.size * sched->n_backends * sched->n_copies * sizeof(struct ggml_tensor *)); + + const size_t ggml_sched_max_splits = graph_size; // at most there is one split for each node in the graph + const size_t nodes_size = graph_size + ggml_sched_max_splits*GGML_SCHED_MAX_SPLIT_INPUTS*2; + sched->node_backend_ids = calloc(nodes_size, sizeof(sched->node_backend_ids[0])); + sched->leaf_backend_ids = calloc(nodes_size, sizeof(sched->leaf_backend_ids[0])); + sched->prev_node_backend_ids = calloc(nodes_size, sizeof(sched->prev_node_backend_ids[0])); + sched->prev_leaf_backend_ids = calloc(nodes_size, sizeof(sched->prev_leaf_backend_ids[0])); + + sched->context_buffer_size = ggml_sched_max_splits*GGML_SCHED_MAX_SPLIT_INPUTS*2*sizeof(struct ggml_tensor) + ggml_graph_overhead_custom(graph_size, false); + 
sched->context_buffer = malloc(sched->context_buffer_size); + + const int initial_splits_capacity = 16; + sched->splits = calloc(initial_splits_capacity, sizeof(sched->splits[0])); + sched->splits_capacity = initial_splits_capacity; + + for (int b = 0; b < n_backends; b++) { + sched->backends[b] = backends[b]; + sched->bufts[b] = bufts ? bufts[b] : ggml_backend_get_default_buffer_type(backends[b]); + GGML_ASSERT(ggml_backend_supports_buft(backends[b], sched->bufts[b])); + if (sched->n_copies > 1) { + for (int c = 0; c < sched->n_copies; c++) { + sched->events[b][c] = ggml_backend_event_new(backends[b]); + } + } + } + + sched->galloc = ggml_gallocr_new_n(sched->bufts, n_backends); + + ggml_backend_sched_reset(sched); + + return sched; +} + +void ggml_backend_sched_free(ggml_backend_sched_t sched) { + if (sched == NULL) { + return; + } + for (int b = 0; b < sched->n_backends; b++) { + for (int c = 0; c < sched->n_copies; c++) { + ggml_backend_event_free(sched->events[b][c]); + } + } + ggml_gallocr_free(sched->galloc); + ggml_free(sched->ctx); + ggml_hash_set_free(&sched->hash_set); + free(sched->splits); + free(sched->hv_tensor_backend_ids); + free(sched->hv_tensor_copies); + free(sched->node_backend_ids); + free(sched->leaf_backend_ids); + free(sched->prev_node_backend_ids); + free(sched->prev_leaf_backend_ids); + free(sched->context_buffer); + free(sched->graph.nodes); + free(sched->graph.leafs); + free(sched); +} + +void ggml_backend_sched_reset(ggml_backend_sched_t sched) { + // reset state for the next run + if (!sched->is_reset) { + ggml_hash_set_reset(&sched->hash_set); + memset(sched->hv_tensor_backend_ids, -1, sched->hash_set.size * sizeof(sched->hv_tensor_backend_ids[0])); + memset(sched->hv_tensor_copies, 0, sched->hash_set.size * sched->n_backends * sched->n_copies * sizeof(struct ggml_tensor *)); + sched->is_reset = true; + } + sched->is_alloc = false; +} + +bool ggml_backend_sched_reserve(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph) { 
+ GGML_ASSERT((int)sched->hash_set.size >= measure_graph->n_nodes + measure_graph->n_leafs); + + ggml_backend_sched_split_graph(sched, measure_graph); + + if (!ggml_gallocr_reserve_n(sched->galloc, &sched->graph, sched->node_backend_ids, sched->leaf_backend_ids)) { + return false; + } + + ggml_backend_sched_reset(sched); + ggml_backend_sched_synchronize(sched); + + return true; +} + +bool ggml_backend_sched_alloc_graph(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { + GGML_ASSERT((int)sched->hash_set.size >= graph->n_nodes + graph->n_leafs); + + ggml_backend_sched_split_graph(sched, graph); + + + if (!ggml_backend_sched_alloc_splits(sched)) { + return false; + } + + sched->is_alloc = true; + + return true; +} + +enum ggml_status ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { + enum ggml_status err = ggml_backend_sched_graph_compute_async(sched, graph); + ggml_backend_sched_synchronize(sched); + return err; +} + +enum ggml_status ggml_backend_sched_graph_compute_async(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { + if (!sched->is_reset && !sched->is_alloc) { + ggml_backend_sched_reset(sched); + } + + if (!sched->is_alloc) { + if (!ggml_backend_sched_alloc_graph(sched, graph)) { + return GGML_STATUS_ALLOC_FAILED; + } + } + + return ggml_backend_sched_compute_splits(sched); +} + +void ggml_backend_sched_synchronize(ggml_backend_sched_t sched) { + for (int i = 0; i < sched->n_backends; i++) { + ggml_backend_synchronize(sched->backends[i]); + } +} + +void ggml_backend_sched_set_eval_callback(ggml_backend_sched_t sched, ggml_backend_sched_eval_callback callback, void * user_data) { + sched->callback_eval = callback; + sched->callback_eval_user_data = user_data; +} + +int ggml_backend_sched_get_n_splits(ggml_backend_sched_t sched) { + return sched->n_splits; +} + +int ggml_backend_sched_get_n_copies(ggml_backend_sched_t sched) { + return sched->n_copies; +} + +int 
ggml_backend_sched_get_n_backends(ggml_backend_sched_t sched) { + return sched->n_backends; +} + +ggml_backend_t ggml_backend_sched_get_backend(ggml_backend_sched_t sched, int i) { + GGML_ASSERT(i >= 0 && i < sched->n_backends); + return sched->backends[i]; +} + +size_t ggml_backend_sched_get_buffer_size(ggml_backend_sched_t sched, ggml_backend_t backend) { + int backend_index = ggml_backend_sched_backend_id(sched, backend); + GGML_ASSERT(backend_index >= 0 && backend_index < sched->n_backends); + + return ggml_gallocr_get_buffer_size(sched->galloc, backend_index); +} + +void ggml_backend_sched_set_tensor_backend(ggml_backend_sched_t sched, struct ggml_tensor * node, ggml_backend_t backend) { + int backend_index = ggml_backend_sched_backend_id(sched, backend); + GGML_ASSERT(backend_index >= 0 && backend_index < sched->n_backends); + tensor_backend_id(node) = backend_index; + SET_CAUSE(node, "usr"); + sched->is_reset = false; +} + +ggml_backend_t ggml_backend_sched_get_tensor_backend(ggml_backend_sched_t sched, struct ggml_tensor * node) { + int backend_index = tensor_backend_id(node); + if (backend_index == -1) { + return NULL; + } + return sched->backends[backend_index]; +} + +// utils + +void ggml_backend_view_init(struct ggml_tensor * tensor) { + GGML_ASSERT(tensor->buffer == NULL); + GGML_ASSERT(tensor->view_src != NULL); + GGML_ASSERT(tensor->view_src->buffer != NULL); + GGML_ASSERT(tensor->view_src->data != NULL); + + tensor->buffer = tensor->view_src->buffer; + tensor->data = (char *)tensor->view_src->data + tensor->view_offs; + ggml_backend_buffer_init_tensor(tensor->buffer, tensor); +} + +void ggml_backend_tensor_alloc(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, void * addr) { + GGML_ASSERT(tensor->buffer == NULL); + GGML_ASSERT(tensor->data == NULL); + GGML_ASSERT(tensor->view_src == NULL); + GGML_ASSERT(addr >= ggml_backend_buffer_get_base(buffer)); + GGML_ASSERT((char *)addr + ggml_backend_buffer_get_alloc_size(buffer, tensor) <= + (char 
*)ggml_backend_buffer_get_base(buffer) + ggml_backend_buffer_get_size(buffer)); + + tensor->buffer = buffer; + tensor->data = addr; + ggml_backend_buffer_init_tensor(buffer, tensor); +} + +static struct ggml_tensor * graph_copy_dup_tensor(struct ggml_hash_set hash_set, struct ggml_tensor ** node_copies, + struct ggml_context * ctx_allocated, struct ggml_context * ctx_unallocated, struct ggml_tensor * src) { + + GGML_ASSERT(src != NULL); + GGML_ASSERT(src->data && "graph must be allocated"); + + size_t id = ggml_hash_insert(&hash_set, src); + if (id == GGML_HASHSET_ALREADY_EXISTS) { + return node_copies[ggml_hash_find(&hash_set, src)]; + } + + struct ggml_tensor * dst = ggml_dup_tensor_layout(src->data && !src->view_src ? ctx_allocated : ctx_unallocated, src); + if (src->view_src != NULL) { + dst->view_src = graph_copy_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, src->view_src); + dst->view_offs = src->view_offs; + } + dst->op = src->op; + memcpy(dst->op_params, src->op_params, sizeof(dst->op_params)); + ggml_set_name(dst, src->name); + + // copy src + for (int i = 0; i < GGML_MAX_SRC; i++) { + struct ggml_tensor * s = src->src[i]; + if (s == NULL) { + continue; + } + dst->src[i] = graph_copy_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, s); + } + + node_copies[id] = dst; + return dst; +} + +static void graph_copy_init_tensor(struct ggml_hash_set * hash_set, struct ggml_tensor ** node_copies, bool * node_init, struct ggml_tensor * src) { + size_t id = ggml_hash_find(hash_set, src); + if (node_init[id]) { + return; + } + node_init[id] = true; + + struct ggml_tensor * dst = node_copies[id]; + if (dst->view_src != NULL) { + graph_copy_init_tensor(hash_set, node_copies, node_init, src->view_src); + ggml_backend_view_init(dst); + } + else { + ggml_backend_tensor_copy(src, dst); + } + + // init src + for (int i = 0; i < GGML_MAX_SRC; i++) { + struct ggml_tensor * s = src->src[i]; + if (s == NULL) { + continue; + } + 
graph_copy_init_tensor(hash_set, node_copies, node_init, s); + } +} + +struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, struct ggml_cgraph * graph) { + struct ggml_hash_set hash_set = ggml_hash_set_new(graph->visited_hash_set.size); + struct ggml_tensor ** node_copies = calloc(hash_set.size, sizeof(node_copies[0])); // NOLINT + bool * node_init = calloc(hash_set.size, sizeof(node_init[0])); + + struct ggml_init_params params = { + /* .mem_size = */ ggml_tensor_overhead()*hash_set.size + ggml_graph_overhead_custom(graph->size, false), + /* .mem_buffer = */ NULL, + /* .no_alloc = */ true + }; + + struct ggml_context * ctx_allocated = ggml_init(params); + struct ggml_context * ctx_unallocated = ggml_init(params); + + if (ctx_allocated == NULL || ctx_unallocated == NULL) { + fprintf(stderr, "failed to allocate context for graph copy\n"); + ggml_hash_set_free(&hash_set); + free(node_copies); + free(node_init); + ggml_free(ctx_allocated); + ggml_free(ctx_unallocated); + return (struct ggml_backend_graph_copy) { + /* .buffer = */ NULL, + /* .ctx_allocated = */ NULL, + /* .ctx_unallocated = */ NULL, + /* .graph = */ NULL, + }; + } + + // dup nodes + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + graph_copy_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, node); + } + + // allocate nodes + ggml_backend_buffer_t buffer = ggml_backend_alloc_ctx_tensors(ctx_allocated, backend); + if (buffer == NULL) { + fprintf(stderr, "failed to allocate buffer for graph copy\n"); + ggml_hash_set_free(&hash_set); + free(node_copies); + free(node_init); + ggml_free(ctx_allocated); + ggml_free(ctx_unallocated); + return (struct ggml_backend_graph_copy) { + /* .buffer = */ NULL, + /* .ctx_allocated = */ NULL, + /* .ctx_unallocated = */ NULL, + /* .graph = */ NULL, + }; + } + + //printf("copy buffer size: %zu MB\n", ggml_backend_buffer_get_size(buffer) / 1024 / 1024); + + // copy data and init views + for 
(int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + graph_copy_init_tensor(&hash_set, node_copies, node_init, node); + } + + // build graph copy + struct ggml_cgraph * graph_copy = ggml_new_graph_custom(ctx_allocated, graph->size, false); + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + struct ggml_tensor * node_copy = node_copies[ggml_hash_find(&hash_set, node)]; + graph_copy->nodes[i] = node_copy; + } + graph_copy->n_nodes = graph->n_nodes; + + ggml_hash_set_free(&hash_set); + free(node_copies); + free(node_init); + + return (struct ggml_backend_graph_copy) { + /* .buffer = */ buffer, + /* .ctx_allocated = */ ctx_allocated, + /* .ctx_unallocated = */ ctx_unallocated, + /* .graph = */ graph_copy, + }; +} + +void ggml_backend_graph_copy_free(struct ggml_backend_graph_copy copy) { + ggml_backend_buffer_free(copy.buffer); + ggml_free(copy.ctx_allocated); + ggml_free(copy.ctx_unallocated); +} + +bool ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t backend2, struct ggml_cgraph * graph, ggml_backend_eval_callback callback, void * user_data) { + struct ggml_backend_graph_copy copy = ggml_backend_graph_copy(backend2, graph); + if (copy.buffer == NULL) { + return false; + } + + struct ggml_cgraph * g1 = graph; + struct ggml_cgraph * g2 = copy.graph; + + assert(g1->n_nodes == g2->n_nodes); + + for (int i = 0; i < g1->n_nodes; i++) { + //printf("eval %d/%d\n", i, g1->n_nodes); + struct ggml_tensor * t1 = g1->nodes[i]; + struct ggml_tensor * t2 = g2->nodes[i]; + + assert(t1->op == t2->op && ggml_are_same_layout(t1, t2)); + + struct ggml_cgraph g1v = ggml_graph_view(g1, i, i + 1); + struct ggml_cgraph g2v = ggml_graph_view(g2, i, i + 1); + + ggml_backend_graph_compute(backend1, &g1v); + ggml_backend_graph_compute(backend2, &g2v); + + if (ggml_is_view_op(t1->op)) { + continue; + } + + // compare results, calculate rms etc + if (!callback(i, t1, t2, user_data)) { + break; 
+ } + } + + ggml_backend_graph_copy_free(copy); + + return true; +} diff --git a/ml/backend/ggml/ggml-backend.h b/ml/backend/ggml/ggml-backend.h new file mode 100644 index 000000000..f5329ab9e --- /dev/null +++ b/ml/backend/ggml/ggml-backend.h @@ -0,0 +1,267 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +#pragma once + +#include "ggml.h" +#include "ggml-alloc.h" + +#ifdef __cplusplus +extern "C" { +#endif + + typedef struct ggml_backend_buffer_type * ggml_backend_buffer_type_t; + typedef struct ggml_backend_buffer * ggml_backend_buffer_t; + typedef struct ggml_backend_event * ggml_backend_event_t; + typedef struct ggml_backend * ggml_backend_t; + typedef void * ggml_backend_graph_plan_t; + + // + // Backend buffer + // + + // buffer type + GGML_API const char * ggml_backend_buft_name (ggml_backend_buffer_type_t buft); + GGML_API GGML_CALL ggml_backend_buffer_t ggml_backend_buft_alloc_buffer (ggml_backend_buffer_type_t buft, size_t size); + GGML_API size_t ggml_backend_buft_get_alignment (ggml_backend_buffer_type_t buft); + GGML_API size_t ggml_backend_buft_get_max_size (ggml_backend_buffer_type_t buft); + GGML_API GGML_CALL size_t ggml_backend_buft_get_alloc_size (ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); + GGML_API bool ggml_backend_buft_is_host (ggml_backend_buffer_type_t buft); + + // buffer + enum ggml_backend_buffer_usage { + GGML_BACKEND_BUFFER_USAGE_ANY = 0, + GGML_BACKEND_BUFFER_USAGE_WEIGHTS = 1, + GGML_BACKEND_BUFFER_USAGE_COMPUTE = 2, + }; + + GGML_API const char * ggml_backend_buffer_name (ggml_backend_buffer_t buffer); + GGML_API void ggml_backend_buffer_free (ggml_backend_buffer_t buffer); + GGML_API void * ggml_backend_buffer_get_base (ggml_backend_buffer_t buffer); + GGML_API size_t ggml_backend_buffer_get_size (ggml_backend_buffer_t buffer); + GGML_API GGML_CALL void ggml_backend_buffer_init_tensor (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + GGML_API size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer); + GGML_API size_t ggml_backend_buffer_get_max_size (ggml_backend_buffer_t buffer); + GGML_API size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + GGML_API void ggml_backend_buffer_clear (ggml_backend_buffer_t buffer, uint8_t value); 
+ GGML_API bool ggml_backend_buffer_is_host (ggml_backend_buffer_t buffer); + GGML_API void ggml_backend_buffer_set_usage (ggml_backend_buffer_t buffer, enum ggml_backend_buffer_usage usage); + GGML_API enum ggml_backend_buffer_usage ggml_backend_buffer_get_usage (ggml_backend_buffer_t buffer); + GGML_API ggml_backend_buffer_type_t ggml_backend_buffer_get_type (ggml_backend_buffer_t buffer); + GGML_API void ggml_backend_buffer_reset (ggml_backend_buffer_t buffer); + + // + // Backend + // + + GGML_API ggml_guid_t ggml_backend_guid(ggml_backend_t backend); + GGML_API const char * ggml_backend_name(ggml_backend_t backend); + GGML_API void ggml_backend_free(ggml_backend_t backend); + + GGML_API ggml_backend_buffer_type_t ggml_backend_get_default_buffer_type(ggml_backend_t backend); + GGML_API ggml_backend_buffer_t ggml_backend_alloc_buffer(ggml_backend_t backend, size_t size); + GGML_API size_t ggml_backend_get_alignment(ggml_backend_t backend); + GGML_API size_t ggml_backend_get_max_size(ggml_backend_t backend); + + GGML_API void ggml_backend_tensor_set_async(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); + GGML_API void ggml_backend_tensor_get_async(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + + // "offset" refers to the offset of the tensor data for setting/getting data + GGML_API GGML_CALL void ggml_backend_tensor_set( struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); + GGML_API GGML_CALL void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + GGML_API GGML_CALL void ggml_backend_tensor_memset( struct ggml_tensor * tensor, uint8_t value, size_t offset, size_t size); + + GGML_API void ggml_backend_synchronize(ggml_backend_t backend); + + GGML_API ggml_backend_graph_plan_t ggml_backend_graph_plan_create(ggml_backend_t backend, struct ggml_cgraph * cgraph); + GGML_API void 
ggml_backend_graph_plan_free (ggml_backend_t backend, ggml_backend_graph_plan_t plan); + + GGML_API enum ggml_status ggml_backend_graph_plan_compute (ggml_backend_t backend, ggml_backend_graph_plan_t plan); + GGML_API enum ggml_status ggml_backend_graph_compute (ggml_backend_t backend, struct ggml_cgraph * cgraph); + GGML_API enum ggml_status ggml_backend_graph_compute_async(ggml_backend_t backend, struct ggml_cgraph * cgraph); + GGML_API bool ggml_backend_supports_op(ggml_backend_t backend, const struct ggml_tensor * op); + GGML_API bool ggml_backend_supports_buft(ggml_backend_t backend, ggml_backend_buffer_type_t buft); + GGML_API bool ggml_backend_offload_op(ggml_backend_t backend, const struct ggml_tensor * op); + + // tensor copy between different backends + GGML_API void ggml_backend_tensor_copy(struct ggml_tensor * src, struct ggml_tensor * dst); + + // asynchronous copy + // the copy is performed after all the currently queued operations in backend_src + // backend_dst will wait for the copy to complete before performing other operations + // automatic fallback to sync copy if async is not supported + GGML_API void ggml_backend_tensor_copy_async(ggml_backend_t backend_src, ggml_backend_t backend_dst, struct ggml_tensor * src, struct ggml_tensor * dst); + + // events + GGML_API ggml_backend_event_t ggml_backend_event_new (ggml_backend_t backend); + GGML_API void ggml_backend_event_free (ggml_backend_event_t event); + GGML_API void ggml_backend_event_record (ggml_backend_event_t event); + GGML_API void ggml_backend_event_synchronize(ggml_backend_event_t event); + GGML_API void ggml_backend_event_wait (ggml_backend_t backend, ggml_backend_event_t event); + + // + // CPU backend + // + + GGML_API ggml_backend_t ggml_backend_cpu_init(void); + + GGML_API GGML_CALL bool ggml_backend_is_cpu (ggml_backend_t backend); + GGML_API void ggml_backend_cpu_set_n_threads (ggml_backend_t backend_cpu, int n_threads); + GGML_API void ggml_backend_cpu_set_threadpool 
(ggml_backend_t backend_cpu, ggml_threadpool_t threadpool); + GGML_API void ggml_backend_cpu_set_abort_callback(ggml_backend_t backend_cpu, ggml_abort_callback abort_callback, void * abort_callback_data); + + // Create a backend buffer from an existing pointer + GGML_API GGML_CALL ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size); + + GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void); + +#ifdef GGML_USE_CPU_HBM + GGML_API ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type(void); +#endif + + // + // Backend registry + // + + // The backend registry is a registry of all the available backends, and allows initializing backends in a generic way + + GGML_API size_t ggml_backend_reg_get_count(void); + GGML_API size_t ggml_backend_reg_find_by_name(const char * name); // returns index of backend with name, or SIZE_MAX if not found + GGML_API ggml_backend_t ggml_backend_reg_init_backend_from_str(const char * backend_str); // str is backend_name:params (params is optional) + GGML_API const char * ggml_backend_reg_get_name(size_t i); + GGML_API ggml_backend_t ggml_backend_reg_init_backend(size_t i, const char * params); // params is backend-specific + GGML_API ggml_backend_buffer_type_t ggml_backend_reg_get_default_buffer_type(size_t i); + GGML_API ggml_backend_buffer_t ggml_backend_reg_alloc_buffer(size_t i, size_t size); + + // + // Backend scheduler + // + + // The backend scheduler allows for multiple backends to be used together + // Handles compute buffer allocation, assignment of tensors to backends, and copying of tensors between backends + // The backends are selected based on: + // - the backend that supports the operation + // - the location of the pre-allocated tensors (e.g. 
the weights) + /* + Example usage: + + // operations that use tensors allocated in a buffer with USAGE_WEIGHTS will be assigned + // preferrably to run on the same backend as the buffer + ggml_backend_buffer_set_usage(buf_weights, GGML_BACKEND_BUFFER_USAGE_WEIGHTS); + + sched = ggml_backend_sched_new({backend_gpu, backend_gpu2, backend_cpu}, NULL, num_backends, GGML_DEFAULT_GRAPH_SIZE, false); + + // initialize buffers from a max size graph (optional) + reserve_graph = build_graph(sched, max_batch_size); + + // manually assign nodes to a backend (optional, should not be needed in most cases) + struct ggml_tensor * node = ggml_mul_mat(ctx, ...); + ggml_backend_sched_set_tensor_backend(sched, node, backend_gpu); + + ggml_backend_sched_reserve(sched, reserve_graph); + + // compute + graph = build_graph(sched); + ggml_backend_sched_graph_compute(sched, graph); + + // if there are graph inputs: + ggml_backend_sched_reset(sched); + ggml_backend_sched_alloc_graph(sched, graph); + ggml_backend_tensor_set(input_tensor, ...); + ggml_backend_sched_graph_compute(sched, graph); + } + */ + + struct ggml_backend_sched; + typedef struct ggml_backend_sched * ggml_backend_sched_t; + + // when ask == true, the scheduler wants to know if the user wants to observe this node + // this allows the scheduler to batch nodes together in order to evaluate them in a single call + // + // when ask == false, the scheduler is passing the node tensor to the user for observation + // if the user returns false, the scheduler will cancel the graph compute + // + typedef bool (*ggml_backend_sched_eval_callback)(struct ggml_tensor * t, bool ask, void * user_data); + + // Initialize a backend scheduler + GGML_API ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_backend_buffer_type_t * bufts, int n_backends, size_t graph_size, bool parallel); + GGML_API void ggml_backend_sched_free(ggml_backend_sched_t sched); + + // Initialize backend buffers from a measure graph + GGML_API 
bool ggml_backend_sched_reserve(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph); + + GGML_API int ggml_backend_sched_get_n_backends(ggml_backend_sched_t sched); + GGML_API ggml_backend_t ggml_backend_sched_get_backend(ggml_backend_sched_t sched, int i); + + // Get the number of splits of the last graph + GGML_API int ggml_backend_sched_get_n_splits(ggml_backend_sched_t sched); + GGML_API int ggml_backend_sched_get_n_copies(ggml_backend_sched_t sched); + + GGML_API size_t ggml_backend_sched_get_buffer_size(ggml_backend_sched_t sched, ggml_backend_t backend); + + GGML_API void ggml_backend_sched_set_tensor_backend(ggml_backend_sched_t sched, struct ggml_tensor * node, ggml_backend_t backend); + GGML_API ggml_backend_t ggml_backend_sched_get_tensor_backend(ggml_backend_sched_t sched, struct ggml_tensor * node); + + // Allocate and compute graph on the backend scheduler + GGML_API bool ggml_backend_sched_alloc_graph(ggml_backend_sched_t sched, struct ggml_cgraph * graph); + GGML_API enum ggml_status ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph); + GGML_API enum ggml_status ggml_backend_sched_graph_compute_async(ggml_backend_sched_t sched, struct ggml_cgraph * graph); + GGML_API void ggml_backend_sched_synchronize(ggml_backend_sched_t sched); + + // Reset all assignments and allocators - must be called before changing the node backends + GGML_API void ggml_backend_sched_reset(ggml_backend_sched_t sched); + + // Set a callback to be called for each resulting node during graph compute + GGML_API void ggml_backend_sched_set_eval_callback(ggml_backend_sched_t sched, ggml_backend_sched_eval_callback callback, void * user_data); + + // + // Utils + // + + struct ggml_backend_graph_copy { + ggml_backend_buffer_t buffer; + struct ggml_context * ctx_allocated; + struct ggml_context * ctx_unallocated; + struct ggml_cgraph * graph; + }; + + // Copy a graph to a different backend + GGML_API struct ggml_backend_graph_copy 
ggml_backend_graph_copy(ggml_backend_t backend, struct ggml_cgraph * graph); + GGML_API void ggml_backend_graph_copy_free(struct ggml_backend_graph_copy copy); + + typedef bool (*GGML_CALL ggml_backend_eval_callback)(int node_index, struct ggml_tensor * t1, struct ggml_tensor * t2, void * user_data); + + // Compare the output of two backends + GGML_API bool ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t backend2, struct ggml_cgraph * graph, ggml_backend_eval_callback callback, void * user_data); + + // Tensor initialization + GGML_API void ggml_backend_tensor_alloc(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, void * addr); + GGML_API void ggml_backend_view_init(struct ggml_tensor * tensor); + + +#ifdef __cplusplus +} +#endif diff --git a/ml/backend/ggml/ggml-blas.h b/ml/backend/ggml/ggml-blas.h new file mode 100644 index 000000000..3f81fd96c --- /dev/null +++ b/ml/backend/ggml/ggml-blas.h @@ -0,0 +1,49 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +#pragma once + +#include "ggml.h" +#include "ggml-backend.h" + + +#ifdef __cplusplus +extern "C" { +#endif + +// backend API +GGML_API GGML_CALL ggml_backend_t ggml_backend_blas_init(void); + +GGML_API GGML_CALL bool ggml_backend_is_blas(ggml_backend_t backend); + +// number of threads used for conversion to float +// for openblas and blis, this will also set the number of threads used for blas operations +GGML_API GGML_CALL void ggml_backend_blas_set_n_threads(ggml_backend_t backend_blas, int n_threads); + + +#ifdef __cplusplus +} +#endif diff --git a/ml/backend/ggml/ggml-common.h b/ml/backend/ggml/ggml-common.h new file mode 100644 index 000000000..28a86a7a0 --- /dev/null +++ b/ml/backend/ggml/ggml-common.h @@ -0,0 +1,1879 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +#ifndef GGML_COMMON_DECL + +#if defined(GGML_COMMON_DECL_C) +#include + +typedef uint16_t ggml_half; +typedef uint32_t ggml_half2; + +#define GGML_COMMON_AGGR + +#define GGML_COMMON_DECL +#elif defined(GGML_COMMON_DECL_METAL) +#include + +typedef half ggml_half; +typedef half2 ggml_half2; + +#define GGML_COMMON_AGGR + +#define GGML_COMMON_DECL +#elif defined(GGML_COMMON_DECL_CUDA) +#if defined(GGML_COMMON_DECL_MUSA) +#include +#else +#include +#endif +#include + +typedef half ggml_half; +typedef half2 ggml_half2; + +#define GGML_COMMON_AGGR data + +#define GGML_COMMON_DECL +#elif defined(GGML_COMMON_DECL_HIP) +#include +#include + +typedef half ggml_half; +typedef half2 ggml_half2; + +#define GGML_COMMON_AGGR data + +#define GGML_COMMON_DECL +#elif defined(GGML_COMMON_DECL_SYCL) +#include +#include + +typedef sycl::half ggml_half; +typedef sycl::half2 ggml_half2; + +#define GGML_COMMON_AGGR data + +#define GGML_COMMON_DECL +#endif + +#if defined(GGML_COMMON_DECL) + +#ifndef __cplusplus +#ifndef static_assert +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201100L) +#define static_assert(cond, msg) _Static_assert(cond, msg) +#else +#define static_assert(cond, msg) struct global_scope_noop_trick +#endif +#endif +#endif // __cplusplus + +// QK = number of values after dequantization +// QK_K = super-block size + +#define QK_K 256 +#define K_SCALE_SIZE 12 + +#if defined(GGML_COMMON_DECL_CUDA) || defined(GGML_COMMON_DECL_HIP) || defined(GGML_COMMON_DECL_SYCL) +// QR = QK 
/ number of values before dequantization +// QI = number of 32 bit integers before dequantization + +#define QI4_0 (QK4_0 / (4 * QR4_0)) +#define QR4_0 2 + +#define QI4_1 (QK4_1 / (4 * QR4_1)) +#define QR4_1 2 + +#define QI5_0 (QK5_0 / (4 * QR5_0)) +#define QR5_0 2 + +#define QI5_1 (QK5_1 / (4 * QR5_1)) +#define QR5_1 2 + +#define QI8_0 (QK8_0 / (4 * QR8_0)) +#define QR8_0 1 + +#define QI8_1 (QK8_1 / (4 * QR8_1)) +#define QR8_1 1 + +#define QI2_K (QK_K / (4*QR2_K)) +#define QR2_K 4 + +#define QI3_K (QK_K / (4*QR3_K)) +#define QR3_K 4 + +#define QI4_K (QK_K / (4*QR4_K)) +#define QR4_K 2 + +#define QI5_K (QK_K / (4*QR5_K)) +#define QR5_K 2 + +#define QI6_K (QK_K / (4*QR6_K)) +#define QR6_K 2 + +#define QI2_XXS (QK_K / (4*QR2_XXS)) +#define QR2_XXS 4 + +#define QI2_XS (QK_K / (4*QR2_XS)) +#define QR2_XS 4 + +#define QI2_S (QK_K / (4*QR2_S)) +#define QR2_S 4 + +#define QI3_XXS (QK_K / (4*QR3_XXS)) +#define QR3_XXS 4 + +#define QI3_XS (QK_K / (4*QR3_XS)) +#define QR3_XS 4 + +#define QI1_S (QK_K / (4*QR1_S)) +#define QR1_S 8 + +#define QI1_M (QK_K / (4*QR1_M)) +#define QR1_M 8 + +#define QI4_NL (QK4_NL / (4*QR4_NL)) +#define QR4_NL 2 + +#define QI4_XS (QK_K / (4*QR4_XS)) +#define QR4_XS 2 + +#define QI3_S (QK_K / (4*QR3_S)) +#define QR3_S 4 + +#endif // GGML_COMMON_DECL_CUDA || GGML_COMMON_DECL_HIP + +#define QK4_0 32 +typedef struct { + ggml_half d; // delta + uint8_t qs[QK4_0 / 2]; // nibbles / quants +} block_q4_0; +static_assert(sizeof(block_q4_0) == sizeof(ggml_half) + QK4_0 / 2, "wrong q4_0 block size/padding"); + +#define QK4_1 32 +typedef struct { + union { + struct { + ggml_half d; // delta + ggml_half m; // min + } GGML_COMMON_AGGR; + ggml_half2 dm; + }; + uint8_t qs[QK4_1 / 2]; // nibbles / quants +} block_q4_1; +static_assert(sizeof(block_q4_1) == 2 * sizeof(ggml_half) + QK4_1 / 2, "wrong q4_1 block size/padding"); + +#define QK5_0 32 +typedef struct { + ggml_half d; // delta + uint8_t qh[4]; // 5-th bit of quants + uint8_t qs[QK5_0 / 2]; // nibbles / quants 
+} block_q5_0; +static_assert(sizeof(block_q5_0) == sizeof(ggml_half) + sizeof(uint32_t) + QK5_0 / 2, "wrong q5_0 block size/padding"); + +#define QK5_1 32 +typedef struct { + union { + struct { + ggml_half d; // delta + ggml_half m; // min + } GGML_COMMON_AGGR; + ggml_half2 dm; + }; + uint8_t qh[4]; // 5-th bit of quants + uint8_t qs[QK5_1 / 2]; // nibbles / quants +} block_q5_1; +static_assert(sizeof(block_q5_1) == 2 * sizeof(ggml_half) + sizeof(uint32_t) + QK5_1 / 2, "wrong q5_1 block size/padding"); + +#define QK8_0 32 +typedef struct { + ggml_half d; // delta + int8_t qs[QK8_0]; // quants +} block_q8_0; +static_assert(sizeof(block_q8_0) == sizeof(ggml_half) + QK8_0, "wrong q8_0 block size/padding"); + +#define QK8_1 32 +typedef struct { + union { + struct { + ggml_half d; // delta + ggml_half s; // d * sum(qs[i]) + } GGML_COMMON_AGGR; + ggml_half2 ds; + }; + int8_t qs[QK8_1]; // quants +} block_q8_1; +static_assert(sizeof(block_q8_1) == 2*sizeof(ggml_half) + QK8_1, "wrong q8_1 block size/padding"); + +typedef struct { + ggml_half d[4]; // deltas for 4 q4_0 blocks + uint8_t qs[QK4_0 * 2]; // nibbles / quants for 4 q4_0 blocks +} block_q4_0x4; +static_assert(sizeof(block_q4_0x4) == 4 * sizeof(ggml_half) + QK4_0 * 2, "wrong q4_0x4 block size/padding"); + +typedef struct { + ggml_half d[8]; // deltas for 8 q4_0 blocks + uint8_t qs[QK4_0 * 4]; // nibbles / quants for 8 q4_0 blocks +} block_q4_0x8; +static_assert(sizeof(block_q4_0x8) == 8 * sizeof(ggml_half) + QK4_0 * 4, "wrong q4_0x8 block size/padding"); + +typedef struct { + ggml_half d[4]; // deltas for 4 q8_0 blocks + int8_t qs[QK8_0 * 4]; // quants for 4 q8_0 blocks +} block_q8_0x4; +static_assert(sizeof(block_q8_0x4) == 4 * sizeof(ggml_half) + QK8_0 * 4, "wrong q8_0x4 block size/padding"); + +typedef struct { + ggml_half d[8]; // deltas for 8 q8_0 blocks + int8_t qs[QK8_0 * 8]; // quants for 8 q8_0 blocks +} block_q8_0x8; +static_assert(sizeof(block_q8_0x8) == 8 * sizeof(ggml_half) + QK8_0 * 8, "wrong q8_0x8 
block size/padding"); + +// +// Ternary quantization +// + +// 1.6875 bpw +typedef struct { + uint8_t qs[(QK_K - 4 * QK_K / 64) / 5]; // 5 elements per byte (3^5 = 243 < 256) + uint8_t qh[QK_K/64]; // 4 elements per byte + ggml_half d; +} block_tq1_0; +static_assert(sizeof(block_tq1_0) == sizeof(ggml_half) + QK_K / 64 + (QK_K - 4 * QK_K / 64) / 5, "wrong tq1_0 block size/padding"); + +// 2.0625 bpw +typedef struct { + uint8_t qs[QK_K/4]; // 2 bits per element + ggml_half d; +} block_tq2_0; +static_assert(sizeof(block_tq2_0) == sizeof(ggml_half) + QK_K / 4, "wrong tq2_0 block size/padding"); + +// +// Super-block quantization structures +// + +// 2-bit quantization +// weight is represented as x = a * q + b +// 16 blocks of 16 elements each +// Effectively 2.625 bits per weight +typedef struct { + uint8_t scales[QK_K/16]; // scales and mins, quantized with 4 bits + uint8_t qs[QK_K/4]; // quants + union { + struct { + ggml_half d; // super-block scale for quantized scales + ggml_half dmin; // super-block scale for quantized mins + } GGML_COMMON_AGGR; + ggml_half2 dm; + }; +} block_q2_K; +static_assert(sizeof(block_q2_K) == 2*sizeof(ggml_half) + QK_K/16 + QK_K/4, "wrong q2_K block size/padding"); + +// 3-bit quantization +// weight is represented as x = a * q +// 16 blocks of 16 elements each +// Effectively 3.4375 bits per weight +typedef struct { + uint8_t hmask[QK_K/8]; // quants - high bit + uint8_t qs[QK_K/4]; // quants - low 2 bits + uint8_t scales[12]; // scales, quantized with 6 bits + ggml_half d; // super-block scale +} block_q3_K; +static_assert(sizeof(block_q3_K) == sizeof(ggml_half) + QK_K / 4 + QK_K / 8 + 12, "wrong q3_K block size/padding"); + +// 4-bit quantization +// 8 blocks of 32 elements each +// weight is represented as x = a * q + b +// Effectively 4.5 bits per weight +typedef struct { + union { + struct { + ggml_half d; // super-block scale for quantized scales + ggml_half dmin; // super-block scale for quantized mins + } GGML_COMMON_AGGR; + 
ggml_half2 dm; + }; + uint8_t scales[K_SCALE_SIZE]; // scales and mins, quantized with 6 bits + uint8_t qs[QK_K/2]; // 4--bit quants +} block_q4_K; +static_assert(sizeof(block_q4_K) == 2*sizeof(ggml_half) + K_SCALE_SIZE + QK_K/2, "wrong q4_K block size/padding"); + +// 5-bit quantization +// 8 blocks of 32 elements each +// weight is represented as x = a * q + b +// Effectively 5.5 bits per weight +typedef struct { + union { + struct { + ggml_half d; // super-block scale for quantized scales + ggml_half dmin; // super-block scale for quantized mins + } GGML_COMMON_AGGR; + ggml_half2 dm; + }; + uint8_t scales[K_SCALE_SIZE]; // scales and mins, quantized with 6 bits + uint8_t qh[QK_K/8]; // quants, high bit + uint8_t qs[QK_K/2]; // quants, low 4 bits +} block_q5_K; +static_assert(sizeof(block_q5_K) == 2*sizeof(ggml_half) + K_SCALE_SIZE + QK_K/2 + QK_K/8, "wrong q5_K block size/padding"); + +// 6-bit quantization +// weight is represented as x = a * q +// 16 blocks of 16 elements each +// Effectively 6.5625 bits per weight +typedef struct { + uint8_t ql[QK_K/2]; // quants, lower 4 bits + uint8_t qh[QK_K/4]; // quants, upper 2 bits + int8_t scales[QK_K/16]; // scales, quantized with 8 bits + ggml_half d; // super-block scale +} block_q6_K; +static_assert(sizeof(block_q6_K) == sizeof(ggml_half) + QK_K / 16 + 3*QK_K/4, "wrong q6_K block size/padding"); + +// This is only used for intermediate quantization and dot products +typedef struct { + float d; // delta + int8_t qs[QK_K]; // quants + int16_t bsums[QK_K/16]; // sum of quants in groups of 16 +} block_q8_K; +static_assert(sizeof(block_q8_K) == sizeof(float) + QK_K + QK_K/16*sizeof(int16_t), "wrong q8_K block size/padding"); + +// (Almost) "true" 2-bit quantization. +// Due to the need to use blocks as per ggml design, it ends up using +// 2.0625 bpw because of the 16-bit scale for each block of 256. 
+typedef struct { + ggml_half d; + uint16_t qs[QK_K/8]; +} block_iq2_xxs; +static_assert(sizeof(block_iq2_xxs) == sizeof(ggml_half) + QK_K/8*sizeof(uint16_t), "wrong iq2_xxs block size/padding"); + +// 2.3125 bpw quants +typedef struct { + ggml_half d; + uint16_t qs[QK_K/8]; + uint8_t scales[QK_K/32]; +} block_iq2_xs; +static_assert(sizeof(block_iq2_xs) == sizeof(ggml_half) + QK_K/8*sizeof(uint16_t) + QK_K/32, "wrong iq2_xs block size/padding"); + +// 2.5625 bpw quants +typedef struct { + ggml_half d; + uint8_t qs[QK_K/4]; + uint8_t qh[QK_K/32]; + uint8_t scales[QK_K/32]; +} block_iq2_s; +static_assert(sizeof(block_iq2_s) == sizeof(ggml_half) + QK_K/4 + QK_K/16, "wrong iq2_s block size/padding"); + +// (Almost) "true" 3-bit quantization. +// Due to the need to use blocks as per ggml design, it ends up using +// 3.0625 bpw because of the 16-bit scale for each block of 256. +typedef struct { + ggml_half d; + uint8_t qs[3*QK_K/8]; +} block_iq3_xxs; +static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_half) + 3*(QK_K/8), "wrong iq3_xxs block size/padding"); + +// 3.4375 bpw +#define IQ3S_N_SCALE QK_K/64 +typedef struct { + ggml_half d; + uint8_t qs[QK_K/4]; + uint8_t qh[QK_K/32]; + uint8_t signs[QK_K/8]; + uint8_t scales[IQ3S_N_SCALE]; +} block_iq3_s; +static_assert(sizeof(block_iq3_s) == sizeof(ggml_half) + 13*(QK_K/32) + IQ3S_N_SCALE, "wrong iq3_s block size/padding"); + +// 1.5625 bpw +typedef struct { + ggml_half d; + uint8_t qs[QK_K/8]; + uint16_t qh[QK_K/32]; +} block_iq1_s; +static_assert(sizeof(block_iq1_s) == sizeof(ggml_half) + QK_K/8 + QK_K/16, "wrong iq1_s block size/padding"); + +// 1.75 bpw +typedef struct { + uint8_t qs[QK_K/8]; // grid index, low 8 bits + uint8_t qh[QK_K/16]; // grid index, high 3 bits + grid shift bit (for two groups of 8) + uint8_t scales[QK_K/32]; // 3-bit block scales (4-bit if QK_K == 64) +} block_iq1_m; +static_assert(sizeof(block_iq1_m) == QK_K/8 + QK_K/16 + QK_K/32, "wrong iq1_m block size/padding"); + +// Used by IQ1_M quants 
+typedef union { + ggml_half f16; + uint16_t u16; +} iq1m_scale_t; + +// Non-linear quants +#define QK4_NL 32 +typedef struct { + ggml_half d; + uint8_t qs[QK4_NL/2]; +} block_iq4_nl; +static_assert(sizeof(block_iq4_nl) == sizeof(ggml_half) + QK4_NL/2, "wrong iq4_nl block size/padding"); + +typedef struct { + ggml_half d; + uint16_t scales_h; + uint8_t scales_l[QK_K/64]; + uint8_t qs[QK_K/2]; +} block_iq4_xs; +static_assert(sizeof(block_iq4_xs) == sizeof(ggml_half) + sizeof(uint16_t) + QK_K/64 + QK_K/2, "wrong iq4_xs block size/padding"); + +#endif // GGML_COMMON_DECL +#endif // GGML_COMMON_DECL + +//////////////////////////////////////////////////////////////////////////////// + +#ifndef GGML_COMMON_IMPL + +#if defined(GGML_COMMON_IMPL_C) +#include + +#define GGML_TABLE_BEGIN(type, name, size) static const type name[size] = { +#define GGML_TABLE_END() }; + +#define GGML_COMMON_IMPL +#elif defined(GGML_COMMON_IMPL_METAL) +#include + +#define GGML_TABLE_BEGIN(type, name, size) static const constant type name[size] = { +#define GGML_TABLE_END() }; + +#define GGML_COMMON_IMPL +#elif defined(GGML_COMMON_IMPL_CUDA) || defined(GGML_COMMON_IMPL_HIP) || defined(GGML_COMMON_IMPL_MUSA) +#include + +#define GGML_TABLE_BEGIN(type, name, size) static const __device__ type name[size] = { +#define GGML_TABLE_END() }; + +#define GGML_COMMON_IMPL +#elif defined(GGML_COMMON_IMPL_SYCL) + +#include + +#define GGML_TABLE_BEGIN(type, name, size) static const type name[size] = { +#define GGML_TABLE_END() }; + +#define GGML_COMMON_IMPL +#endif + +#if defined(GGML_COMMON_IMPL) + +GGML_TABLE_BEGIN(uint8_t, kmask_iq2xs, 8) + 1, 2, 4, 8, 16, 32, 64, 128 +GGML_TABLE_END() + +GGML_TABLE_BEGIN(uint8_t, ksigns_iq2xs, 128) + 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, + 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, + 160, 33, 34, 163, 36, 165, 166, 39, 40, 169, 170, 43, 172, 45, 46, 175, + 48, 177, 178, 51, 180, 53, 54, 183, 184, 57, 58, 187, 60, 
189, 190, 63, + 192, 65, 66, 195, 68, 197, 198, 71, 72, 201, 202, 75, 204, 77, 78, 207, + 80, 209, 210, 83, 212, 85, 86, 215, 216, 89, 90, 219, 92, 221, 222, 95, + 96, 225, 226, 99, 228, 101, 102, 231, 232, 105, 106, 235, 108, 237, 238, 111, + 240, 113, 114, 243, 116, 245, 246, 119, 120, 249, 250, 123, 252, 125, 126, 255, +GGML_TABLE_END() + +//#if __CUDA_ARCH__ >= MIN_CC_DP4A // lowest compute capability for integer intrinsics +GGML_TABLE_BEGIN(uint64_t, ksigns64, 128) + 0x0000000000000000, 0xff000000000000ff, 0xff0000000000ff00, 0x000000000000ffff, + 0xff00000000ff0000, 0x0000000000ff00ff, 0x0000000000ffff00, 0xff00000000ffffff, + 0xff000000ff000000, 0x00000000ff0000ff, 0x00000000ff00ff00, 0xff000000ff00ffff, + 0x00000000ffff0000, 0xff000000ffff00ff, 0xff000000ffffff00, 0x00000000ffffffff, + 0xff0000ff00000000, 0x000000ff000000ff, 0x000000ff0000ff00, 0xff0000ff0000ffff, + 0x000000ff00ff0000, 0xff0000ff00ff00ff, 0xff0000ff00ffff00, 0x000000ff00ffffff, + 0x000000ffff000000, 0xff0000ffff0000ff, 0xff0000ffff00ff00, 0x000000ffff00ffff, + 0xff0000ffffff0000, 0x000000ffffff00ff, 0x000000ffffffff00, 0xff0000ffffffffff, + 0xff00ff0000000000, 0x0000ff00000000ff, 0x0000ff000000ff00, 0xff00ff000000ffff, + 0x0000ff0000ff0000, 0xff00ff0000ff00ff, 0xff00ff0000ffff00, 0x0000ff0000ffffff, + 0x0000ff00ff000000, 0xff00ff00ff0000ff, 0xff00ff00ff00ff00, 0x0000ff00ff00ffff, + 0xff00ff00ffff0000, 0x0000ff00ffff00ff, 0x0000ff00ffffff00, 0xff00ff00ffffffff, + 0x0000ffff00000000, 0xff00ffff000000ff, 0xff00ffff0000ff00, 0x0000ffff0000ffff, + 0xff00ffff00ff0000, 0x0000ffff00ff00ff, 0x0000ffff00ffff00, 0xff00ffff00ffffff, + 0xff00ffffff000000, 0x0000ffffff0000ff, 0x0000ffffff00ff00, 0xff00ffffff00ffff, + 0x0000ffffffff0000, 0xff00ffffffff00ff, 0xff00ffffffffff00, 0x0000ffffffffffff, + 0xffff000000000000, 0x00ff0000000000ff, 0x00ff00000000ff00, 0xffff00000000ffff, + 0x00ff000000ff0000, 0xffff000000ff00ff, 0xffff000000ffff00, 0x00ff000000ffffff, + 0x00ff0000ff000000, 0xffff0000ff0000ff, 
0xffff0000ff00ff00, 0x00ff0000ff00ffff, + 0xffff0000ffff0000, 0x00ff0000ffff00ff, 0x00ff0000ffffff00, 0xffff0000ffffffff, + 0x00ff00ff00000000, 0xffff00ff000000ff, 0xffff00ff0000ff00, 0x00ff00ff0000ffff, + 0xffff00ff00ff0000, 0x00ff00ff00ff00ff, 0x00ff00ff00ffff00, 0xffff00ff00ffffff, + 0xffff00ffff000000, 0x00ff00ffff0000ff, 0x00ff00ffff00ff00, 0xffff00ffff00ffff, + 0x00ff00ffffff0000, 0xffff00ffffff00ff, 0xffff00ffffffff00, 0x00ff00ffffffffff, + 0x00ffff0000000000, 0xffffff00000000ff, 0xffffff000000ff00, 0x00ffff000000ffff, + 0xffffff0000ff0000, 0x00ffff0000ff00ff, 0x00ffff0000ffff00, 0xffffff0000ffffff, + 0xffffff00ff000000, 0x00ffff00ff0000ff, 0x00ffff00ff00ff00, 0xffffff00ff00ffff, + 0x00ffff00ffff0000, 0xffffff00ffff00ff, 0xffffff00ffffff00, 0x00ffff00ffffffff, + 0xffffffff00000000, 0x00ffffff000000ff, 0x00ffffff0000ff00, 0xffffffff0000ffff, + 0x00ffffff00ff0000, 0xffffffff00ff00ff, 0xffffffff00ffff00, 0x00ffffff00ffffff, + 0x00ffffffff000000, 0xffffffffff0000ff, 0xffffffffff00ff00, 0x00ffffffff00ffff, + 0xffffffffffff0000, 0x00ffffffffff00ff, 0x00ffffffffffff00, 0xffffffffffffffff, +GGML_TABLE_END() +//#endif + + +GGML_TABLE_BEGIN(uint64_t, iq2xxs_grid, 256) + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x08080808082b0808, + 0x08080808082b082b, 0x08080808082b2b08, 0x08080808082b2b2b, 0x0808080819080819, + 0x0808080819081908, 0x0808080819190808, 0x0808080819192b08, 0x08080808192b0819, + 0x08080808192b1908, 0x080808082b080808, 0x080808082b08082b, 0x080808082b082b2b, + 0x080808082b2b082b, 0x0808081908080819, 0x0808081908081908, 0x0808081908190808, + 0x0808081908191919, 0x0808081919080808, 0x080808192b081908, 0x080808192b192b08, + 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b082b082b, 0x0808082b2b08082b, + 0x0808190808080819, 0x0808190808081908, 0x0808190808190808, 0x08081908082b0819, + 0x08081908082b1908, 0x0808190819080808, 0x080819081908082b, 
0x0808190819082b08, + 0x08081908192b0808, 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, + 0x080819082b2b1908, 0x0808191908080808, 0x080819190808082b, 0x0808191908082b08, + 0x08081919082b0808, 0x080819191908192b, 0x08081919192b2b19, 0x080819192b080808, + 0x080819192b190819, 0x0808192b08082b19, 0x0808192b08190808, 0x0808192b19080808, + 0x0808192b2b081908, 0x0808192b2b2b1908, 0x08082b0808080808, 0x08082b0808081919, + 0x08082b0808082b08, 0x08082b0808191908, 0x08082b08082b2b08, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b081919082b, 0x08082b082b082b08, + 0x08082b1908081908, 0x08082b1919080808, 0x08082b2b0808082b, 0x08082b2b08191908, + 0x0819080808080819, 0x0819080808081908, 0x0819080808190808, 0x08190808082b0819, + 0x0819080819080808, 0x08190808192b0808, 0x081908082b081908, 0x081908082b190808, + 0x081908082b191919, 0x0819081908080808, 0x0819081908082b08, 0x08190819082b0808, + 0x0819081919190808, 0x0819081919192b2b, 0x081908192b080808, 0x0819082b082b1908, + 0x0819082b19081919, 0x0819190808080808, 0x0819190808082b08, 0x08191908082b0808, + 0x08191908082b1919, 0x0819190819082b19, 0x081919082b080808, 0x0819191908192b08, + 0x08191919192b082b, 0x0819192b08080808, 0x0819192b0819192b, 0x08192b0808080819, + 0x08192b0808081908, 0x08192b0808190808, 0x08192b0819080808, 0x08192b082b080819, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b192b2b0808, 0x08192b2b19190819, + 0x082b080808080808, 0x082b08080808082b, 0x082b080808082b2b, 0x082b080819081908, + 0x082b0808192b0819, 0x082b08082b080808, 0x082b08082b08082b, 0x082b0819082b2b19, + 0x082b081919082b08, 0x082b082b08080808, 0x082b082b0808082b, 0x082b190808080819, + 0x082b190808081908, 0x082b190808190808, 0x082b190819080808, 0x082b19081919192b, + 0x082b191908080808, 0x082b191919080819, 0x082b1919192b1908, 0x082b192b2b190808, + 0x082b2b0808082b08, 0x082b2b08082b0808, 0x082b2b082b191908, 0x082b2b2b19081908, + 0x1908080808080819, 0x1908080808081908, 0x1908080808190808, 0x1908080808192b08, + 
0x19080808082b0819, 0x19080808082b1908, 0x1908080819080808, 0x1908080819082b08, + 0x190808081919192b, 0x19080808192b0808, 0x190808082b080819, 0x190808082b081908, + 0x190808082b190808, 0x1908081908080808, 0x19080819082b0808, 0x19080819192b0819, + 0x190808192b080808, 0x190808192b081919, 0x1908082b08080819, 0x1908082b08190808, + 0x1908082b19082b08, 0x1908082b1919192b, 0x1908082b192b2b08, 0x1908190808080808, + 0x1908190808082b08, 0x19081908082b0808, 0x190819082b080808, 0x190819082b192b19, + 0x190819190819082b, 0x19081919082b1908, 0x1908192b08080808, 0x19082b0808080819, + 0x19082b0808081908, 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, + 0x19082b1908080808, 0x19082b1919192b08, 0x19082b19192b0819, 0x19082b192b08082b, + 0x19082b2b19081919, 0x19082b2b2b190808, 0x1919080808080808, 0x1919080808082b08, + 0x1919080808190819, 0x1919080808192b19, 0x19190808082b0808, 0x191908082b080808, + 0x191908082b082b08, 0x1919081908081908, 0x191908191908082b, 0x191908192b2b1908, + 0x1919082b2b190819, 0x191919082b190808, 0x191919082b19082b, 0x1919191908082b2b, + 0x1919192b08080819, 0x1919192b19191908, 0x19192b0808080808, 0x19192b0808190819, + 0x19192b0808192b19, 0x19192b08192b1908, 0x19192b1919080808, 0x19192b2b08082b08, + 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, 0x192b0808192b2b08, + 0x192b081908080808, 0x192b081919191919, 0x192b082b08192b08, 0x192b082b192b0808, + 0x192b190808080808, 0x192b190808081919, 0x192b191908190808, 0x192b19190819082b, + 0x192b19192b081908, 0x192b2b081908082b, 0x2b08080808080808, 0x2b0808080808082b, + 0x2b08080808082b2b, 0x2b08080819080819, 0x2b0808082b08082b, 0x2b08081908081908, + 0x2b08081908192b08, 0x2b08081919080808, 0x2b08082b08190819, 0x2b08190808080819, + 0x2b08190808081908, 0x2b08190808190808, 0x2b08190808191919, 0x2b08190819080808, + 0x2b081908192b0808, 0x2b08191908080808, 0x2b0819191908192b, 0x2b0819192b191908, + 0x2b08192b08082b19, 0x2b08192b19080808, 0x2b08192b192b0808, 0x2b082b080808082b, + 0x2b082b1908081908, 
0x2b082b2b08190819, 0x2b19080808081908, 0x2b19080808190808, + 0x2b190808082b1908, 0x2b19080819080808, 0x2b1908082b2b0819, 0x2b1908190819192b, + 0x2b1908192b080808, 0x2b19082b19081919, 0x2b19190808080808, 0x2b191908082b082b, + 0x2b19190819081908, 0x2b19191919190819, 0x2b192b082b080819, 0x2b192b19082b0808, + 0x2b2b08080808082b, 0x2b2b080819190808, 0x2b2b08082b081919, 0x2b2b081908082b19, + 0x2b2b082b08080808, 0x2b2b190808192b08, 0x2b2b2b0819190808, 0x2b2b2b1908081908, +GGML_TABLE_END() + +GGML_TABLE_BEGIN(uint64_t, iq2xs_grid, 512) + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x080808080819192b, + 0x0808080808192b19, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b1919, + 0x08080808082b2b08, 0x0808080819080819, 0x0808080819081908, 0x080808081908192b, + 0x0808080819082b19, 0x0808080819190808, 0x080808081919082b, 0x0808080819191919, + 0x0808080819192b08, 0x08080808192b0819, 0x08080808192b1908, 0x080808082b080808, + 0x080808082b08082b, 0x080808082b081919, 0x080808082b082b08, 0x080808082b190819, + 0x080808082b191908, 0x080808082b192b19, 0x080808082b2b0808, 0x0808081908080819, + 0x0808081908081908, 0x080808190808192b, 0x0808081908082b19, 0x0808081908190808, + 0x080808190819082b, 0x0808081908191919, 0x0808081908192b08, 0x0808081908192b2b, + 0x08080819082b0819, 0x08080819082b1908, 0x0808081919080808, 0x080808191908082b, + 0x0808081919081919, 0x0808081919082b08, 0x0808081919190819, 0x0808081919191908, + 0x08080819192b0808, 0x08080819192b2b08, 0x080808192b080819, 0x080808192b081908, + 0x080808192b190808, 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b08081919, + 0x0808082b08082b08, 0x0808082b08190819, 0x0808082b08191908, 0x0808082b082b0808, + 0x0808082b19080819, 0x0808082b19081908, 0x0808082b19190808, 0x0808082b19191919, + 0x0808082b2b080808, 0x0808082b2b082b2b, 0x0808190808080819, 0x0808190808081908, + 0x080819080808192b, 0x0808190808082b19, 0x0808190808190808, 
0x080819080819082b, + 0x0808190808191919, 0x0808190808192b08, 0x08081908082b0819, 0x08081908082b1908, + 0x0808190819080808, 0x080819081908082b, 0x0808190819081919, 0x0808190819082b08, + 0x0808190819190819, 0x0808190819191908, 0x080819081919192b, 0x08081908192b0808, + 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, 0x0808191908080808, + 0x080819190808082b, 0x0808191908081919, 0x0808191908082b08, 0x0808191908190819, + 0x0808191908191908, 0x08081919082b0808, 0x0808191919080819, 0x0808191919081908, + 0x0808191919190808, 0x08081919192b0819, 0x080819192b080808, 0x0808192b08080819, + 0x0808192b08081908, 0x0808192b08190808, 0x0808192b082b192b, 0x0808192b19080808, + 0x0808192b1908082b, 0x0808192b2b081908, 0x08082b0808080808, 0x08082b080808082b, + 0x08082b0808081919, 0x08082b0808082b08, 0x08082b0808082b2b, 0x08082b0808190819, + 0x08082b0808191908, 0x08082b08082b0808, 0x08082b08082b1919, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b0819192b08, 0x08082b082b080808, + 0x08082b082b2b0808, 0x08082b082b2b2b2b, 0x08082b1908080819, 0x08082b1908081908, + 0x08082b1908190808, 0x08082b1919080808, 0x08082b192b080819, 0x08082b192b082b19, + 0x08082b2b08080808, 0x08082b2b082b0808, 0x08082b2b082b2b08, 0x08082b2b2b19192b, + 0x08082b2b2b2b0808, 0x0819080808080819, 0x0819080808081908, 0x081908080808192b, + 0x0819080808082b19, 0x0819080808190808, 0x081908080819082b, 0x0819080808191919, + 0x0819080808192b08, 0x08190808082b0819, 0x08190808082b1908, 0x0819080819080808, + 0x081908081908082b, 0x0819080819081919, 0x0819080819082b08, 0x0819080819190819, + 0x0819080819191908, 0x08190808192b0808, 0x08190808192b2b2b, 0x081908082b080819, + 0x081908082b081908, 0x081908082b190808, 0x0819081908080808, 0x081908190808082b, + 0x0819081908081919, 0x0819081908082b08, 0x0819081908190819, 0x0819081908191908, + 0x08190819082b0808, 0x0819081919080819, 0x0819081919081908, 0x0819081919190808, + 0x081908192b080808, 0x081908192b191908, 0x081908192b19192b, 0x0819082b08080819, + 
0x0819082b08081908, 0x0819082b0808192b, 0x0819082b08190808, 0x0819082b19080808, + 0x0819082b192b0808, 0x0819190808080808, 0x081919080808082b, 0x0819190808081919, + 0x0819190808082b08, 0x0819190808190819, 0x0819190808191908, 0x08191908082b0808, + 0x0819190819080819, 0x0819190819081908, 0x0819190819082b19, 0x0819190819190808, + 0x08191908192b1908, 0x081919082b080808, 0x0819191908080819, 0x0819191908081908, + 0x0819191908190808, 0x0819191919080808, 0x0819192b08080808, 0x0819192b08191908, + 0x0819192b19082b19, 0x08192b0808080819, 0x08192b0808081908, 0x08192b0808190808, + 0x08192b080819082b, 0x08192b0819080808, 0x08192b0819191908, 0x08192b082b08192b, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b19192b192b, 0x08192b2b19190819, + 0x08192b2b2b2b2b19, 0x082b080808080808, 0x082b08080808082b, 0x082b080808081919, + 0x082b080808082b08, 0x082b080808082b2b, 0x082b080808190819, 0x082b080808191908, + 0x082b0808082b0808, 0x082b080819080819, 0x082b080819081908, 0x082b080819190808, + 0x082b08082b080808, 0x082b08082b2b0808, 0x082b081908080819, 0x082b081908081908, + 0x082b081908190808, 0x082b081919080808, 0x082b081919082b08, 0x082b0819192b1919, + 0x082b082b08080808, 0x082b082b082b082b, 0x082b082b2b080808, 0x082b082b2b2b2b08, + 0x082b190808080819, 0x082b190808081908, 0x082b190808190808, 0x082b1908082b2b19, + 0x082b190819080808, 0x082b191908080808, 0x082b191919080819, 0x082b19191919082b, + 0x082b19192b192b19, 0x082b192b08080819, 0x082b192b08192b2b, 0x082b192b2b2b192b, + 0x082b2b0808080808, 0x082b2b0808082b08, 0x082b2b0808082b2b, 0x082b2b08082b0808, + 0x082b2b0819191919, 0x082b2b082b082b08, 0x082b2b082b2b082b, 0x082b2b19192b2b08, + 0x082b2b192b190808, 0x082b2b2b08082b08, 0x082b2b2b082b0808, 0x082b2b2b2b08082b, + 0x082b2b2b2b082b08, 0x082b2b2b2b082b2b, 0x1908080808080819, 0x1908080808081908, + 0x190808080808192b, 0x1908080808082b19, 0x1908080808190808, 0x190808080819082b, + 0x1908080808191919, 0x1908080808192b08, 0x19080808082b0819, 0x19080808082b1908, + 0x1908080819080808, 
0x190808081908082b, 0x1908080819081919, 0x1908080819082b08, + 0x1908080819082b2b, 0x1908080819190819, 0x1908080819191908, 0x19080808192b0808, + 0x19080808192b1919, 0x190808082b080819, 0x190808082b081908, 0x190808082b190808, + 0x1908081908080808, 0x190808190808082b, 0x1908081908081919, 0x1908081908082b08, + 0x1908081908190819, 0x1908081908191908, 0x19080819082b0808, 0x1908081919080819, + 0x1908081919081908, 0x1908081919190808, 0x190808192b080808, 0x190808192b081919, + 0x190808192b2b082b, 0x1908082b08080819, 0x1908082b08081908, 0x1908082b08190808, + 0x1908082b0819082b, 0x1908082b082b2b19, 0x1908082b19080808, 0x1908190808080808, + 0x190819080808082b, 0x1908190808081919, 0x1908190808082b08, 0x1908190808190819, + 0x1908190808191908, 0x1908190808192b19, 0x19081908082b0808, 0x1908190819080819, + 0x1908190819081908, 0x1908190819190808, 0x190819082b080808, 0x190819082b191908, + 0x1908191908080819, 0x1908191908081908, 0x1908191908190808, 0x19081919082b1908, + 0x1908191919080808, 0x190819192b192b2b, 0x1908192b08080808, 0x1908192b08082b2b, + 0x1908192b19081908, 0x1908192b19190808, 0x19082b0808080819, 0x19082b0808081908, + 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, 0x19082b0819191908, + 0x19082b08192b082b, 0x19082b1908080808, 0x19082b1908190819, 0x19082b1919081908, + 0x19082b1919190808, 0x19082b19192b2b19, 0x19082b2b08081908, 0x1919080808080808, + 0x191908080808082b, 0x1919080808081919, 0x1919080808082b08, 0x1919080808190819, + 0x1919080808191908, 0x19190808082b0808, 0x19190808082b2b08, 0x1919080819080819, + 0x1919080819081908, 0x1919080819190808, 0x191908082b080808, 0x1919081908080819, + 0x1919081908081908, 0x1919081908190808, 0x1919081908191919, 0x1919081919080808, + 0x191908191908082b, 0x1919082b08080808, 0x1919082b19081908, 0x1919082b2b2b2b2b, + 0x1919190808080819, 0x1919190808081908, 0x1919190808190808, 0x19191908082b0819, + 0x1919190819080808, 0x19191908192b0808, 0x191919082b080819, 0x191919082b2b0819, + 0x1919191908080808, 0x1919191908082b08, 
0x191919192b080808, 0x191919192b082b08, + 0x1919192b082b0819, 0x1919192b192b2b08, 0x1919192b2b2b0819, 0x19192b0808080808, + 0x19192b0808191908, 0x19192b0819080819, 0x19192b0819190808, 0x19192b082b192b19, + 0x19192b1908192b2b, 0x19192b1919080808, 0x19192b191908082b, 0x19192b2b2b081919, + 0x192b080808080819, 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, + 0x192b080819191908, 0x192b0808192b082b, 0x192b08082b08192b, 0x192b08082b2b2b19, + 0x192b081908080808, 0x192b082b082b1908, 0x192b082b19082b2b, 0x192b082b2b19082b, + 0x192b190808080808, 0x192b19080819192b, 0x192b191908190808, 0x192b191919080808, + 0x192b191919081919, 0x192b19192b2b1908, 0x192b2b0808080819, 0x192b2b08192b2b2b, + 0x192b2b19082b1919, 0x192b2b2b0808192b, 0x192b2b2b19191908, 0x192b2b2b192b082b, + 0x2b08080808080808, 0x2b0808080808082b, 0x2b08080808081919, 0x2b08080808082b08, + 0x2b08080808190819, 0x2b08080808191908, 0x2b080808082b0808, 0x2b080808082b2b2b, + 0x2b08080819080819, 0x2b08080819081908, 0x2b08080819190808, 0x2b0808082b080808, + 0x2b0808082b08082b, 0x2b0808082b2b2b08, 0x2b0808082b2b2b2b, 0x2b08081908080819, + 0x2b08081908081908, 0x2b0808190808192b, 0x2b08081908190808, 0x2b08081919080808, + 0x2b08081919190819, 0x2b08081919192b19, 0x2b08082b08080808, 0x2b08082b082b0808, + 0x2b08082b2b080808, 0x2b08082b2b08082b, 0x2b08082b2b2b0808, 0x2b08082b2b2b2b08, + 0x2b08190808080819, 0x2b08190808081908, 0x2b08190808190808, 0x2b0819080819082b, + 0x2b08190808191919, 0x2b08190819080808, 0x2b081908192b0808, 0x2b0819082b082b19, + 0x2b08191908080808, 0x2b08191919081908, 0x2b0819192b2b1919, 0x2b08192b08192b08, + 0x2b08192b192b2b2b, 0x2b082b0808080808, 0x2b082b0808082b08, 0x2b082b08082b1919, + 0x2b082b0819192b2b, 0x2b082b082b080808, 0x2b082b082b08082b, 0x2b082b082b2b2b08, + 0x2b082b190808192b, 0x2b082b2b082b082b, 0x2b082b2b2b080808, 0x2b082b2b2b082b08, + 0x2b082b2b2b19192b, 0x2b082b2b2b2b2b08, 0x2b19080808080819, 0x2b19080808081908, + 0x2b19080808190808, 0x2b19080819080808, 0x2b1908081919192b, 
0x2b1908082b081908, + 0x2b19081908080808, 0x2b190819082b082b, 0x2b190819192b1908, 0x2b19082b1919192b, + 0x2b19082b2b082b19, 0x2b19190808080808, 0x2b19190808081919, 0x2b19190819081908, + 0x2b19190819190808, 0x2b19190819192b08, 0x2b191919082b2b19, 0x2b1919192b190808, + 0x2b1919192b19082b, 0x2b19192b19080819, 0x2b192b0819190819, 0x2b192b082b2b192b, + 0x2b192b1919082b19, 0x2b192b2b08191919, 0x2b192b2b192b0808, 0x2b2b080808080808, + 0x2b2b08080808082b, 0x2b2b080808082b08, 0x2b2b080808082b2b, 0x2b2b0808082b0808, + 0x2b2b0808082b2b2b, 0x2b2b08082b2b0808, 0x2b2b081919190819, 0x2b2b081919192b19, + 0x2b2b08192b2b192b, 0x2b2b082b08080808, 0x2b2b082b0808082b, 0x2b2b082b08082b08, + 0x2b2b082b082b2b2b, 0x2b2b082b2b080808, 0x2b2b082b2b2b0808, 0x2b2b190819080808, + 0x2b2b19082b191919, 0x2b2b192b192b1919, 0x2b2b192b2b192b08, 0x2b2b2b0808082b2b, + 0x2b2b2b08082b0808, 0x2b2b2b08082b082b, 0x2b2b2b08082b2b08, 0x2b2b2b082b2b0808, + 0x2b2b2b082b2b2b08, 0x2b2b2b1908081908, 0x2b2b2b192b081908, 0x2b2b2b192b08192b, + 0x2b2b2b2b082b2b08, 0x2b2b2b2b082b2b2b, 0x2b2b2b2b2b190819, 0x2b2b2b2b2b2b2b2b, +GGML_TABLE_END() + +GGML_TABLE_BEGIN(uint64_t, iq2s_grid, 1024) + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x080808080819192b, + 0x0808080808192b19, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b1919, + 0x08080808082b2b08, 0x0808080819080819, 0x0808080819081908, 0x080808081908192b, + 0x0808080819082b19, 0x0808080819190808, 0x080808081919082b, 0x0808080819191919, + 0x0808080819192b08, 0x08080808192b0819, 0x08080808192b1908, 0x08080808192b192b, + 0x08080808192b2b19, 0x080808082b080808, 0x080808082b08082b, 0x080808082b081919, + 0x080808082b082b08, 0x080808082b190819, 0x080808082b191908, 0x080808082b2b0808, + 0x080808082b2b1919, 0x080808082b2b2b2b, 0x0808081908080819, 0x0808081908081908, + 0x080808190808192b, 0x0808081908082b19, 0x0808081908190808, 0x080808190819082b, + 0x0808081908191919, 
0x0808081908192b08, 0x08080819082b0819, 0x08080819082b1908, + 0x0808081919080808, 0x080808191908082b, 0x0808081919081919, 0x0808081919082b08, + 0x0808081919190819, 0x0808081919191908, 0x080808191919192b, 0x0808081919192b19, + 0x08080819192b0808, 0x08080819192b1919, 0x08080819192b2b08, 0x080808192b080819, + 0x080808192b081908, 0x080808192b190808, 0x080808192b19082b, 0x080808192b191919, + 0x080808192b2b0819, 0x080808192b2b1908, 0x0808082b08080808, 0x0808082b0808082b, + 0x0808082b08081919, 0x0808082b08082b08, 0x0808082b08190819, 0x0808082b08191908, + 0x0808082b082b0808, 0x0808082b082b2b2b, 0x0808082b19080819, 0x0808082b19081908, + 0x0808082b1908192b, 0x0808082b19082b19, 0x0808082b19190808, 0x0808082b19191919, + 0x0808082b2b080808, 0x0808082b2b081919, 0x0808082b2b082b2b, 0x0808082b2b191908, + 0x0808082b2b2b082b, 0x0808190808080819, 0x0808190808081908, 0x080819080808192b, + 0x0808190808082b19, 0x0808190808190808, 0x080819080819082b, 0x0808190808191919, + 0x0808190808192b08, 0x08081908082b0819, 0x08081908082b1908, 0x08081908082b192b, + 0x08081908082b2b19, 0x0808190819080808, 0x080819081908082b, 0x0808190819081919, + 0x0808190819082b08, 0x0808190819082b2b, 0x0808190819190819, 0x0808190819191908, + 0x080819081919192b, 0x0808190819192b19, 0x08081908192b0808, 0x08081908192b082b, + 0x08081908192b1919, 0x080819082b080819, 0x080819082b081908, 0x080819082b08192b, + 0x080819082b082b19, 0x080819082b190808, 0x080819082b191919, 0x080819082b192b08, + 0x080819082b2b0819, 0x080819082b2b1908, 0x0808191908080808, 0x080819190808082b, + 0x0808191908081919, 0x0808191908082b08, 0x0808191908082b2b, 0x0808191908190819, + 0x0808191908191908, 0x080819190819192b, 0x0808191908192b19, 0x08081919082b0808, + 0x08081919082b1919, 0x08081919082b2b08, 0x0808191919080819, 0x0808191919081908, + 0x080819191908192b, 0x0808191919082b19, 0x0808191919190808, 0x080819191919082b, + 0x0808191919191919, 0x0808191919192b08, 0x08081919192b0819, 0x08081919192b1908, + 0x080819192b080808, 0x080819192b08082b, 
0x080819192b081919, 0x080819192b082b08, + 0x080819192b190819, 0x080819192b191908, 0x080819192b2b0808, 0x0808192b08080819, + 0x0808192b08081908, 0x0808192b0808192b, 0x0808192b08082b19, 0x0808192b08190808, + 0x0808192b08191919, 0x0808192b19080808, 0x0808192b19081919, 0x0808192b19082b08, + 0x0808192b19190819, 0x0808192b19191908, 0x0808192b192b0808, 0x0808192b2b080819, + 0x0808192b2b081908, 0x0808192b2b190808, 0x08082b0808080808, 0x08082b080808082b, + 0x08082b0808081919, 0x08082b0808082b08, 0x08082b0808190819, 0x08082b0808191908, + 0x08082b080819192b, 0x08082b0808192b19, 0x08082b08082b0808, 0x08082b08082b1919, + 0x08082b08082b2b2b, 0x08082b0819080819, 0x08082b0819081908, 0x08082b081908192b, + 0x08082b0819082b19, 0x08082b0819190808, 0x08082b081919082b, 0x08082b0819191919, + 0x08082b0819192b08, 0x08082b08192b0819, 0x08082b08192b1908, 0x08082b082b080808, + 0x08082b082b081919, 0x08082b082b191908, 0x08082b082b2b2b2b, 0x08082b1908080819, + 0x08082b1908081908, 0x08082b1908190808, 0x08082b190819082b, 0x08082b1908191919, + 0x08082b1908192b08, 0x08082b19082b0819, 0x08082b1919080808, 0x08082b1919081919, + 0x08082b1919082b08, 0x08082b1919190819, 0x08082b1919191908, 0x08082b19192b0808, + 0x08082b192b080819, 0x08082b192b190808, 0x08082b2b08080808, 0x08082b2b08190819, + 0x08082b2b08191908, 0x08082b2b082b082b, 0x08082b2b082b2b08, 0x08082b2b082b2b2b, + 0x08082b2b19190808, 0x08082b2b2b192b19, 0x0819080808080819, 0x0819080808081908, + 0x081908080808192b, 0x0819080808082b19, 0x0819080808190808, 0x081908080819082b, + 0x0819080808191919, 0x0819080808192b08, 0x08190808082b0819, 0x08190808082b1908, + 0x08190808082b192b, 0x0819080819080808, 0x081908081908082b, 0x0819080819081919, + 0x0819080819082b08, 0x0819080819190819, 0x0819080819191908, 0x081908081919192b, + 0x0819080819192b19, 0x08190808192b0808, 0x08190808192b082b, 0x08190808192b1919, + 0x08190808192b2b08, 0x081908082b080819, 0x081908082b081908, 0x081908082b08192b, + 0x081908082b190808, 0x081908082b191919, 0x081908082b192b08, 
0x081908082b2b0819, + 0x081908082b2b1908, 0x0819081908080808, 0x081908190808082b, 0x0819081908081919, + 0x0819081908082b08, 0x0819081908082b2b, 0x0819081908190819, 0x0819081908191908, + 0x081908190819192b, 0x0819081908192b19, 0x08190819082b0808, 0x08190819082b082b, + 0x08190819082b1919, 0x08190819082b2b08, 0x0819081919080819, 0x0819081919081908, + 0x081908191908192b, 0x0819081919082b19, 0x0819081919190808, 0x081908191919082b, + 0x0819081919191919, 0x0819081919192b08, 0x08190819192b0819, 0x08190819192b1908, + 0x081908192b080808, 0x081908192b08082b, 0x081908192b081919, 0x081908192b082b08, + 0x081908192b190819, 0x081908192b191908, 0x0819082b08080819, 0x0819082b08081908, + 0x0819082b08082b19, 0x0819082b08190808, 0x0819082b08191919, 0x0819082b082b0819, + 0x0819082b082b1908, 0x0819082b19080808, 0x0819082b19081919, 0x0819082b19190819, + 0x0819082b19191908, 0x0819082b2b080819, 0x0819082b2b081908, 0x0819082b2b190808, + 0x0819190808080808, 0x081919080808082b, 0x0819190808081919, 0x0819190808082b08, + 0x0819190808190819, 0x0819190808191908, 0x081919080819192b, 0x0819190808192b19, + 0x08191908082b0808, 0x08191908082b1919, 0x08191908082b2b08, 0x0819190819080819, + 0x0819190819081908, 0x081919081908192b, 0x0819190819082b19, 0x0819190819190808, + 0x081919081919082b, 0x0819190819191919, 0x0819190819192b08, 0x08191908192b0819, + 0x08191908192b1908, 0x081919082b080808, 0x081919082b08082b, 0x081919082b081919, + 0x081919082b082b08, 0x081919082b190819, 0x081919082b191908, 0x081919082b2b0808, + 0x0819191908080819, 0x0819191908081908, 0x081919190808192b, 0x0819191908082b19, + 0x0819191908190808, 0x081919190819082b, 0x0819191908191919, 0x0819191908192b08, + 0x08191919082b0819, 0x08191919082b1908, 0x0819191919080808, 0x081919191908082b, + 0x0819191919081919, 0x0819191919082b08, 0x0819191919190819, 0x0819191919191908, + 0x08191919192b0808, 0x081919192b080819, 0x081919192b081908, 0x081919192b190808, + 0x0819192b08080808, 0x0819192b08081919, 0x0819192b08082b08, 0x0819192b08190819, + 
0x0819192b08191908, 0x0819192b082b0808, 0x0819192b19080819, 0x0819192b19081908, + 0x0819192b19190808, 0x0819192b2b080808, 0x0819192b2b2b2b2b, 0x08192b0808080819, + 0x08192b0808081908, 0x08192b080808192b, 0x08192b0808082b19, 0x08192b0808190808, + 0x08192b0808191919, 0x08192b0808192b08, 0x08192b08082b0819, 0x08192b0819080808, + 0x08192b081908082b, 0x08192b0819081919, 0x08192b0819082b08, 0x08192b0819190819, + 0x08192b0819191908, 0x08192b08192b0808, 0x08192b082b080819, 0x08192b082b081908, + 0x08192b1908080808, 0x08192b190808082b, 0x08192b1908081919, 0x08192b1908082b08, + 0x08192b1908190819, 0x08192b1908191908, 0x08192b19082b0808, 0x08192b1919080819, + 0x08192b1919081908, 0x08192b1919190808, 0x08192b19192b2b19, 0x08192b192b2b082b, + 0x08192b2b08081908, 0x08192b2b08190808, 0x08192b2b19080808, 0x08192b2b1919192b, + 0x082b080808080808, 0x082b08080808082b, 0x082b080808081919, 0x082b080808082b08, + 0x082b080808190819, 0x082b080808191908, 0x082b08080819192b, 0x082b080808192b19, + 0x082b0808082b0808, 0x082b0808082b1919, 0x082b0808082b2b2b, 0x082b080819080819, + 0x082b080819081908, 0x082b080819190808, 0x082b08081919082b, 0x082b080819191919, + 0x082b0808192b1908, 0x082b08082b080808, 0x082b08082b082b2b, 0x082b08082b191908, + 0x082b08082b2b2b2b, 0x082b081908080819, 0x082b081908081908, 0x082b081908190808, + 0x082b08190819082b, 0x082b081908191919, 0x082b0819082b0819, 0x082b081919080808, + 0x082b08191908082b, 0x082b081919081919, 0x082b081919190819, 0x082b081919191908, + 0x082b0819192b0808, 0x082b08192b080819, 0x082b08192b081908, 0x082b08192b190808, + 0x082b082b08080808, 0x082b082b08082b2b, 0x082b082b082b082b, 0x082b082b082b2b08, + 0x082b082b082b2b2b, 0x082b082b19081908, 0x082b082b19190808, 0x082b082b2b082b08, + 0x082b082b2b082b2b, 0x082b082b2b2b2b08, 0x082b190808080819, 0x082b190808081908, + 0x082b19080808192b, 0x082b190808082b19, 0x082b190808190808, 0x082b190808191919, + 0x082b190808192b08, 0x082b1908082b0819, 0x082b1908082b1908, 0x082b190819080808, + 0x082b19081908082b, 
0x082b190819081919, 0x082b190819082b08, 0x082b190819190819, + 0x082b190819191908, 0x082b1908192b0808, 0x082b19082b080819, 0x082b19082b081908, + 0x082b19082b190808, 0x082b191908080808, 0x082b191908081919, 0x082b191908082b08, + 0x082b191908190819, 0x082b191908191908, 0x082b1919082b0808, 0x082b191919080819, + 0x082b191919081908, 0x082b191919190808, 0x082b1919192b192b, 0x082b19192b080808, + 0x082b192b08080819, 0x082b192b08081908, 0x082b192b08190808, 0x082b192b19080808, + 0x082b192b19192b19, 0x082b2b0808080808, 0x082b2b0808081919, 0x082b2b0808190819, + 0x082b2b0808191908, 0x082b2b0819080819, 0x082b2b0819081908, 0x082b2b0819190808, + 0x082b2b082b082b2b, 0x082b2b082b2b2b2b, 0x082b2b1908080819, 0x082b2b1908081908, + 0x082b2b1908190808, 0x082b2b192b191919, 0x082b2b2b08082b2b, 0x082b2b2b082b082b, + 0x082b2b2b192b1908, 0x082b2b2b2b082b08, 0x082b2b2b2b082b2b, 0x1908080808080819, + 0x1908080808081908, 0x190808080808192b, 0x1908080808082b19, 0x1908080808190808, + 0x190808080819082b, 0x1908080808191919, 0x1908080808192b08, 0x1908080808192b2b, + 0x19080808082b0819, 0x19080808082b1908, 0x19080808082b192b, 0x1908080819080808, + 0x190808081908082b, 0x1908080819081919, 0x1908080819082b08, 0x1908080819082b2b, + 0x1908080819190819, 0x1908080819191908, 0x190808081919192b, 0x1908080819192b19, + 0x19080808192b0808, 0x19080808192b082b, 0x19080808192b1919, 0x190808082b080819, + 0x190808082b081908, 0x190808082b190808, 0x190808082b191919, 0x190808082b192b08, + 0x190808082b2b0819, 0x190808082b2b1908, 0x1908081908080808, 0x190808190808082b, + 0x1908081908081919, 0x1908081908082b08, 0x1908081908190819, 0x1908081908191908, + 0x190808190819192b, 0x1908081908192b19, 0x19080819082b0808, 0x19080819082b082b, + 0x19080819082b1919, 0x1908081919080819, 0x1908081919081908, 0x190808191908192b, + 0x1908081919082b19, 0x1908081919190808, 0x190808191919082b, 0x1908081919191919, + 0x1908081919192b08, 0x19080819192b0819, 0x19080819192b1908, 0x190808192b080808, + 0x190808192b08082b, 0x190808192b081919, 
0x190808192b082b08, 0x190808192b190819, + 0x190808192b191908, 0x190808192b2b0808, 0x1908082b08080819, 0x1908082b08081908, + 0x1908082b08190808, 0x1908082b0819082b, 0x1908082b08191919, 0x1908082b08192b08, + 0x1908082b082b1908, 0x1908082b19080808, 0x1908082b19081919, 0x1908082b19082b08, + 0x1908082b19190819, 0x1908082b19191908, 0x1908082b192b0808, 0x1908082b2b080819, + 0x1908082b2b081908, 0x1908190808080808, 0x190819080808082b, 0x1908190808081919, + 0x1908190808082b08, 0x1908190808082b2b, 0x1908190808190819, 0x1908190808191908, + 0x190819080819192b, 0x1908190808192b19, 0x19081908082b0808, 0x19081908082b082b, + 0x19081908082b1919, 0x19081908082b2b08, 0x1908190819080819, 0x1908190819081908, + 0x190819081908192b, 0x1908190819082b19, 0x1908190819190808, 0x190819081919082b, + 0x1908190819191919, 0x1908190819192b08, 0x19081908192b0819, 0x19081908192b1908, + 0x190819082b080808, 0x190819082b08082b, 0x190819082b081919, 0x190819082b082b08, + 0x190819082b190819, 0x190819082b191908, 0x190819082b2b0808, 0x1908191908080819, + 0x1908191908081908, 0x190819190808192b, 0x1908191908082b19, 0x1908191908190808, + 0x190819190819082b, 0x1908191908191919, 0x1908191908192b08, 0x19081919082b0819, + 0x19081919082b1908, 0x1908191919080808, 0x190819191908082b, 0x1908191919081919, + 0x1908191919082b08, 0x1908191919190819, 0x1908191919191908, 0x19081919192b0808, + 0x19081919192b2b2b, 0x190819192b080819, 0x190819192b081908, 0x190819192b190808, + 0x1908192b08080808, 0x1908192b0808082b, 0x1908192b08081919, 0x1908192b08082b08, + 0x1908192b08190819, 0x1908192b08191908, 0x1908192b082b0808, 0x1908192b19080819, + 0x1908192b19081908, 0x1908192b19190808, 0x1908192b2b080808, 0x1908192b2b2b1919, + 0x19082b0808080819, 0x19082b0808081908, 0x19082b0808082b19, 0x19082b0808190808, + 0x19082b080819082b, 0x19082b0808191919, 0x19082b0808192b08, 0x19082b08082b0819, + 0x19082b08082b1908, 0x19082b0819080808, 0x19082b081908082b, 0x19082b0819081919, + 0x19082b0819082b08, 0x19082b0819190819, 0x19082b0819191908, 
0x19082b08192b0808, + 0x19082b082b081908, 0x19082b082b190808, 0x19082b1908080808, 0x19082b190808082b, + 0x19082b1908081919, 0x19082b1908082b08, 0x19082b1908190819, 0x19082b1908191908, + 0x19082b19082b0808, 0x19082b1919080819, 0x19082b1919081908, 0x19082b1919190808, + 0x19082b192b080808, 0x19082b192b19192b, 0x19082b2b08080819, 0x19082b2b08081908, + 0x19082b2b08190808, 0x19082b2b19080808, 0x1919080808080808, 0x191908080808082b, + 0x1919080808081919, 0x1919080808082b08, 0x1919080808190819, 0x1919080808191908, + 0x191908080819192b, 0x1919080808192b19, 0x19190808082b0808, 0x19190808082b082b, + 0x19190808082b1919, 0x19190808082b2b08, 0x1919080819080819, 0x1919080819081908, + 0x191908081908192b, 0x1919080819082b19, 0x1919080819190808, 0x191908081919082b, + 0x1919080819191919, 0x1919080819192b08, 0x19190808192b0819, 0x19190808192b1908, + 0x191908082b080808, 0x191908082b08082b, 0x191908082b081919, 0x191908082b082b08, + 0x191908082b190819, 0x191908082b191908, 0x1919081908080819, 0x1919081908081908, + 0x191908190808192b, 0x1919081908082b19, 0x1919081908190808, 0x191908190819082b, + 0x1919081908191919, 0x1919081908192b08, 0x19190819082b0819, 0x19190819082b1908, + 0x1919081919080808, 0x191908191908082b, 0x1919081919081919, 0x1919081919082b08, + 0x1919081919190819, 0x1919081919191908, 0x19190819192b0808, 0x191908192b080819, + 0x191908192b081908, 0x191908192b190808, 0x1919082b08080808, 0x1919082b08081919, + 0x1919082b08082b08, 0x1919082b08190819, 0x1919082b08191908, 0x1919082b082b0808, + 0x1919082b19080819, 0x1919082b19081908, 0x1919082b19190808, 0x1919082b192b2b19, + 0x1919082b2b080808, 0x1919190808080819, 0x1919190808081908, 0x191919080808192b, + 0x1919190808082b19, 0x1919190808190808, 0x191919080819082b, 0x1919190808191919, + 0x1919190808192b08, 0x19191908082b0819, 0x19191908082b1908, 0x1919190819080808, + 0x191919081908082b, 0x1919190819081919, 0x1919190819082b08, 0x1919190819190819, + 0x1919190819191908, 0x19191908192b0808, 0x191919082b080819, 0x191919082b081908, + 
0x191919082b190808, 0x1919191908080808, 0x191919190808082b, 0x1919191908081919, + 0x1919191908082b08, 0x1919191908190819, 0x1919191908191908, 0x19191919082b0808, + 0x1919191919080819, 0x1919191919081908, 0x1919191919190808, 0x191919192b080808, + 0x1919192b08080819, 0x1919192b08081908, 0x1919192b08190808, 0x1919192b082b192b, + 0x1919192b19080808, 0x19192b0808080808, 0x19192b080808082b, 0x19192b0808081919, + 0x19192b0808082b08, 0x19192b0808190819, 0x19192b0808191908, 0x19192b08082b0808, + 0x19192b0819080819, 0x19192b0819081908, 0x19192b0819190808, 0x19192b0819192b2b, + 0x19192b082b080808, 0x19192b1908080819, 0x19192b1908081908, 0x19192b1908190808, + 0x19192b1919080808, 0x19192b2b08080808, 0x19192b2b08192b19, 0x19192b2b2b081919, + 0x19192b2b2b2b2b08, 0x192b080808080819, 0x192b080808081908, 0x192b08080808192b, + 0x192b080808190808, 0x192b08080819082b, 0x192b080808191919, 0x192b080808192b08, + 0x192b0808082b0819, 0x192b0808082b1908, 0x192b080819080808, 0x192b080819081919, + 0x192b080819082b08, 0x192b080819190819, 0x192b080819191908, 0x192b0808192b0808, + 0x192b08082b081908, 0x192b08082b190808, 0x192b081908080808, 0x192b08190808082b, + 0x192b081908081919, 0x192b081908082b08, 0x192b081908190819, 0x192b081908191908, + 0x192b0819082b0808, 0x192b081919080819, 0x192b081919081908, 0x192b081919190808, + 0x192b08192b080808, 0x192b08192b192b19, 0x192b082b08081908, 0x192b082b08190808, + 0x192b082b19080808, 0x192b082b1919192b, 0x192b082b2b2b0819, 0x192b190808080808, + 0x192b190808081919, 0x192b190808082b08, 0x192b190808190819, 0x192b190808191908, + 0x192b1908082b0808, 0x192b190819080819, 0x192b190819081908, 0x192b190819190808, + 0x192b19082b080808, 0x192b191908080819, 0x192b191908081908, 0x192b191908190808, + 0x192b191919080808, 0x192b191919082b2b, 0x192b1919192b2b08, 0x192b19192b19082b, + 0x192b192b08080808, 0x192b192b2b191908, 0x192b2b0808080819, 0x192b2b0808081908, + 0x192b2b0808190808, 0x192b2b08192b1919, 0x192b2b082b192b08, 0x192b2b1908080808, + 0x192b2b19082b2b2b, 
0x192b2b2b1908082b, 0x192b2b2b2b2b0819, 0x2b08080808080808, + 0x2b0808080808082b, 0x2b08080808081919, 0x2b08080808082b08, 0x2b08080808190819, + 0x2b08080808191908, 0x2b08080808192b19, 0x2b080808082b0808, 0x2b080808082b1919, + 0x2b08080819080819, 0x2b08080819081908, 0x2b08080819190808, 0x2b0808081919082b, + 0x2b08080819191919, 0x2b08080819192b08, 0x2b080808192b0819, 0x2b0808082b080808, + 0x2b0808082b081919, 0x2b0808082b190819, 0x2b0808082b191908, 0x2b08081908080819, + 0x2b08081908081908, 0x2b08081908082b19, 0x2b08081908190808, 0x2b0808190819082b, + 0x2b08081908191919, 0x2b08081908192b08, 0x2b080819082b0819, 0x2b080819082b1908, + 0x2b08081919080808, 0x2b0808191908082b, 0x2b08081919081919, 0x2b08081919082b08, + 0x2b08081919190819, 0x2b08081919191908, 0x2b0808192b080819, 0x2b0808192b081908, + 0x2b0808192b190808, 0x2b0808192b2b2b19, 0x2b08082b08080808, 0x2b08082b08081919, + 0x2b08082b08082b2b, 0x2b08082b08190819, 0x2b08082b08191908, 0x2b08082b19080819, + 0x2b08082b19081908, 0x2b08082b19190808, 0x2b08190808080819, 0x2b08190808081908, + 0x2b0819080808192b, 0x2b08190808082b19, 0x2b08190808190808, 0x2b0819080819082b, + 0x2b08190808191919, 0x2b08190808192b08, 0x2b081908082b0819, 0x2b08190819080808, + 0x2b0819081908082b, 0x2b08190819081919, 0x2b08190819082b08, 0x2b08190819190819, + 0x2b08190819191908, 0x2b081908192b0808, 0x2b0819082b080819, 0x2b0819082b081908, + 0x2b0819082b190808, 0x2b08191908080808, 0x2b0819190808082b, 0x2b08191908081919, + 0x2b08191908082b08, 0x2b08191908190819, 0x2b08191908191908, 0x2b081919082b0808, + 0x2b08191919080819, 0x2b08191919081908, 0x2b08191919190808, 0x2b0819192b080808, + 0x2b0819192b082b2b, 0x2b08192b08080819, 0x2b08192b08081908, 0x2b08192b08190808, + 0x2b08192b082b2b19, 0x2b08192b19080808, 0x2b082b0808080808, 0x2b082b0808081919, + 0x2b082b0808190819, 0x2b082b0808191908, 0x2b082b0819080819, 0x2b082b0819081908, + 0x2b082b0819190808, 0x2b082b082b2b082b, 0x2b082b1908080819, 0x2b082b1908081908, + 0x2b082b1919080808, 0x2b082b19192b1919, 
0x2b082b2b082b082b, 0x2b082b2b19192b08, + 0x2b082b2b19192b2b, 0x2b082b2b2b08082b, 0x2b082b2b2b2b082b, 0x2b19080808080819, + 0x2b19080808081908, 0x2b19080808082b19, 0x2b19080808190808, 0x2b1908080819082b, + 0x2b19080808191919, 0x2b19080808192b08, 0x2b190808082b1908, 0x2b19080819080808, + 0x2b1908081908082b, 0x2b19080819081919, 0x2b19080819082b08, 0x2b19080819190819, + 0x2b19080819191908, 0x2b190808192b0808, 0x2b1908082b080819, 0x2b1908082b081908, + 0x2b1908082b190808, 0x2b19081908080808, 0x2b19081908081919, 0x2b19081908190819, + 0x2b19081908191908, 0x2b19081919080819, 0x2b19081919081908, 0x2b19081919190808, + 0x2b19081919192b2b, 0x2b19082b08080819, 0x2b19082b08081908, 0x2b19082b08190808, + 0x2b19082b19080808, 0x2b19082b2b2b192b, 0x2b19190808080808, 0x2b1919080808082b, + 0x2b19190808081919, 0x2b19190808082b08, 0x2b19190808190819, 0x2b19190808191908, + 0x2b191908082b0808, 0x2b19190819080819, 0x2b19190819081908, 0x2b19190819190808, + 0x2b1919082b080808, 0x2b1919082b19192b, 0x2b19191908080819, 0x2b19191908081908, + 0x2b19191908190808, 0x2b19191919080808, 0x2b1919192b192b08, 0x2b1919192b2b0819, + 0x2b19192b08080808, 0x2b19192b1908192b, 0x2b19192b192b1908, 0x2b192b0808080819, + 0x2b192b0808081908, 0x2b192b0808190808, 0x2b192b08082b192b, 0x2b192b0819080808, + 0x2b192b082b2b2b19, 0x2b192b1908080808, 0x2b192b1919082b19, 0x2b192b191919082b, + 0x2b192b2b2b190808, 0x2b2b080808080808, 0x2b2b080808081919, 0x2b2b080808082b2b, + 0x2b2b080808191908, 0x2b2b0808082b082b, 0x2b2b0808082b2b2b, 0x2b2b080819080819, + 0x2b2b080819081908, 0x2b2b080819190808, 0x2b2b08082b2b082b, 0x2b2b08082b2b2b2b, + 0x2b2b081919080808, 0x2b2b0819192b1919, 0x2b2b082b0808082b, 0x2b2b082b08082b2b, + 0x2b2b082b082b082b, 0x2b2b082b082b2b08, 0x2b2b082b082b2b2b, 0x2b2b082b2b08082b, + 0x2b2b082b2b082b08, 0x2b2b082b2b082b2b, 0x2b2b082b2b2b2b08, 0x2b2b190808080819, + 0x2b2b190808081908, 0x2b2b190808190808, 0x2b2b190819080808, 0x2b2b19082b082b19, + 0x2b2b19082b2b1908, 0x2b2b191908080808, 0x2b2b191908192b19, 
0x2b2b192b19190819, + 0x2b2b2b0808082b2b, 0x2b2b2b08082b2b08, 0x2b2b2b082b2b082b, 0x2b2b2b1919191908, + 0x2b2b2b192b08192b, 0x2b2b2b2b08082b08, 0x2b2b2b2b08082b2b, 0x2b2b2b2b082b0808, + 0x2b2b2b2b082b082b, 0x2b2b2b2b082b2b08, 0x2b2b2b2b2b082b08, 0x2b2b2b2b2b2b2b2b, +GGML_TABLE_END() + +GGML_TABLE_BEGIN(uint32_t, iq3xxs_grid, 256) + 0x04040404, 0x04040414, 0x04040424, 0x04040c0c, 0x04040c1c, 0x04040c3e, 0x04041404, 0x04041414, + 0x04041c0c, 0x04042414, 0x04043e1c, 0x04043e2c, 0x040c040c, 0x040c041c, 0x040c0c04, 0x040c0c14, + 0x040c140c, 0x040c142c, 0x040c1c04, 0x040c1c14, 0x040c240c, 0x040c2c24, 0x040c3e04, 0x04140404, + 0x04140414, 0x04140424, 0x04140c0c, 0x04141404, 0x04141414, 0x04141c0c, 0x04141c1c, 0x04141c3e, + 0x04142c0c, 0x04142c3e, 0x04143e2c, 0x041c040c, 0x041c043e, 0x041c0c04, 0x041c0c14, 0x041c142c, + 0x041c3e04, 0x04240c1c, 0x04241c3e, 0x04242424, 0x04242c3e, 0x04243e1c, 0x04243e2c, 0x042c040c, + 0x042c043e, 0x042c1c14, 0x042c2c14, 0x04341c2c, 0x04343424, 0x043e0c04, 0x043e0c24, 0x043e0c34, + 0x043e241c, 0x043e340c, 0x0c04040c, 0x0c04041c, 0x0c040c04, 0x0c040c14, 0x0c04140c, 0x0c04141c, + 0x0c041c04, 0x0c041c14, 0x0c041c24, 0x0c04243e, 0x0c042c04, 0x0c0c0404, 0x0c0c0414, 0x0c0c0c0c, + 0x0c0c1404, 0x0c0c1414, 0x0c14040c, 0x0c14041c, 0x0c140c04, 0x0c140c14, 0x0c14140c, 0x0c141c04, + 0x0c143e14, 0x0c1c0404, 0x0c1c0414, 0x0c1c1404, 0x0c1c1c0c, 0x0c1c2434, 0x0c1c3434, 0x0c24040c, + 0x0c24042c, 0x0c242c04, 0x0c2c1404, 0x0c2c1424, 0x0c2c2434, 0x0c2c3e0c, 0x0c34042c, 0x0c3e1414, + 0x0c3e2404, 0x14040404, 0x14040414, 0x14040c0c, 0x14040c1c, 0x14041404, 0x14041414, 0x14041434, + 0x14041c0c, 0x14042414, 0x140c040c, 0x140c041c, 0x140c042c, 0x140c0c04, 0x140c0c14, 0x140c140c, + 0x140c1c04, 0x140c341c, 0x140c343e, 0x140c3e04, 0x14140404, 0x14140414, 0x14140c0c, 0x14140c3e, + 0x14141404, 0x14141414, 0x14141c3e, 0x14142404, 0x14142c2c, 0x141c040c, 0x141c0c04, 0x141c0c24, + 0x141c3e04, 0x141c3e24, 0x14241c2c, 0x14242c1c, 0x142c041c, 0x142c143e, 0x142c240c, 0x142c3e24, + 
0x143e040c, 0x143e041c, 0x143e0c34, 0x143e242c, 0x1c04040c, 0x1c040c04, 0x1c040c14, 0x1c04140c, + 0x1c04141c, 0x1c042c04, 0x1c04342c, 0x1c043e14, 0x1c0c0404, 0x1c0c0414, 0x1c0c1404, 0x1c0c1c0c, + 0x1c0c2424, 0x1c0c2434, 0x1c14040c, 0x1c14041c, 0x1c140c04, 0x1c14142c, 0x1c142c14, 0x1c143e14, + 0x1c1c0c0c, 0x1c1c1c1c, 0x1c241c04, 0x1c24243e, 0x1c243e14, 0x1c2c0404, 0x1c2c0434, 0x1c2c1414, + 0x1c2c2c2c, 0x1c340c24, 0x1c341c34, 0x1c34341c, 0x1c3e1c1c, 0x1c3e3404, 0x24040424, 0x24040c3e, + 0x24041c2c, 0x24041c3e, 0x24042c1c, 0x24042c3e, 0x240c3e24, 0x24141404, 0x24141c3e, 0x24142404, + 0x24143404, 0x24143434, 0x241c043e, 0x241c242c, 0x24240424, 0x24242c0c, 0x24243424, 0x242c142c, + 0x242c241c, 0x242c3e04, 0x243e042c, 0x243e0c04, 0x243e0c14, 0x243e1c04, 0x2c040c14, 0x2c04240c, + 0x2c043e04, 0x2c0c0404, 0x2c0c0434, 0x2c0c1434, 0x2c0c2c2c, 0x2c140c24, 0x2c141c14, 0x2c143e14, + 0x2c1c0414, 0x2c1c2c1c, 0x2c240c04, 0x2c24141c, 0x2c24143e, 0x2c243e14, 0x2c2c0414, 0x2c2c1c0c, + 0x2c342c04, 0x2c3e1424, 0x2c3e2414, 0x34041424, 0x34042424, 0x34042434, 0x34043424, 0x340c140c, + 0x340c340c, 0x34140c3e, 0x34143424, 0x341c1c04, 0x341c1c34, 0x34242424, 0x342c042c, 0x342c2c14, + 0x34341c1c, 0x343e041c, 0x343e140c, 0x3e04041c, 0x3e04042c, 0x3e04043e, 0x3e040c04, 0x3e041c14, + 0x3e042c14, 0x3e0c1434, 0x3e0c2404, 0x3e140c14, 0x3e14242c, 0x3e142c14, 0x3e1c0404, 0x3e1c0c2c, + 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, +GGML_TABLE_END() + +GGML_TABLE_BEGIN(uint32_t, iq3s_grid, 512) + 0x01010101, 0x01010103, 0x01010105, 0x0101010b, 0x0101010f, 0x01010301, 0x01010303, 0x01010305, + 0x01010309, 0x0101030d, 0x01010501, 0x01010503, 0x0101050b, 0x01010707, 0x01010901, 0x01010905, + 0x0101090b, 0x0101090f, 0x01010b03, 0x01010b07, 0x01010d01, 0x01010d05, 0x01010f03, 0x01010f09, + 0x01010f0f, 0x01030101, 0x01030103, 0x01030105, 0x01030109, 0x01030301, 0x01030303, 0x0103030b, + 0x01030501, 0x01030507, 0x0103050f, 0x01030703, 0x0103070b, 0x01030909, 
0x01030d03, 0x01030d0b, + 0x01030f05, 0x01050101, 0x01050103, 0x0105010b, 0x0105010f, 0x01050301, 0x01050307, 0x0105030d, + 0x01050503, 0x0105050b, 0x01050701, 0x01050709, 0x01050905, 0x0105090b, 0x0105090f, 0x01050b03, + 0x01050b07, 0x01050f01, 0x01050f07, 0x01070107, 0x01070303, 0x0107030b, 0x01070501, 0x01070505, + 0x01070703, 0x01070707, 0x0107070d, 0x01070909, 0x01070b01, 0x01070b05, 0x01070d0f, 0x01070f03, + 0x01070f0b, 0x01090101, 0x01090307, 0x0109030f, 0x01090503, 0x01090509, 0x01090705, 0x01090901, + 0x01090907, 0x01090b03, 0x01090f01, 0x010b0105, 0x010b0109, 0x010b0501, 0x010b0505, 0x010b050d, + 0x010b0707, 0x010b0903, 0x010b090b, 0x010b090f, 0x010b0d0d, 0x010b0f07, 0x010d010d, 0x010d0303, + 0x010d0307, 0x010d0703, 0x010d0b05, 0x010d0f03, 0x010f0101, 0x010f0105, 0x010f0109, 0x010f0501, + 0x010f0505, 0x010f050d, 0x010f0707, 0x010f0b01, 0x010f0b09, 0x03010101, 0x03010103, 0x03010105, + 0x03010109, 0x03010301, 0x03010303, 0x03010307, 0x0301030b, 0x0301030f, 0x03010501, 0x03010505, + 0x03010703, 0x03010709, 0x0301070d, 0x03010b09, 0x03010b0d, 0x03010d03, 0x03010f05, 0x03030101, + 0x03030103, 0x03030107, 0x0303010d, 0x03030301, 0x03030309, 0x03030503, 0x03030701, 0x03030707, + 0x03030903, 0x03030b01, 0x03030b05, 0x03030f01, 0x03030f0d, 0x03050101, 0x03050305, 0x0305030b, + 0x0305030f, 0x03050501, 0x03050509, 0x03050705, 0x03050901, 0x03050907, 0x03050b0b, 0x03050d01, + 0x03050f05, 0x03070103, 0x03070109, 0x0307010f, 0x03070301, 0x03070307, 0x03070503, 0x0307050f, + 0x03070701, 0x03070709, 0x03070903, 0x03070d05, 0x03070f01, 0x03090107, 0x0309010b, 0x03090305, + 0x03090309, 0x03090703, 0x03090707, 0x03090905, 0x0309090d, 0x03090b01, 0x03090b09, 0x030b0103, + 0x030b0301, 0x030b0307, 0x030b0503, 0x030b0701, 0x030b0705, 0x030b0b03, 0x030d0501, 0x030d0509, + 0x030d050f, 0x030d0909, 0x030d090d, 0x030f0103, 0x030f0107, 0x030f0301, 0x030f0305, 0x030f0503, + 0x030f070b, 0x030f0903, 0x030f0d05, 0x030f0f01, 0x05010101, 0x05010103, 0x05010107, 0x0501010b, + 0x0501010f, 
0x05010301, 0x05010305, 0x05010309, 0x0501030d, 0x05010503, 0x05010507, 0x0501050f, + 0x05010701, 0x05010705, 0x05010903, 0x05010907, 0x0501090b, 0x05010b01, 0x05010b05, 0x05010d0f, + 0x05010f01, 0x05010f07, 0x05010f0b, 0x05030101, 0x05030105, 0x05030301, 0x05030307, 0x0503030f, + 0x05030505, 0x0503050b, 0x05030703, 0x05030709, 0x05030905, 0x05030b03, 0x05050103, 0x05050109, + 0x0505010f, 0x05050503, 0x05050507, 0x05050701, 0x0505070f, 0x05050903, 0x05050b07, 0x05050b0f, + 0x05050f03, 0x05050f09, 0x05070101, 0x05070105, 0x0507010b, 0x05070303, 0x05070505, 0x05070509, + 0x05070703, 0x05070707, 0x05070905, 0x05070b01, 0x05070d0d, 0x05090103, 0x0509010f, 0x05090501, + 0x05090507, 0x05090705, 0x0509070b, 0x05090903, 0x05090f05, 0x05090f0b, 0x050b0109, 0x050b0303, + 0x050b0505, 0x050b070f, 0x050b0901, 0x050b0b07, 0x050b0f01, 0x050d0101, 0x050d0105, 0x050d010f, + 0x050d0503, 0x050d0b0b, 0x050d0d03, 0x050f010b, 0x050f0303, 0x050f050d, 0x050f0701, 0x050f0907, + 0x050f0b01, 0x07010105, 0x07010303, 0x07010307, 0x0701030b, 0x0701030f, 0x07010505, 0x07010703, + 0x07010707, 0x0701070b, 0x07010905, 0x07010909, 0x0701090f, 0x07010b03, 0x07010d07, 0x07010f03, + 0x07030103, 0x07030107, 0x0703010b, 0x07030309, 0x07030503, 0x07030507, 0x07030901, 0x07030d01, + 0x07030f05, 0x07030f0d, 0x07050101, 0x07050305, 0x07050501, 0x07050705, 0x07050709, 0x07050b01, + 0x07070103, 0x07070301, 0x07070309, 0x07070503, 0x07070507, 0x0707050f, 0x07070701, 0x07070903, + 0x07070907, 0x0707090f, 0x07070b0b, 0x07070f07, 0x07090107, 0x07090303, 0x0709030d, 0x07090505, + 0x07090703, 0x07090b05, 0x07090d01, 0x07090d09, 0x070b0103, 0x070b0301, 0x070b0305, 0x070b050b, + 0x070b0705, 0x070b0909, 0x070b0b0d, 0x070b0f07, 0x070d030d, 0x070d0903, 0x070f0103, 0x070f0107, + 0x070f0501, 0x070f0505, 0x070f070b, 0x09010101, 0x09010109, 0x09010305, 0x09010501, 0x09010509, + 0x0901050f, 0x09010705, 0x09010903, 0x09010b01, 0x09010f01, 0x09030105, 0x0903010f, 0x09030303, + 0x09030307, 0x09030505, 0x09030701, 0x0903070b, 
0x09030907, 0x09030b03, 0x09030b0b, 0x09050103, + 0x09050107, 0x09050301, 0x0905030b, 0x09050503, 0x09050707, 0x09050901, 0x09050b0f, 0x09050d05, + 0x09050f01, 0x09070109, 0x09070303, 0x09070307, 0x09070501, 0x09070505, 0x09070703, 0x0907070b, + 0x09090101, 0x09090105, 0x09090509, 0x0909070f, 0x09090901, 0x09090f03, 0x090b010b, 0x090b010f, + 0x090b0503, 0x090b0d05, 0x090d0307, 0x090d0709, 0x090d0d01, 0x090f0301, 0x090f030b, 0x090f0701, + 0x090f0907, 0x090f0b03, 0x0b010105, 0x0b010301, 0x0b010309, 0x0b010505, 0x0b010901, 0x0b010909, + 0x0b01090f, 0x0b010b05, 0x0b010d0d, 0x0b010f09, 0x0b030103, 0x0b030107, 0x0b03010b, 0x0b030305, + 0x0b030503, 0x0b030705, 0x0b030f05, 0x0b050101, 0x0b050303, 0x0b050507, 0x0b050701, 0x0b05070d, + 0x0b050b07, 0x0b070105, 0x0b07010f, 0x0b070301, 0x0b07050f, 0x0b070909, 0x0b070b03, 0x0b070d0b, + 0x0b070f07, 0x0b090103, 0x0b090109, 0x0b090501, 0x0b090705, 0x0b09090d, 0x0b0b0305, 0x0b0b050d, + 0x0b0b0b03, 0x0b0b0b07, 0x0b0d0905, 0x0b0f0105, 0x0b0f0109, 0x0b0f0505, 0x0d010303, 0x0d010307, + 0x0d01030b, 0x0d010703, 0x0d010707, 0x0d010d01, 0x0d030101, 0x0d030501, 0x0d03050f, 0x0d030d09, + 0x0d050305, 0x0d050709, 0x0d050905, 0x0d050b0b, 0x0d050d05, 0x0d050f01, 0x0d070101, 0x0d070309, + 0x0d070503, 0x0d070901, 0x0d09050b, 0x0d090907, 0x0d090d05, 0x0d0b0101, 0x0d0b0107, 0x0d0b0709, + 0x0d0b0d01, 0x0d0d010b, 0x0d0d0901, 0x0d0f0303, 0x0d0f0307, 0x0f010101, 0x0f010109, 0x0f01010f, + 0x0f010501, 0x0f010505, 0x0f01070d, 0x0f010901, 0x0f010b09, 0x0f010d05, 0x0f030105, 0x0f030303, + 0x0f030509, 0x0f030907, 0x0f03090b, 0x0f050103, 0x0f050109, 0x0f050301, 0x0f05030d, 0x0f050503, + 0x0f050701, 0x0f050b03, 0x0f070105, 0x0f070705, 0x0f07070b, 0x0f070b07, 0x0f090103, 0x0f09010b, + 0x0f090307, 0x0f090501, 0x0f090b01, 0x0f0b0505, 0x0f0b0905, 0x0f0d0105, 0x0f0d0703, 0x0f0f0101, +GGML_TABLE_END() + +#define NGRID_IQ1S 2048 +#define IQ1S_DELTA 0.125f +#define IQ1M_DELTA 0.125f +#if defined(GGML_COMMON_IMPL_C) +GGML_TABLE_BEGIN(uint64_t, iq1s_grid, NGRID_IQ1S) + 
0xffffffffffffffff, 0xffffffffffffff01, 0xffffffffffff0000, 0xffffffffffff01ff, + 0xffffffffffff0101, 0xffffffffff00ff00, 0xffffffffff000000, 0xffffffffff01ffff, + 0xffffffffff01ff01, 0xffffffffff0101ff, 0xffffffffff010101, 0xffffffff00ff0000, + 0xffffffff0000ff00, 0xffffffff000000ff, 0xffffffff00000001, 0xffffffff00010000, + 0xffffffff01ffffff, 0xffffffff01ffff01, 0xffffffff01ff01ff, 0xffffffff01ff0101, + 0xffffffff01000000, 0xffffffff0101ffff, 0xffffffff0101ff01, 0xffffffff010101ff, + 0xffffffff01010101, 0xffffff00ffff00ff, 0xffffff00ffff0000, 0xffffff00ff00ff00, + 0xffffff00ff0000ff, 0xffffff00ff000001, 0xffffff00ff000100, 0xffffff00ff000101, + 0xffffff00ff010000, 0xffffff0000ffff00, 0xffffff0000ff0001, 0xffffff0000ff0100, + 0xffffff000000ff01, 0xffffff0000000000, 0xffffff0000000101, 0xffffff000001ff00, + 0xffffff00000100ff, 0xffffff0000010001, 0xffffff00000101ff, 0xffffff0001ff0000, + 0xffffff000100ff00, 0xffffff00010000ff, 0xffffff0001000001, 0xffffff0001010000, + 0xffffff01ffffffff, 0xffffff01ffffff01, 0xffffff01ffff01ff, 0xffffff01ffff0101, + 0xffffff01ff000000, 0xffffff01ff01ffff, 0xffffff01ff01ff01, 0xffffff01ff0101ff, + 0xffffff01ff010101, 0xffffff0100ff0000, 0xffffff010000ff00, 0xffffff0100000100, + 0xffffff01000100ff, 0xffffff0100010100, 0xffffff0101ffffff, 0xffffff0101ffff01, + 0xffffff0101ff01ff, 0xffffff0101ff0101, 0xffffff010100ff00, 0xffffff0101000000, + 0xffffff0101000100, 0xffffff010101ffff, 0xffffff010101ff01, 0xffffff01010101ff, + 0xffffff0101010101, 0xffff00ffff00ff00, 0xffff00ffff0000ff, 0xffff00ffff000001, + 0xffff00ffff010000, 0xffff00ff00ffff00, 0xffff00ff00ff0100, 0xffff00ff00000000, + 0xffff00ff00000101, 0xffff00ff000100ff, 0xffff00ff00010000, 0xffff00ff0100ff00, + 0xffff00ff01000100, 0xffff00ff01010000, 0xffff0000ffffff00, 0xffff0000ffff00ff, + 0xffff0000ffff0000, 0xffff0000ffff0001, 0xffff0000ff000000, 0xffff0000ff0001ff, + 0xffff0000ff000101, 0xffff0000ff010100, 0xffff000000ffffff, 0xffff000000ff0000, + 0xffff000000ff0101, 
0xffff00000000ffff, 0xffff00000000ff00, 0xffff0000000000ff, + 0xffff000000000000, 0xffff000000000001, 0xffff000000000100, 0xffff00000001ffff, + 0xffff00000001ff01, 0xffff000000010000, 0xffff0000000101ff, 0xffff000000010101, + 0xffff000001ffff00, 0xffff00000100ff00, 0xffff000001000000, 0xffff0000010001ff, + 0xffff000001000101, 0xffff00000101ff00, 0xffff0000010100ff, 0xffff000001010000, + 0xffff000001010001, 0xffff000001010100, 0xffff0001ff0000ff, 0xffff0001ff000100, + 0xffff000100ffff00, 0xffff000100ff00ff, 0xffff00010000ffff, 0xffff00010000ff01, + 0xffff000100000000, 0xffff0001000001ff, 0xffff00010001ffff, 0xffff00010001ff00, + 0xffff000100010001, 0xffff000100010100, 0xffff000101ff0000, 0xffff00010100ff00, + 0xffff0001010000ff, 0xffff000101000100, 0xffff01ffffffffff, 0xffff01ffffffff01, + 0xffff01ffffff01ff, 0xffff01ffffff0101, 0xffff01ffff000000, 0xffff01ffff01ffff, + 0xffff01ffff01ff01, 0xffff01ffff0101ff, 0xffff01ffff010101, 0xffff01ff00ff0000, + 0xffff01ff0000ff00, 0xffff01ff00000001, 0xffff01ff00010000, 0xffff01ff01ffffff, + 0xffff01ff01ffff01, 0xffff01ff01ff01ff, 0xffff01ff01ff0101, 0xffff01ff01000000, + 0xffff01ff0101ffff, 0xffff01ff0101ff01, 0xffff01ff010101ff, 0xffff01ff01010101, + 0xffff0100ffff0000, 0xffff0100ff00ff00, 0xffff0100ff0000ff, 0xffff0100ff000100, + 0xffff0100ff0100ff, 0xffff0100ff010000, 0xffff010000ffff00, 0xffff01000000ffff, + 0xffff01000000ff00, 0xffff010000000000, 0xffff01000001ff00, 0xffff0100000100ff, + 0xffff010000010100, 0xffff01000100ff00, 0xffff0100010000ff, 0xffff010001000001, + 0xffff010001000100, 0xffff010001010000, 0xffff0101ffffffff, 0xffff0101ffffff01, + 0xffff0101ffff01ff, 0xffff0101ffff0101, 0xffff0101ff000000, 0xffff0101ff01ffff, + 0xffff0101ff01ff01, 0xffff0101ff0101ff, 0xffff0101ff010101, 0xffff010100ff0000, + 0xffff01010000ff00, 0xffff010100000100, 0xffff01010001ff00, 0xffff010100010000, + 0xffff010101ffffff, 0xffff010101ffff01, 0xffff010101ff0000, 0xffff010101ff01ff, + 0xffff010101ff0101, 0xffff010101000000, 
0xffff01010101ffff, 0xffff01010101ff01, + 0xffff0101010101ff, 0xffff010101010101, 0xff00ffffff00ffff, 0xff00ffffff00ff00, + 0xff00ffffff0000ff, 0xff00ffffff000100, 0xff00ffffff0100ff, 0xff00ffffff010000, + 0xff00ffff00ffff00, 0xff00ffff00ff00ff, 0xff00ffff0000ffff, 0xff00ffff00000000, + 0xff00ffff000001ff, 0xff00ffff0001ff00, 0xff00ffff000100ff, 0xff00ffff00010000, + 0xff00ffff00010100, 0xff00ffff0100ff00, 0xff00ffff010000ff, 0xff00ffff01000001, + 0xff00ffff0101ff00, 0xff00ffff01010000, 0xff00ff00ffffff00, 0xff00ff00ffff00ff, + 0xff00ff00ffff0001, 0xff00ff00ffff0100, 0xff00ff00ff00ffff, 0xff00ff00ff00ff01, + 0xff00ff00ff000000, 0xff00ff00ff0001ff, 0xff00ff00ff01ff00, 0xff00ff00ff0100ff, + 0xff00ff00ff010100, 0xff00ff0000ff0000, 0xff00ff0000ff0101, 0xff00ff000000ffff, + 0xff00ff000000ff00, 0xff00ff000000ff01, 0xff00ff00000000ff, 0xff00ff0000000000, + 0xff00ff0000000001, 0xff00ff0000000100, 0xff00ff000001ffff, 0xff00ff0000010000, + 0xff00ff0001ff00ff, 0xff00ff000100ff01, 0xff00ff0001000000, 0xff00ff000101ff00, + 0xff00ff00010100ff, 0xff00ff01ff00ff00, 0xff00ff01ff0000ff, 0xff00ff01ff000001, + 0xff00ff01ff010000, 0xff00ff0100ffffff, 0xff00ff0100ff0001, 0xff00ff0100ff0100, + 0xff00ff010000ff01, 0xff00ff0100000000, 0xff00ff01000001ff, 0xff00ff0100000101, + 0xff00ff01000100ff, 0xff00ff0100010001, 0xff00ff0101ff0000, 0xff00ff010100ff00, + 0xff00ff01010000ff, 0xff00ff0101000001, 0xff00ff0101010000, 0xff0000ffffffff00, + 0xff0000ffffff0001, 0xff0000ffffff0100, 0xff0000ffff0000ff, 0xff0000ffff000000, + 0xff0000ffff0001ff, 0xff0000ffff000100, 0xff0000ffff01ff00, 0xff0000ffff010001, + 0xff0000ff00ffff00, 0xff0000ff00ff0000, 0xff0000ff00ff0001, 0xff0000ff00ff01ff, + 0xff0000ff00ff0101, 0xff0000ff0000ff00, 0xff0000ff000000ff, 0xff0000ff00000000, + 0xff0000ff00000001, 0xff0000ff00000100, 0xff0000ff0001ff01, 0xff0000ff00010000, + 0xff0000ff000101ff, 0xff0000ff01ff00ff, 0xff0000ff01ff0100, 0xff0000ff0100ffff, + 0xff0000ff010000ff, 0xff0000ff01000000, 0xff0000ff010001ff, 
0xff0000ff01000100, + 0xff0000ff01000101, 0xff0000ff0101ff00, 0xff0000ff010100ff, 0xff0000ff01010000, + 0xff0000ff01010100, 0xff000000ffffff01, 0xff000000ffff0000, 0xff000000ffff0101, + 0xff000000ff00ff00, 0xff000000ff0000ff, 0xff000000ff000000, 0xff000000ff000001, + 0xff000000ff000100, 0xff000000ff01ffff, 0xff000000ff01ff01, 0xff000000ff010000, + 0xff000000ff0101ff, 0xff000000ff010101, 0xff00000000ffff00, 0xff00000000ff00ff, + 0xff00000000ff0000, 0xff00000000ff0001, 0xff0000000000ff00, 0xff0000000000ff01, + 0xff000000000000ff, 0xff00000000000000, 0xff00000000000001, 0xff00000000000100, + 0xff00000000000101, 0xff0000000001ff00, 0xff000000000100ff, 0xff00000000010000, + 0xff00000000010001, 0xff00000000010100, 0xff00000001ffffff, 0xff00000001ffff01, + 0xff00000001ff00ff, 0xff00000001ff0000, 0xff00000001ff01ff, 0xff00000001ff0101, + 0xff0000000100ffff, 0xff0000000100ff00, 0xff000000010000ff, 0xff00000001000000, + 0xff00000001000001, 0xff00000001000100, 0xff00000001000101, 0xff0000000101ffff, + 0xff0000000101ff01, 0xff00000001010000, 0xff000001ffffff00, 0xff000001ffff00ff, + 0xff000001ffff0000, 0xff000001ffff0001, 0xff000001ff000000, 0xff000001ff000001, + 0xff000001ff0001ff, 0xff000001ff000101, 0xff000001ff01ff00, 0xff000001ff010001, + 0xff00000100ffffff, 0xff00000100ffff01, 0xff00000100ff00ff, 0xff00000100ff0000, + 0xff00000100ff01ff, 0xff00000100ff0101, 0xff0000010000ff00, 0xff00000100000000, + 0xff00000100000001, 0xff000001000001ff, 0xff00000100000100, 0xff0000010001ff00, + 0xff000001000100ff, 0xff00000100010000, 0xff000001000101ff, 0xff00000100010100, + 0xff00000100010101, 0xff00000101ff0001, 0xff00000101ff0101, 0xff0000010100ff01, + 0xff00000101000000, 0xff000001010100ff, 0xff00000101010100, 0xff0001ffff00ff00, + 0xff0001ffff000001, 0xff0001ffff010000, 0xff0001ff00ffff00, 0xff0001ff00ff00ff, + 0xff0001ff00ff0001, 0xff0001ff00ff0100, 0xff0001ff0000ffff, 0xff0001ff00000000, + 0xff0001ff000001ff, 0xff0001ff00000101, 0xff0001ff0001ffff, 0xff0001ff0001ff00, + 
0xff0001ff000100ff, 0xff0001ff00010001, 0xff0001ff00010100, 0xff0001ff01ff0000, + 0xff0001ff0100ff00, 0xff0001ff010000ff, 0xff0001ff01010000, 0xff000100ff00ffff, + 0xff000100ff00ff01, 0xff000100ff000000, 0xff000100ff000101, 0xff000100ff01ff00, + 0xff000100ff010000, 0xff00010000ffff01, 0xff00010000ff00ff, 0xff00010000ff0000, + 0xff00010000ff01ff, 0xff0001000000ff00, 0xff000100000000ff, 0xff00010000000000, + 0xff00010000000001, 0xff00010000000100, 0xff00010000000101, 0xff0001000001ffff, + 0xff00010000010000, 0xff00010000010101, 0xff00010001ff0100, 0xff0001000100ff00, + 0xff0001000100ff01, 0xff00010001000000, 0xff000100010001ff, 0xff0001000101ff00, + 0xff00010001010001, 0xff00010001010100, 0xff000101ffff0100, 0xff000101ff000001, + 0xff000101ff0100ff, 0xff000101ff010001, 0xff00010100ff00ff, 0xff00010100ff0001, + 0xff00010100ff0100, 0xff0001010000ffff, 0xff0001010000ff01, 0xff00010100000000, + 0xff000101000001ff, 0xff0001010001ff00, 0xff00010100010001, 0xff00010100010100, + 0xff00010101ff0000, 0xff0001010100ff00, 0xff00010101000001, 0xff00010101000101, + 0xff01ffffffffffff, 0xff01ffffffffff01, 0xff01ffffffff01ff, 0xff01ffffffff0101, + 0xff01ffffff000000, 0xff01ffffff01ffff, 0xff01ffffff01ff01, 0xff01ffffff010000, + 0xff01ffffff0101ff, 0xff01ffffff010101, 0xff01ffff00ff0000, 0xff01ffff0000ff00, + 0xff01ffff00000100, 0xff01ffff0001ff00, 0xff01ffff00010000, 0xff01ffff01ffffff, + 0xff01ffff01ffff01, 0xff01ffff01ff01ff, 0xff01ffff01ff0101, 0xff01ffff01000000, + 0xff01ffff0101ffff, 0xff01ffff0101ff01, 0xff01ffff01010000, 0xff01ffff010101ff, + 0xff01ffff01010101, 0xff01ff00ffff0000, 0xff01ff00ff00ff00, 0xff01ff00ff0000ff, + 0xff01ff00ff000100, 0xff01ff00ff010000, 0xff01ff0000ffff01, 0xff01ff0000ff00ff, + 0xff01ff0000ff0100, 0xff01ff0000000000, 0xff01ff00000001ff, 0xff01ff0000000101, + 0xff01ff000001ff00, 0xff01ff00000100ff, 0xff01ff0000010000, 0xff01ff0000010001, + 0xff01ff0001ff0000, 0xff01ff000100ffff, 0xff01ff0001000001, 0xff01ff0001000100, + 0xff01ff0001010000, 
0xff01ff01ffffff00, 0xff01ff01ffff01ff, 0xff01ff01ffff0101, + 0xff01ff01ff00ff00, 0xff01ff01ff000000, 0xff01ff01ff01ffff, 0xff01ff01ff01ff01, + 0xff01ff01ff0101ff, 0xff01ff01ff010101, 0xff01ff0100ff0000, 0xff01ff010000ff00, + 0xff01ff0100000001, 0xff01ff0100000100, 0xff01ff0100010000, 0xff01ff0101ffff00, + 0xff01ff0101ff01ff, 0xff01ff0101ff0101, 0xff01ff010100ff00, 0xff01ff0101000000, + 0xff01ff010101ffff, 0xff01ff010101ff01, 0xff01ff01010101ff, 0xff01ff0101010101, + 0xff0100ffffff0000, 0xff0100ffff0000ff, 0xff0100ffff000001, 0xff0100ffff000100, + 0xff0100ffff010000, 0xff0100ff00ff00ff, 0xff0100ff00ff0000, 0xff0100ff00ff0001, + 0xff0100ff00ff0100, 0xff0100ff0000ff01, 0xff0100ff00000000, 0xff0100ff000001ff, + 0xff0100ff00000101, 0xff0100ff00010001, 0xff0100ff01ff0000, 0xff0100ff0100ff00, + 0xff0100ff010000ff, 0xff0100ff01000100, 0xff0100ff0101ff00, 0xff0100ff01010000, + 0xff010000ffff0100, 0xff010000ff000000, 0xff010000ff01ff00, 0xff010000ff010100, + 0xff01000000ffffff, 0xff01000000ff0000, 0xff01000000ff01ff, 0xff0100000000ff00, + 0xff010000000000ff, 0xff01000000000000, 0xff01000000000100, 0xff0100000001ff01, + 0xff01000000010000, 0xff010000000101ff, 0xff01000001ff0100, 0xff0100000100ffff, + 0xff010000010000ff, 0xff01000001000000, 0xff010000010001ff, 0xff01000001000101, + 0xff0100000101ff00, 0xff010000010100ff, 0xff01000001010001, 0xff01000001010100, + 0xff010001ffff0000, 0xff010001ff00ffff, 0xff010001ff00ff01, 0xff010001ff000100, + 0xff010001ff010000, 0xff01000100ffff00, 0xff01000100ff0100, 0xff01000100000000, + 0xff0100010001ffff, 0xff0100010001ff00, 0xff01000100010100, 0xff01000101ff00ff, + 0xff01000101ff0001, 0xff0100010100ffff, 0xff01000101000101, 0xff0101ffffffffff, + 0xff0101ffffffff01, 0xff0101ffffff01ff, 0xff0101ffffff0101, 0xff0101ffff000000, + 0xff0101ffff01ffff, 0xff0101ffff01ff01, 0xff0101ffff0101ff, 0xff0101ffff010101, + 0xff0101ff00ff0000, 0xff0101ff0000ff00, 0xff0101ff000000ff, 0xff0101ff00010000, + 0xff0101ff01ffffff, 0xff0101ff01ffff01, 
0xff0101ff01ff01ff, 0xff0101ff01ff0101, + 0xff0101ff0101ffff, 0xff0101ff0101ff01, 0xff0101ff010101ff, 0xff0101ff01010101, + 0xff010100ffff0100, 0xff010100ff00ff00, 0xff010100ff0000ff, 0xff010100ff000100, + 0xff010100ff010000, 0xff01010000ff0001, 0xff01010000ff0100, 0xff0101000000ff01, + 0xff01010000000000, 0xff0101000001ff00, 0xff010100000100ff, 0xff01010000010001, + 0xff01010000010100, 0xff01010001ff0000, 0xff0101000100ffff, 0xff01010001000001, + 0xff01010001000100, 0xff010100010100ff, 0xff01010001010000, 0xff010101ffffffff, + 0xff010101ffffff01, 0xff010101ffff01ff, 0xff010101ffff0101, 0xff010101ff01ffff, + 0xff010101ff01ff01, 0xff010101ff0101ff, 0xff010101ff010101, 0xff01010100ff0000, + 0xff0101010000ff00, 0xff01010100000001, 0xff01010100000100, 0xff01010100010000, + 0xff01010101ffffff, 0xff01010101ffff01, 0xff01010101ff01ff, 0xff01010101ff0101, + 0xff01010101000000, 0xff0101010101ffff, 0xff0101010101ff01, 0xff010101010101ff, + 0xff01010101010101, 0x00ffffffffff0000, 0x00ffffffff00ff00, 0x00ffffffff000001, + 0x00ffffffff010000, 0x00ffffff00ff0100, 0x00ffffff0000ff01, 0x00ffffff00000000, + 0x00ffffff000001ff, 0x00ffffff00000101, 0x00ffffff0001ff00, 0x00ffffff000100ff, + 0x00ffffff00010001, 0x00ffffff010000ff, 0x00ffffff01000100, 0x00ffffff0101ff00, + 0x00ffffff01010001, 0x00ffff00ffffffff, 0x00ffff00ffffff00, 0x00ffff00ffff00ff, + 0x00ffff00ffff0001, 0x00ffff00ffff0100, 0x00ffff00ff00ff01, 0x00ffff00ff000000, + 0x00ffff00ff000001, 0x00ffff00ff0001ff, 0x00ffff00ff000101, 0x00ffff00ff01ff00, + 0x00ffff00ff010001, 0x00ffff00ff010100, 0x00ffff0000ff0000, 0x00ffff0000ff01ff, + 0x00ffff0000ff0101, 0x00ffff000000ff00, 0x00ffff00000000ff, 0x00ffff0000000000, + 0x00ffff0000000001, 0x00ffff0000000100, 0x00ffff0000000101, 0x00ffff0000010000, + 0x00ffff00000101ff, 0x00ffff0000010101, 0x00ffff0001ffff00, 0x00ffff0001ff00ff, + 0x00ffff0001ff0001, 0x00ffff000100ffff, 0x00ffff000100ff01, 0x00ffff0001000000, + 0x00ffff000101ffff, 0x00ffff000101ff00, 0x00ffff000101ff01, 
0x00ffff01ffff0000, + 0x00ffff01ff00ff00, 0x00ffff01ff0000ff, 0x00ffff01ff000001, 0x00ffff01ff010000, + 0x00ffff0100ffff00, 0x00ffff010000ff01, 0x00ffff0100000000, 0x00ffff0100000101, + 0x00ffff01000100ff, 0x00ffff0100010100, 0x00ffff0101ff0100, 0x00ffff01010000ff, + 0x00ffff0101010000, 0x00ff00ffffffff00, 0x00ff00ffff000000, 0x00ff00ffff000100, + 0x00ff00ffff010100, 0x00ff00ff00ff0000, 0x00ff00ff00ff01ff, 0x00ff00ff00ff0101, + 0x00ff00ff0000ff00, 0x00ff00ff000000ff, 0x00ff00ff00000000, 0x00ff00ff00000001, + 0x00ff00ff0001ff00, 0x00ff00ff0001ff01, 0x00ff00ff00010000, 0x00ff00ff000101ff, + 0x00ff00ff00010101, 0x00ff00ff01ffff00, 0x00ff00ff01ff0001, 0x00ff00ff01ff0100, + 0x00ff00ff0100ffff, 0x00ff00ff0100ff01, 0x00ff00ff01000000, 0x00ff00ff0101ffff, + 0x00ff00ff0101ff00, 0x00ff00ff01010100, 0x00ff0000ffffff00, 0x00ff0000ffffff01, + 0x00ff0000ffff0000, 0x00ff0000ffff0101, 0x00ff0000ff00ff00, 0x00ff0000ff0000ff, + 0x00ff0000ff000000, 0x00ff0000ff000001, 0x00ff0000ff000100, 0x00ff0000ff01ffff, + 0x00ff0000ff010000, 0x00ff0000ff010101, 0x00ff000000ffff00, 0x00ff000000ff00ff, + 0x00ff000000ff0000, 0x00ff000000ff0001, 0x00ff000000ff0100, 0x00ff00000000ffff, + 0x00ff00000000ff00, 0x00ff0000000000ff, 0x00ff000000000000, 0x00ff000000000001, + 0x00ff0000000001ff, 0x00ff000000000100, 0x00ff00000001ff00, 0x00ff0000000100ff, + 0x00ff000000010000, 0x00ff000000010001, 0x00ff000000010100, 0x00ff000001ffff01, + 0x00ff000001ff00ff, 0x00ff000001ff0000, 0x00ff000001ff01ff, 0x00ff00000100ff00, + 0x00ff0000010000ff, 0x00ff000001000000, 0x00ff000001000001, 0x00ff000001000100, + 0x00ff000001000101, 0x00ff000001010000, 0x00ff0000010101ff, 0x00ff000001010101, + 0x00ff0001ffffff00, 0x00ff0001ffff0000, 0x00ff0001ffff0100, 0x00ff0001ff0000ff, + 0x00ff0001ff000000, 0x00ff0001ff0001ff, 0x00ff0001ff000101, 0x00ff0001ff01ff00, + 0x00ff0001ff0100ff, 0x00ff0001ff010100, 0x00ff000100ffffff, 0x00ff000100ffff01, + 0x00ff000100ff0000, 0x00ff000100ff01ff, 0x00ff00010000ffff, 0x00ff00010000ff00, + 
0x00ff00010000ff01, 0x00ff000100000000, 0x00ff000100000001, 0x00ff000100000100, + 0x00ff00010001ff01, 0x00ff000100010000, 0x00ff0001000101ff, 0x00ff000101ffff00, + 0x00ff000101ff0000, 0x00ff000101ff0101, 0x00ff0001010000ff, 0x00ff000101000000, + 0x00ff00010101ff00, 0x00ff0001010100ff, 0x00ff000101010001, 0x00ff01ffffff0000, + 0x00ff01ffff00ff00, 0x00ff01ffff000000, 0x00ff01ffff000101, 0x00ff01ffff010000, + 0x00ff01ff00ffff01, 0x00ff01ff00ff0100, 0x00ff01ff0000ffff, 0x00ff01ff00000000, + 0x00ff01ff000001ff, 0x00ff01ff0001ff00, 0x00ff01ff000100ff, 0x00ff01ff00010001, + 0x00ff01ff00010100, 0x00ff01ff01ff0000, 0x00ff01ff0100ff00, 0x00ff01ff010000ff, + 0x00ff01ff01000001, 0x00ff01ff01000100, 0x00ff01ff01010000, 0x00ff0100ffffff00, + 0x00ff0100ffff0000, 0x00ff0100ffff0001, 0x00ff0100ffff0101, 0x00ff0100ff00ffff, + 0x00ff0100ff0000ff, 0x00ff0100ff000000, 0x00ff0100ff0001ff, 0x00ff0100ff01ff00, + 0x00ff0100ff0100ff, 0x00ff0100ff010001, 0x00ff010000ffffff, 0x00ff010000ff0000, + 0x00ff010000ff0101, 0x00ff01000000ff00, 0x00ff01000000ff01, 0x00ff0100000000ff, + 0x00ff010000000000, 0x00ff010000000001, 0x00ff010000000100, 0x00ff01000001ffff, + 0x00ff01000001ff01, 0x00ff010000010000, 0x00ff010000010001, 0x00ff010000010101, + 0x00ff010001ff0001, 0x00ff010001ff0100, 0x00ff01000100ff01, 0x00ff010001000000, + 0x00ff010001000001, 0x00ff0100010001ff, 0x00ff01000101ff00, 0x00ff0100010100ff, + 0x00ff010001010001, 0x00ff010001010100, 0x00ff0101ff000001, 0x00ff010100ff00ff, + 0x00ff010100ff0001, 0x00ff010100ff0100, 0x00ff010100000000, 0x00ff0101000001ff, + 0x00ff010100000101, 0x00ff0101000100ff, 0x00ff010100010100, 0x00ff0101010000ff, + 0x00ff010101010000, 0x0000ffffffffff00, 0x0000ffffffff00ff, 0x0000ffffffff0000, + 0x0000ffffffff0001, 0x0000ffffffff0100, 0x0000ffffff00ff01, 0x0000ffffff000000, + 0x0000ffffff000101, 0x0000ffffff01ff00, 0x0000ffffff0100ff, 0x0000ffffff010100, + 0x0000ffff00ffffff, 0x0000ffff00ff0000, 0x0000ffff00ff01ff, 0x0000ffff0000ff00, + 0x0000ffff000000ff, 
0x0000ffff00000000, 0x0000ffff00000001, 0x0000ffff00000100, + 0x0000ffff00010000, 0x0000ffff000101ff, 0x0000ffff01ff0001, 0x0000ffff01ff0100, + 0x0000ffff01000000, 0x0000ffff010001ff, 0x0000ffff0101ffff, 0x0000ffff0101ff00, + 0x0000ffff01010001, 0x0000ffff01010100, 0x0000ff00ffff0000, 0x0000ff00ffff01ff, + 0x0000ff00ffff0100, 0x0000ff00ffff0101, 0x0000ff00ff00ff00, 0x0000ff00ff0000ff, + 0x0000ff00ff000000, 0x0000ff00ff000001, 0x0000ff00ff0001ff, 0x0000ff00ff000100, + 0x0000ff00ff01ffff, 0x0000ff00ff010000, 0x0000ff00ff010001, 0x0000ff00ff0101ff, + 0x0000ff00ff010101, 0x0000ff0000ffff00, 0x0000ff0000ff00ff, 0x0000ff0000ff0000, + 0x0000ff0000ff0001, 0x0000ff0000ff0100, 0x0000ff000000ffff, 0x0000ff000000ff00, + 0x0000ff000000ff01, 0x0000ff00000000ff, 0x0000ff0000000000, 0x0000ff0000000001, + 0x0000ff00000001ff, 0x0000ff0000000100, 0x0000ff0000000101, 0x0000ff000001ff00, + 0x0000ff00000100ff, 0x0000ff0000010000, 0x0000ff0000010001, 0x0000ff0000010100, + 0x0000ff0001ffff01, 0x0000ff0001ff0000, 0x0000ff000100ff00, 0x0000ff00010000ff, + 0x0000ff0001000000, 0x0000ff0001000001, 0x0000ff0001000100, 0x0000ff000101ffff, + 0x0000ff0001010000, 0x0000ff0001010101, 0x0000ff01ffffff00, 0x0000ff01ffff0001, + 0x0000ff01ff00ff01, 0x0000ff01ff000000, 0x0000ff01ff000101, 0x0000ff01ff01ff00, + 0x0000ff01ff0100ff, 0x0000ff0100ffff01, 0x0000ff0100ff0000, 0x0000ff0100ff0101, + 0x0000ff010000ff00, 0x0000ff01000000ff, 0x0000ff0100000000, 0x0000ff0100000001, + 0x0000ff0100000100, 0x0000ff010001ff01, 0x0000ff0100010000, 0x0000ff0101ff0000, + 0x0000ff010100ffff, 0x0000ff010100ff01, 0x0000ff0101000000, 0x0000ff0101000100, + 0x0000ff0101000101, 0x0000ff01010100ff, 0x000000ffffff00ff, 0x000000ffffff0000, + 0x000000ffff00ff00, 0x000000ffff0000ff, 0x000000ffff000000, 0x000000ffff000001, + 0x000000ffff0001ff, 0x000000ffff000100, 0x000000ffff01ff00, 0x000000ffff010000, + 0x000000ffff0101ff, 0x000000ffff010101, 0x000000ff00ffff00, 0x000000ff00ff00ff, + 0x000000ff00ff0000, 0x000000ff00ff0001, 
0x000000ff00ff0100, 0x000000ff00ff0101, + 0x000000ff0000ffff, 0x000000ff0000ff00, 0x000000ff000000ff, 0x000000ff00000000, + 0x000000ff00000001, 0x000000ff000001ff, 0x000000ff00000100, 0x000000ff00000101, + 0x000000ff0001ff00, 0x000000ff0001ff01, 0x000000ff000100ff, 0x000000ff00010000, + 0x000000ff00010001, 0x000000ff00010100, 0x000000ff01ffffff, 0x000000ff01ff01ff, + 0x000000ff01ff0101, 0x000000ff0100ff00, 0x000000ff010000ff, 0x000000ff01000000, + 0x000000ff01000001, 0x000000ff01000100, 0x000000ff0101ff00, 0x000000ff010100ff, + 0x000000ff01010000, 0x000000ff01010101, 0x00000000ffffff00, 0x00000000ffffff01, + 0x00000000ffff00ff, 0x00000000ffff0000, 0x00000000ffff0001, 0x00000000ffff0100, + 0x00000000ff00ffff, 0x00000000ff00ff00, 0x00000000ff00ff01, 0x00000000ff0000ff, + 0x00000000ff000000, 0x00000000ff000001, 0x00000000ff000100, 0x00000000ff000101, + 0x00000000ff01ff00, 0x00000000ff0100ff, 0x00000000ff010000, 0x00000000ff010001, + 0x00000000ff010100, 0x0000000000ffffff, 0x0000000000ffff00, 0x0000000000ffff01, + 0x0000000000ff00ff, 0x0000000000ff0000, 0x0000000000ff0001, 0x0000000000ff01ff, + 0x0000000000ff0100, 0x000000000000ffff, 0x000000000000ff00, 0x000000000000ff01, + 0x00000000000000ff, 0x0000000000000000, 0x0000000000000001, 0x00000000000001ff, + 0x0000000000000100, 0x0000000000000101, 0x000000000001ffff, 0x000000000001ff00, + 0x00000000000100ff, 0x0000000000010000, 0x0000000000010001, 0x00000000000101ff, + 0x0000000000010100, 0x0000000000010101, 0x0000000001ffff00, 0x0000000001ff00ff, + 0x0000000001ff0000, 0x0000000001ff0100, 0x0000000001ff0101, 0x000000000100ffff, + 0x000000000100ff00, 0x00000000010000ff, 0x0000000001000000, 0x0000000001000001, + 0x00000000010001ff, 0x0000000001000100, 0x000000000101ff00, 0x00000000010100ff, + 0x0000000001010000, 0x0000000001010001, 0x0000000001010100, 0x00000001ffffffff, + 0x00000001ffffff00, 0x00000001ffffff01, 0x00000001ffff00ff, 0x00000001ffff0001, + 0x00000001ffff01ff, 0x00000001ffff0100, 0x00000001ff00ff00, 
0x00000001ff0000ff, + 0x00000001ff000000, 0x00000001ff0001ff, 0x00000001ff000100, 0x00000001ff01ffff, + 0x00000001ff01ff00, 0x00000001ff01ff01, 0x00000001ff0100ff, 0x00000001ff010000, + 0x00000001ff010001, 0x00000001ff0101ff, 0x00000001ff010100, 0x0000000100ffff00, + 0x0000000100ff0000, 0x0000000100ff0001, 0x0000000100ff01ff, 0x0000000100ff0100, + 0x0000000100ff0101, 0x000000010000ffff, 0x000000010000ff00, 0x000000010000ff01, + 0x00000001000000ff, 0x0000000100000000, 0x0000000100000001, 0x00000001000001ff, + 0x0000000100000100, 0x0000000100000101, 0x000000010001ff00, 0x00000001000100ff, + 0x0000000100010000, 0x0000000100010100, 0x0000000101ffff01, 0x0000000101ff0000, + 0x0000000101ff0001, 0x0000000101ff01ff, 0x0000000101ff0100, 0x0000000101ff0101, + 0x000000010100ff00, 0x0000000101000000, 0x0000000101000101, 0x000000010101ff01, + 0x0000000101010000, 0x0000000101010001, 0x00000001010101ff, 0x0000000101010100, + 0x000001ffffff00ff, 0x000001ffffff0000, 0x000001ffffff0001, 0x000001ffffff0100, + 0x000001ffff00ffff, 0x000001ffff000000, 0x000001ffff0001ff, 0x000001ffff01ff00, + 0x000001ffff010101, 0x000001ff00ff0000, 0x000001ff00ff01ff, 0x000001ff00ff0101, + 0x000001ff0000ff00, 0x000001ff000000ff, 0x000001ff00000000, 0x000001ff00000001, + 0x000001ff000001ff, 0x000001ff00000100, 0x000001ff0001ffff, 0x000001ff0001ff01, + 0x000001ff000100ff, 0x000001ff00010000, 0x000001ff01ffff01, 0x000001ff01ff0100, + 0x000001ff0100ffff, 0x000001ff0100ff01, 0x000001ff01000000, 0x000001ff010001ff, + 0x000001ff0101ff00, 0x000001ff01010100, 0x00000100ffffff00, 0x00000100ffffff01, + 0x00000100ffff0000, 0x00000100ffff0101, 0x00000100ff00ff00, 0x00000100ff0000ff, + 0x00000100ff000000, 0x00000100ff000001, 0x00000100ff000100, 0x00000100ff010000, + 0x0000010000ffff00, 0x0000010000ff00ff, 0x0000010000ff0000, 0x0000010000ff0001, + 0x0000010000ff0100, 0x000001000000ffff, 0x000001000000ff00, 0x000001000000ff01, + 0x00000100000000ff, 0x0000010000000000, 0x0000010000000001, 0x00000100000001ff, + 
0x0000010000000100, 0x0000010000000101, 0x000001000001ff00, 0x00000100000100ff, + 0x0000010000010000, 0x0000010000010001, 0x0000010000010100, 0x0000010001ffff00, + 0x0000010001ff0000, 0x0000010001ff0100, 0x000001000100ff00, 0x00000100010000ff, + 0x0000010001000000, 0x0000010001000001, 0x00000100010001ff, 0x0000010001000100, + 0x0000010001010000, 0x00000101ffff00ff, 0x00000101ffff01ff, 0x00000101ff000000, + 0x00000101ff000101, 0x00000101ff01ffff, 0x00000101ff010000, 0x00000101ff010001, + 0x00000101ff010100, 0x0000010100ff0000, 0x0000010100ff01ff, 0x0000010100ff0100, + 0x000001010000ff00, 0x0000010100000000, 0x0000010100000001, 0x00000101000001ff, + 0x0000010100000100, 0x000001010001ff01, 0x0000010100010000, 0x00000101000101ff, + 0x0000010100010101, 0x0000010101ffff00, 0x0000010101ff0101, 0x000001010100ff01, + 0x0000010101000000, 0x0000010101000001, 0x00000101010001ff, 0x0000010101000101, + 0x000001010101ff00, 0x0001ffffffff0000, 0x0001ffffff0000ff, 0x0001ffffff000001, + 0x0001ffffff000100, 0x0001ffffff010000, 0x0001ffff00ff00ff, 0x0001ffff0000ffff, + 0x0001ffff00000000, 0x0001ffff00000001, 0x0001ffff000001ff, 0x0001ffff00000101, + 0x0001ffff0001ff00, 0x0001ffff000100ff, 0x0001ffff00010001, 0x0001ffff00010100, + 0x0001ffff01ffff00, 0x0001ffff01000001, 0x0001ffff01010000, 0x0001ff00ffffff00, + 0x0001ff00ffff00ff, 0x0001ff00ffff0001, 0x0001ff00ffff0100, 0x0001ff00ff00ff01, + 0x0001ff00ff000000, 0x0001ff00ff01ff00, 0x0001ff00ff01ff01, 0x0001ff00ff010001, + 0x0001ff00ff010100, 0x0001ff0000ff0000, 0x0001ff0000ff0100, 0x0001ff000000ff00, + 0x0001ff0000000000, 0x0001ff0000000001, 0x0001ff0000000100, 0x0001ff0000010000, + 0x0001ff0000010001, 0x0001ff0000010101, 0x0001ff0001ff00ff, 0x0001ff0001ff0101, + 0x0001ff000100ff01, 0x0001ff0001000000, 0x0001ff000101ff00, 0x0001ff0001010001, + 0x0001ff0001010100, 0x0001ff01ff00ff00, 0x0001ff01ff000001, 0x0001ff01ff000100, + 0x0001ff0100ffffff, 0x0001ff0100ffff00, 0x0001ff0100ff0001, 0x0001ff0100000000, + 0x0001ff0100000001, 
0x0001ff01000001ff, 0x0001ff010001ffff, 0x0001ff0101ff0000, + 0x0001ff010100ff00, 0x0001ff0101000001, 0x0001ff0101010000, 0x000100ffff00ff00, + 0x000100ffff00ff01, 0x000100ffff000000, 0x000100ffff000001, 0x000100ffff000101, + 0x000100ffff01ff00, 0x000100ffff010001, 0x000100ffff010100, 0x000100ff00ffffff, + 0x000100ff00ffff01, 0x000100ff00ff0000, 0x000100ff00ff01ff, 0x000100ff00ff0101, + 0x000100ff0000ff00, 0x000100ff000000ff, 0x000100ff00000000, 0x000100ff00000001, + 0x000100ff00000100, 0x000100ff00000101, 0x000100ff0001ffff, 0x000100ff0001ff01, + 0x000100ff00010000, 0x000100ff01ff00ff, 0x000100ff01ff0000, 0x000100ff01ff0100, + 0x000100ff0100ffff, 0x000100ff0100ff01, 0x000100ff010000ff, 0x000100ff01000000, + 0x000100ff01000001, 0x000100ff010001ff, 0x000100ff01000101, 0x000100ff0101ff00, + 0x000100ff010100ff, 0x000100ff01010100, 0x00010000ffff0000, 0x00010000ffff01ff, + 0x00010000ffff0101, 0x00010000ff00ff00, 0x00010000ff000000, 0x00010000ff000001, + 0x00010000ff000100, 0x0001000000ff00ff, 0x0001000000ff0000, 0x0001000000ff0001, + 0x0001000000ff0100, 0x000100000000ffff, 0x000100000000ff00, 0x00010000000000ff, + 0x0001000000000000, 0x0001000000000001, 0x0001000000000100, 0x000100000001ff00, + 0x00010000000100ff, 0x0001000000010000, 0x0001000000010001, 0x0001000000010100, + 0x0001000001ff0001, 0x0001000001ff0100, 0x0001000001ff0101, 0x000100000100ff00, + 0x0001000001000000, 0x0001000001000001, 0x0001000001000100, 0x0001000001000101, + 0x000100000101ff01, 0x0001000001010000, 0x0001000001010001, 0x00010000010101ff, + 0x00010001ffffff01, 0x00010001ffff0100, 0x00010001ff000000, 0x00010001ff01ffff, + 0x00010001ff010001, 0x00010001ff0101ff, 0x00010001ff010100, 0x0001000100ffffff, + 0x0001000100ff0000, 0x0001000100ff01ff, 0x0001000100ff0101, 0x000100010000ff00, + 0x00010001000000ff, 0x0001000100000000, 0x0001000100000001, 0x00010001000001ff, + 0x0001000100000101, 0x000100010001ffff, 0x0001000100010000, 0x00010001000101ff, + 0x0001000101ffffff, 0x0001000101ffff01, 
0x0001000101ff0000, 0x0001000101ff0101, + 0x00010001010000ff, 0x0001000101000001, 0x00010001010001ff, 0x0001000101000100, + 0x000100010101ffff, 0x00010001010100ff, 0x0001000101010001, 0x0001000101010101, + 0x000101ffff000001, 0x000101ffff000100, 0x000101ffff010000, 0x000101ff00ffff00, + 0x000101ff0000ff01, 0x000101ff00000000, 0x000101ff00000101, 0x000101ff0001ff00, + 0x000101ff00010100, 0x000101ff01ff0000, 0x000101ff0100ff00, 0x000101ff010001ff, + 0x000101ff01010001, 0x00010100ffffff00, 0x00010100ffff00ff, 0x00010100ff00ffff, + 0x00010100ff000000, 0x00010100ff01ff00, 0x00010100ff0100ff, 0x00010100ff010001, + 0x00010100ff010100, 0x0001010000ffffff, 0x0001010000ffff00, 0x0001010000ff0000, + 0x0001010000ff0001, 0x0001010000ff01ff, 0x000101000000ff00, 0x00010100000000ff, + 0x0001010000000000, 0x0001010000000001, 0x0001010000000100, 0x000101000001ffff, + 0x0001010000010000, 0x0001010000010101, 0x0001010001ffff01, 0x0001010001ff00ff, + 0x0001010001ff0101, 0x0001010001000000, 0x000101000101ff00, 0x00010100010100ff, + 0x0001010001010000, 0x0001010001010100, 0x00010101ff00ff00, 0x00010101ff000001, + 0x00010101ff0001ff, 0x0001010100ffff00, 0x0001010100ff00ff, 0x0001010100ff0100, + 0x000101010000ffff, 0x0001010100000000, 0x00010101000001ff, 0x0001010100000101, + 0x00010101000100ff, 0x0001010100010000, 0x0001010100010100, 0x0001010101ff0001, + 0x00010101010000ff, 0x00010101010001ff, 0x0001010101000101, 0x0001010101010001, + 0x01ffffffffffffff, 0x01ffffffffffff01, 0x01ffffffffff01ff, 0x01ffffffffff0101, + 0x01ffffffff01ffff, 0x01ffffffff01ff01, 0x01ffffffff0101ff, 0x01ffffffff010101, + 0x01ffffff00ff0000, 0x01ffffff0000ffff, 0x01ffffff0000ff00, 0x01ffffff000000ff, + 0x01ffffff00000001, 0x01ffffff00000100, 0x01ffffff00010000, 0x01ffffff01ffffff, + 0x01ffffff01ffff01, 0x01ffffff01ff01ff, 0x01ffffff01ff0101, 0x01ffffff01000000, + 0x01ffffff0101ffff, 0x01ffffff0101ff01, 0x01ffffff010101ff, 0x01ffffff01010101, + 0x01ffff00ffff0000, 0x01ffff00ff00ff00, 0x01ffff00ff0000ff, 
0x01ffff00ff000001, + 0x01ffff00ff000100, 0x01ffff00ff010000, 0x01ffff0000ffff00, 0x01ffff0000ff00ff, + 0x01ffff0000ff0100, 0x01ffff000000ffff, 0x01ffff000000ff01, 0x01ffff0000000000, + 0x01ffff0000000001, 0x01ffff00000001ff, 0x01ffff0000000100, 0x01ffff00000100ff, + 0x01ffff0000010001, 0x01ffff0000010100, 0x01ffff0001ff0000, 0x01ffff0001ff0100, + 0x01ffff00010000ff, 0x01ffff0001000001, 0x01ffff0001000100, 0x01ffff0001010000, + 0x01ffff01ffffffff, 0x01ffff01ffffff01, 0x01ffff01ffff01ff, 0x01ffff01ffff0101, + 0x01ffff01ff000000, 0x01ffff01ff01ffff, 0x01ffff01ff01ff01, 0x01ffff01ff0101ff, + 0x01ffff01ff010101, 0x01ffff010000ff00, 0x01ffff01000000ff, 0x01ffff0100000100, + 0x01ffff0100010000, 0x01ffff0101ffffff, 0x01ffff0101ffff01, 0x01ffff0101ff01ff, + 0x01ffff0101ff0101, 0x01ffff0101000000, 0x01ffff010101ffff, 0x01ffff010101ff01, + 0x01ffff01010101ff, 0x01ffff0101010101, 0x01ff00ffff0000ff, 0x01ff00ffff000100, + 0x01ff00ff00ffff00, 0x01ff00ff00ff00ff, 0x01ff00ff0000ff00, 0x01ff00ff00000000, + 0x01ff00ff00000101, 0x01ff00ff0001ff00, 0x01ff00ff000100ff, 0x01ff00ff00010100, + 0x01ff00ff010000ff, 0x01ff00ff01000100, 0x01ff0000ffffff00, 0x01ff0000ffff0100, + 0x01ff0000ff00ff01, 0x01ff0000ff000000, 0x01ff0000ff000101, 0x01ff0000ff010001, + 0x01ff0000ff010100, 0x01ff000000ffffff, 0x01ff000000ffff00, 0x01ff000000ff0000, + 0x01ff000000ff01ff, 0x01ff00000000ff00, 0x01ff0000000000ff, 0x01ff000000000000, + 0x01ff000000000001, 0x01ff000000000100, 0x01ff000000000101, 0x01ff000000010000, + 0x01ff000000010001, 0x01ff0000000101ff, 0x01ff000000010101, 0x01ff000001ffff00, + 0x01ff000001ff00ff, 0x01ff000001ff0001, 0x01ff000001ff0100, 0x01ff00000100ffff, + 0x01ff00000100ff01, 0x01ff000001000000, 0x01ff0000010001ff, 0x01ff000001010001, + 0x01ff0001ff00ff00, 0x01ff0001ff000001, 0x01ff0001ff000100, 0x01ff0001ff010000, + 0x01ff000100ffff00, 0x01ff000100ff00ff, 0x01ff000100ff0100, 0x01ff000100ff0101, + 0x01ff00010000ffff, 0x01ff000100000000, 0x01ff000100000100, 0x01ff000100000101, + 
0x01ff00010001ff00, 0x01ff000100010001, 0x01ff000100010101, 0x01ff000101ff0000, + 0x01ff00010100ff00, 0x01ff000101000101, 0x01ff0001010100ff, 0x01ff01ffffffffff, + 0x01ff01ffffffff01, 0x01ff01ffffff01ff, 0x01ff01ffffff0101, 0x01ff01ffff000000, + 0x01ff01ffff01ffff, 0x01ff01ffff01ff01, 0x01ff01ffff0101ff, 0x01ff01ffff010101, + 0x01ff01ff00ffff00, 0x01ff01ff00ff0000, 0x01ff01ff0000ff00, 0x01ff01ff000000ff, + 0x01ff01ff00000100, 0x01ff01ff00010000, 0x01ff01ff00010100, 0x01ff01ff01ffffff, + 0x01ff01ff01ffff01, 0x01ff01ff01ff01ff, 0x01ff01ff01ff0101, 0x01ff01ff01000000, + 0x01ff01ff0101ffff, 0x01ff01ff0101ff01, 0x01ff01ff010101ff, 0x01ff01ff01010101, + 0x01ff0100ffff0000, 0x01ff0100ffff0001, 0x01ff0100ff00ff00, 0x01ff0100ff0000ff, + 0x01ff0100ff000001, 0x01ff0100ff010000, 0x01ff010000ffff00, 0x01ff010000ff00ff, + 0x01ff010000ff0001, 0x01ff010000ff0100, 0x01ff01000000ffff, 0x01ff01000000ff01, + 0x01ff010000000000, 0x01ff010000000101, 0x01ff01000001ff00, 0x01ff0100000100ff, + 0x01ff010001ff0000, 0x01ff010001000001, 0x01ff010001000100, 0x01ff010001010000, + 0x01ff0101ffffffff, 0x01ff0101ffffff01, 0x01ff0101ffff01ff, 0x01ff0101ffff0101, + 0x01ff0101ff000000, 0x01ff0101ff01ffff, 0x01ff0101ff01ff01, 0x01ff0101ff0101ff, + 0x01ff0101ff010101, 0x01ff010100ff0000, 0x01ff01010000ff00, 0x01ff0101000000ff, + 0x01ff010100000001, 0x01ff010101ffffff, 0x01ff010101ffff01, 0x01ff010101ff01ff, + 0x01ff010101ff0101, 0x01ff010101000000, 0x01ff01010101ffff, 0x01ff01010101ff01, + 0x01ff0101010101ff, 0x01ff010101010101, 0x0100ffffffff0000, 0x0100ffffff00ff00, + 0x0100ffffff000001, 0x0100ffffff0001ff, 0x0100ffffff000100, 0x0100ffffff010000, + 0x0100ffff00ffff00, 0x0100ffff00ff0001, 0x0100ffff00ff0100, 0x0100ffff00000000, + 0x0100ffff000001ff, 0x0100ffff00000101, 0x0100ffff00010100, 0x0100ffff00010101, + 0x0100ffff01ff0000, 0x0100ffff0100ff00, 0x0100ffff010000ff, 0x0100ffff01000001, + 0x0100ffff01000100, 0x0100ffff01010000, 0x0100ff00ffffff00, 0x0100ff00ffff00ff, + 0x0100ff00ffff0001, 
0x0100ff00ffff0100, 0x0100ff00ff00ffff, 0x0100ff00ff000000, + 0x0100ff00ff0001ff, 0x0100ff00ff000101, 0x0100ff00ff01ff00, 0x0100ff00ff0100ff, + 0x0100ff00ff010001, 0x0100ff00ff010100, 0x0100ff0000ffffff, 0x0100ff0000ff0000, + 0x0100ff000000ffff, 0x0100ff000000ff00, 0x0100ff00000000ff, 0x0100ff0000000000, + 0x0100ff0000000001, 0x0100ff0000000100, 0x0100ff000001ff01, 0x0100ff0000010000, + 0x0100ff0001ff00ff, 0x0100ff0001ff0001, 0x0100ff000100ff01, 0x0100ff0001000000, + 0x0100ff00010001ff, 0x0100ff000101ff00, 0x0100ff00010100ff, 0x0100ff0001010001, + 0x0100ff0001010100, 0x0100ff01ffff0000, 0x0100ff01ff00ff00, 0x0100ff01ff0000ff, + 0x0100ff01ff000100, 0x0100ff01ff010000, 0x0100ff0100ff00ff, 0x0100ff0100ff0001, + 0x0100ff0100ff0100, 0x0100ff010000ffff, 0x0100ff010000ff01, 0x0100ff0100000000, + 0x0100ff01000001ff, 0x0100ff0100010001, 0x0100ff0100010100, 0x0100ff0101ff0000, + 0x0100ff01010000ff, 0x0100ff0101000001, 0x0100ff0101010100, 0x010000ffffffff00, + 0x010000ffffff00ff, 0x010000ffffff0001, 0x010000ffff00ffff, 0x010000ffff000000, + 0x010000ffff0001ff, 0x010000ffff010001, 0x010000ff00ffffff, 0x010000ff00ff0101, + 0x010000ff0000ff00, 0x010000ff000000ff, 0x010000ff00000000, 0x010000ff00000001, + 0x010000ff000001ff, 0x010000ff00000100, 0x010000ff0001ffff, 0x010000ff0001ff00, + 0x010000ff0001ff01, 0x010000ff00010000, 0x010000ff01ff00ff, 0x010000ff01ff0001, + 0x010000ff0100ff01, 0x010000ff010000ff, 0x010000ff01000000, 0x010000ff010001ff, + 0x010000ff0101ff00, 0x010000ff01010100, 0x01000000ffffffff, 0x01000000ffff0000, + 0x01000000ffff01ff, 0x01000000ffff0101, 0x01000000ff00ffff, 0x01000000ff00ff00, + 0x01000000ff0000ff, 0x01000000ff000000, 0x01000000ff000001, 0x01000000ff000100, + 0x01000000ff01ff00, 0x01000000ff010000, 0x01000000ff010100, 0x01000000ff010101, + 0x0100000000ffff00, 0x0100000000ff00ff, 0x0100000000ff0000, 0x0100000000ff0001, + 0x0100000000ff0100, 0x010000000000ffff, 0x010000000000ff00, 0x010000000000ff01, + 0x01000000000000ff, 0x0100000000000000, 
0x0100000000000001, 0x01000000000001ff, + 0x0100000000000100, 0x0100000000000101, 0x010000000001ff00, 0x01000000000100ff, + 0x0100000000010000, 0x0100000000010001, 0x0100000000010100, 0x0100000001ffff00, + 0x0100000001ff0000, 0x0100000001ff01ff, 0x010000000100ff00, 0x010000000100ff01, + 0x01000000010000ff, 0x0100000001000000, 0x0100000001000001, 0x0100000001000100, + 0x0100000001000101, 0x010000000101ffff, 0x010000000101ff01, 0x0100000001010000, + 0x01000000010101ff, 0x0100000001010101, 0x01000001ffffff00, 0x01000001ffff00ff, + 0x01000001ff00ffff, 0x01000001ff000000, 0x01000001ff000100, 0x01000001ff01ffff, + 0x01000001ff010001, 0x01000001ff010100, 0x0100000100ff0000, 0x0100000100ff01ff, + 0x0100000100ff0100, 0x010000010000ff00, 0x010000010000ff01, 0x0100000100000000, + 0x0100000100000001, 0x0100000100000100, 0x0100000100010000, 0x01000001000101ff, + 0x0100000101ffff01, 0x0100000101ff00ff, 0x0100000101ff0100, 0x0100000101ff0101, + 0x010000010100ff01, 0x01000001010000ff, 0x0100000101000000, 0x01000001010100ff, + 0x0100000101010001, 0x0100000101010100, 0x010001ffffff0000, 0x010001ffff000001, + 0x010001ffff000100, 0x010001ffff010000, 0x010001ff00ffff00, 0x010001ff00ff0001, + 0x010001ff0000ffff, 0x010001ff0000ff01, 0x010001ff00000000, 0x010001ff00000001, + 0x010001ff00000101, 0x010001ff000100ff, 0x010001ff00010000, 0x010001ff01ff0000, + 0x010001ff0100ff00, 0x010001ff01000001, 0x010001ff01000100, 0x010001ff01010000, + 0x01000100ffff00ff, 0x01000100ffff0001, 0x01000100ffff0100, 0x01000100ff00ffff, + 0x01000100ff00ff01, 0x01000100ff000000, 0x01000100ff0001ff, 0x01000100ff000101, + 0x01000100ff01ffff, 0x01000100ff01ff00, 0x01000100ff0100ff, 0x01000100ff010001, + 0x0100010000ffffff, 0x0100010000ffff01, 0x0100010000ff0000, 0x0100010000ff01ff, + 0x0100010000ff0101, 0x010001000000ff00, 0x01000100000000ff, 0x0100010000000000, + 0x0100010000000001, 0x0100010000000100, 0x010001000001ff01, 0x0100010000010000, + 0x0100010000010001, 0x0100010000010101, 0x0100010001ffff00, 
0x0100010001ff00ff, + 0x010001000100ffff, 0x010001000100ff01, 0x0100010001000000, 0x0100010001000101, + 0x010001000101ff00, 0x0100010001010001, 0x01000101ffff0000, 0x01000101ff000000, + 0x01000101ff010000, 0x0100010100ff00ff, 0x0100010100ff0001, 0x0100010100ff0100, + 0x010001010000ffff, 0x0100010100000000, 0x01000101000001ff, 0x010001010001ff00, + 0x0100010101ff0000, 0x010001010100ff00, 0x01000101010000ff, 0x0100010101000000, + 0x0100010101000001, 0x0101ffffffffffff, 0x0101ffffffffff01, 0x0101ffffffff01ff, + 0x0101ffffffff0101, 0x0101ffffff000000, 0x0101ffffff01ffff, 0x0101ffffff01ff01, + 0x0101ffffff0101ff, 0x0101ffffff010101, 0x0101ffff00ff0000, 0x0101ffff0000ff00, + 0x0101ffff000000ff, 0x0101ffff00000001, 0x0101ffff00000100, 0x0101ffff01ffffff, + 0x0101ffff01ffff01, 0x0101ffff01ff01ff, 0x0101ffff01ff0101, 0x0101ffff01000000, + 0x0101ffff0101ffff, 0x0101ffff0101ff01, 0x0101ffff010101ff, 0x0101ffff01010101, + 0x0101ff00ffff0000, 0x0101ff00ffff0100, 0x0101ff00ff00ff00, 0x0101ff00ff0000ff, + 0x0101ff00ff000001, 0x0101ff00ff000100, 0x0101ff00ff000101, 0x0101ff0000ff0001, + 0x0101ff0000ff0100, 0x0101ff000000ff00, 0x0101ff0000000000, 0x0101ff00000001ff, + 0x0101ff0000000101, 0x0101ff000001ff00, 0x0101ff00000100ff, 0x0101ff0001ff0000, + 0x0101ff000100ffff, 0x0101ff000100ff01, 0x0101ff0001000001, 0x0101ff0001000100, + 0x0101ff01ffffff01, 0x0101ff01ffff01ff, 0x0101ff01ffff0101, 0x0101ff01ff00ffff, + 0x0101ff01ff000100, 0x0101ff01ff01ff01, 0x0101ff01ff0101ff, 0x0101ff01ff010101, + 0x0101ff0100ff0000, 0x0101ff010000ff00, 0x0101ff0100000001, 0x0101ff0100000100, + 0x0101ff0100010000, 0x0101ff0101ffffff, 0x0101ff0101ffff01, 0x0101ff0101ff01ff, + 0x0101ff0101ff0101, 0x0101ff0101000000, 0x0101ff010101ffff, 0x0101ff010101ff01, + 0x0101ff01010101ff, 0x0101ff0101010101, 0x010100ffff000100, 0x010100ffff010000, + 0x010100ff00ffff00, 0x010100ff00ff00ff, 0x010100ff0000ffff, 0x010100ff000000ff, + 0x010100ff00000000, 0x010100ff000001ff, 0x010100ff00000101, 0x010100ff0001ff00, + 
0x010100ff00010000, 0x010100ff00010001, 0x010100ff000101ff, 0x010100ff00010100, + 0x010100ff01ff0000, 0x01010000ffff0001, 0x01010000ffff0100, 0x01010000ff00ffff, + 0x01010000ff00ff01, 0x01010000ff000000, 0x01010000ff0001ff, 0x01010000ff010001, + 0x01010000ff010100, 0x0101000000ffff01, 0x0101000000ff0000, 0x010100000000ff00, + 0x01010000000000ff, 0x0101000000000000, 0x0101000000000001, 0x0101000000000100, + 0x0101000000010000, 0x0101000000010101, 0x0101000001ffff00, 0x0101000001ff00ff, + 0x0101000001ff0000, 0x0101000001ff0001, 0x0101000001ff0100, 0x010100000100ff01, + 0x0101000001000000, 0x01010000010001ff, 0x01010001ffff0000, 0x01010001ff00ff00, + 0x01010001ff000001, 0x01010001ff000101, 0x01010001ff01ff00, 0x01010001ff010000, + 0x0101000100ff00ff, 0x0101000100ff0001, 0x0101000100ff0101, 0x010100010000ff01, + 0x0101000100000000, 0x0101000100000001, 0x01010001000001ff, 0x010100010001ffff, + 0x010100010001ff01, 0x0101000101ff0001, 0x010100010100ffff, 0x0101000101000000, + 0x0101000101000001, 0x0101000101000100, 0x010100010101ff00, 0x01010001010100ff, + 0x0101000101010001, 0x010101ffffffffff, 0x010101ffffffff01, 0x010101ffffff01ff, + 0x010101ffffff0101, 0x010101ffff01ffff, 0x010101ffff01ff01, 0x010101ffff0101ff, + 0x010101ffff010101, 0x010101ff0000ff00, 0x010101ff000000ff, 0x010101ff00000001, + 0x010101ff00000100, 0x010101ff01ffffff, 0x010101ff01ffff01, 0x010101ff01ff01ff, + 0x010101ff01ff0101, 0x010101ff01000000, 0x010101ff0101ffff, 0x010101ff0101ff01, + 0x010101ff010101ff, 0x010101ff01010101, 0x01010100ffff0000, 0x01010100ff0000ff, + 0x01010100ff000100, 0x01010100ff01ff00, 0x01010100ff010000, 0x0101010000ffff00, + 0x010101000000ffff, 0x0101010000000000, 0x0101010000000101, 0x010101000001ff00, + 0x0101010000010001, 0x0101010000010100, 0x010101000100ffff, 0x0101010001000001, + 0x01010101ffffffff, 0x01010101ffffff01, 0x01010101ffff01ff, 0x01010101ffff0101, + 0x01010101ff01ffff, 0x01010101ff01ff01, 0x01010101ff0101ff, 0x01010101ff010101, + 0x010101010000ff00, 
0x01010101000000ff, 0x0101010100000001, 0x0101010101ffffff, + 0x0101010101ffff01, 0x0101010101ff01ff, 0x0101010101ff0101, 0x0101010101000000, + 0x010101010101ffff, 0x010101010101ff01, 0x01010101010101ff, 0x0101010101010101, +GGML_TABLE_END() +#else +GGML_TABLE_BEGIN(uint32_t, iq1s_grid_gpu, NGRID_IQ1S) + 0x00000000, 0x00000002, 0x00000101, 0x00000200, 0x00000202, 0x00010001, 0x00010101, 0x00020000, + 0x00020002, 0x00020200, 0x00020202, 0x01000101, 0x01010001, 0x01010100, 0x01010102, 0x01020101, + 0x02000000, 0x02000002, 0x02000200, 0x02000202, 0x02010101, 0x02020000, 0x02020002, 0x02020200, + 0x02020202, 0x00000110, 0x00000111, 0x00010011, 0x00010110, 0x00010112, 0x00010211, 0x00010212, + 0x00020111, 0x01000011, 0x01000112, 0x01000211, 0x01010012, 0x01010111, 0x01010212, 0x01020011, + 0x01020110, 0x01020112, 0x01020210, 0x02000111, 0x02010011, 0x02010110, 0x02010112, 0x02020111, + 0x00000020, 0x00000022, 0x00000220, 0x00000222, 0x00010121, 0x00020020, 0x00020022, 0x00020220, + 0x00020222, 0x01000121, 0x01010021, 0x01010221, 0x01020120, 0x01020221, 0x02000020, 0x02000022, + 0x02000220, 0x02000222, 0x02010021, 0x02010121, 0x02010221, 0x02020020, 0x02020022, 0x02020220, + 0x02020222, 0x00011001, 0x00011100, 0x00011102, 0x00021101, 0x01001001, 0x01001201, 0x01011101, + 0x01011202, 0x01021100, 0x01021101, 0x02011001, 0x02011201, 0x02021101, 0x00001011, 0x00001110, + 0x00001111, 0x00001112, 0x00011111, 0x00011210, 0x00011212, 0x00021211, 0x01001010, 0x01001111, + 0x01001212, 0x01011010, 0x01011011, 0x01011110, 0x01011111, 0x01011112, 0x01011211, 0x01021010, + 0x01021012, 0x01021111, 0x01021210, 0x01021212, 0x02001011, 0x02011011, 0x02011111, 0x02011210, + 0x02011212, 0x02021011, 0x02021110, 0x02021111, 0x02021112, 0x02021211, 0x00011120, 0x00011221, + 0x01001021, 0x01001120, 0x01011020, 0x01011022, 0x01011121, 0x01011220, 0x01021020, 0x01021021, + 0x01021122, 0x01021221, 0x02001121, 0x02011021, 0x02011120, 0x02011221, 0x00002000, 0x00002002, + 0x00002200, 0x00002202, 
0x00012101, 0x00022000, 0x00022002, 0x00022200, 0x00022202, 0x01002101, + 0x01012001, 0x01012102, 0x01022101, 0x02002000, 0x02002002, 0x02002200, 0x02002202, 0x02012101, + 0x02022000, 0x02022002, 0x02022200, 0x02022202, 0x00002111, 0x00012011, 0x00012110, 0x00012211, + 0x00022110, 0x00022111, 0x01002011, 0x01012010, 0x01012011, 0x01012111, 0x01022011, 0x01022110, + 0x01022211, 0x02012011, 0x02012110, 0x02012112, 0x02012211, 0x02022111, 0x00002020, 0x00002022, + 0x00002220, 0x00002222, 0x00012121, 0x00022020, 0x00022022, 0x00022220, 0x00022222, 0x01002121, + 0x01012021, 0x01012221, 0x01022021, 0x01022121, 0x02002020, 0x02002022, 0x02002121, 0x02002220, + 0x02002222, 0x02012121, 0x02022020, 0x02022022, 0x02022220, 0x02022222, 0x00110000, 0x00110001, + 0x00110100, 0x00110201, 0x00120100, 0x00120101, 0x01100001, 0x01100100, 0x01110000, 0x01110101, + 0x01110200, 0x01120001, 0x01120100, 0x01120101, 0x01120201, 0x02110001, 0x02110100, 0x02110102, + 0x02120001, 0x02120101, 0x00100011, 0x00100110, 0x00100112, 0x00100211, 0x00110010, 0x00110012, + 0x00110111, 0x00110210, 0x00120011, 0x00120110, 0x00120211, 0x01100111, 0x01100212, 0x01110010, + 0x01110011, 0x01110012, 0x01110110, 0x01110111, 0x01110112, 0x01110211, 0x01120010, 0x01120111, + 0x02100110, 0x02110012, 0x02110111, 0x02120011, 0x02120110, 0x00110021, 0x00110120, 0x00110122, + 0x00120121, 0x01100020, 0x01100122, 0x01100221, 0x01110022, 0x01110121, 0x01110220, 0x01110222, + 0x01120120, 0x01120122, 0x02100121, 0x02110021, 0x02110120, 0x02110122, 0x02120121, 0x00101001, + 0x00101102, 0x00101201, 0x00111100, 0x00111101, 0x00111200, 0x00111201, 0x00121001, 0x00121102, + 0x01101001, 0x01101101, 0x01101102, 0x01101200, 0x01101202, 0x01111001, 0x01111100, 0x01111101, + 0x01111102, 0x01111201, 0x01121002, 0x01121101, 0x01121200, 0x02101100, 0x02101201, 0x02111000, + 0x02111100, 0x02111101, 0x02111200, 0x02111201, 0x02111202, 0x02121001, 0x02121100, 0x02121101, + 0x02121201, 0x00101012, 0x00101111, 0x00101212, 0x00111011, 
0x00111110, 0x00111111, 0x00111112, + 0x00111211, 0x00121010, 0x00121012, 0x00121111, 0x00121210, 0x00121212, 0x01101011, 0x01101110, + 0x01101111, 0x01101112, 0x01111011, 0x01111012, 0x01111110, 0x01111111, 0x01111112, 0x01111211, + 0x01111212, 0x01121011, 0x01121110, 0x01121111, 0x01121112, 0x01121211, 0x02101010, 0x02101012, + 0x02101110, 0x02101111, 0x02101210, 0x02101212, 0x02111010, 0x02111011, 0x02111110, 0x02111111, + 0x02111112, 0x02111211, 0x02111212, 0x02121010, 0x02121012, 0x02121111, 0x00101021, 0x00101120, + 0x00101121, 0x00101122, 0x00111121, 0x00111122, 0x00111220, 0x00111222, 0x00121021, 0x00121122, + 0x01101020, 0x01101022, 0x01101120, 0x01101121, 0x01101220, 0x01101222, 0x01111021, 0x01111121, + 0x01111122, 0x01111220, 0x01111221, 0x01121021, 0x01121120, 0x01121121, 0x01121220, 0x01121221, + 0x01121222, 0x02101122, 0x02101222, 0x02111022, 0x02111121, 0x02121120, 0x02121221, 0x00112001, + 0x00112102, 0x00122101, 0x01102001, 0x01102100, 0x01102102, 0x01102201, 0x01112000, 0x01112101, + 0x01112200, 0x01112202, 0x01122000, 0x01122001, 0x01122100, 0x01122102, 0x01122201, 0x02102101, + 0x02112001, 0x02112100, 0x02122101, 0x00112010, 0x00112012, 0x00112111, 0x00112212, 0x00122011, + 0x00122111, 0x01102012, 0x01102110, 0x01102111, 0x01102210, 0x01112011, 0x01112110, 0x01112111, + 0x01112112, 0x01112211, 0x01112212, 0x01122010, 0x01122111, 0x01122212, 0x02102211, 0x02112011, + 0x02112012, 0x02112111, 0x02112210, 0x02122011, 0x02122112, 0x02122211, 0x00102221, 0x00112122, + 0x00122120, 0x00122122, 0x01102120, 0x01102122, 0x01102221, 0x01112020, 0x01112022, 0x01112121, + 0x01112220, 0x01122021, 0x01122122, 0x01122221, 0x02102121, 0x02112021, 0x02112122, 0x02112222, + 0x00200000, 0x00200002, 0x00200200, 0x00200202, 0x00210101, 0x00220000, 0x00220002, 0x00220101, + 0x00220200, 0x00220202, 0x01200101, 0x01210001, 0x01210201, 0x01220001, 0x01220101, 0x02200000, + 0x02200002, 0x02200200, 0x02200202, 0x02210101, 0x02220000, 0x02220002, 0x02220101, 0x02220200, + 
0x02220202, 0x00200111, 0x00210011, 0x00210110, 0x00210211, 0x00220111, 0x01200012, 0x01200110, + 0x01200211, 0x01210111, 0x01210210, 0x01210212, 0x01220011, 0x01220110, 0x01220111, 0x01220112, + 0x02200111, 0x02210010, 0x02210112, 0x02210211, 0x02220111, 0x00200021, 0x00200220, 0x00200222, + 0x00210021, 0x00210121, 0x00220020, 0x00220022, 0x00220220, 0x00220222, 0x01200121, 0x01210021, + 0x01210122, 0x01210221, 0x01220121, 0x02200021, 0x02200220, 0x02200222, 0x02210021, 0x02210121, + 0x02220020, 0x02220022, 0x02220220, 0x02220222, 0x00201101, 0x00211100, 0x00211102, 0x00211201, + 0x00221101, 0x01201100, 0x01201101, 0x01201102, 0x01201201, 0x01211002, 0x01211101, 0x01211200, + 0x01211202, 0x01221102, 0x02201101, 0x02211001, 0x02211100, 0x02211201, 0x02221001, 0x02221101, + 0x00201211, 0x00211111, 0x00221011, 0x00221211, 0x01201010, 0x01201111, 0x01201210, 0x01211011, + 0x01211110, 0x01211111, 0x01211211, 0x01221012, 0x01221111, 0x01221210, 0x02201211, 0x02211010, + 0x02211110, 0x02211111, 0x02211210, 0x02211212, 0x02221011, 0x02221110, 0x02221112, 0x02221211, + 0x00201121, 0x00211020, 0x00211022, 0x00211221, 0x00221121, 0x01201021, 0x01201221, 0x01211121, + 0x01221020, 0x01221021, 0x01221221, 0x02201120, 0x02201122, 0x02211020, 0x02211222, 0x00202000, + 0x00202002, 0x00202200, 0x00202202, 0x00212101, 0x00222000, 0x00222002, 0x00222200, 0x00222202, + 0x01202101, 0x01212001, 0x01212100, 0x01222101, 0x02202000, 0x02202002, 0x02202200, 0x02202202, + 0x02222000, 0x02222002, 0x02222200, 0x02222202, 0x00202211, 0x00212011, 0x00212110, 0x00212211, + 0x00222111, 0x01202112, 0x01202211, 0x01212012, 0x01212111, 0x01222011, 0x01222110, 0x01222112, + 0x01222211, 0x02202111, 0x02212010, 0x02212112, 0x02212211, 0x02222110, 0x02222111, 0x00202020, + 0x00202022, 0x00202220, 0x00202222, 0x00222020, 0x00222022, 0x00222220, 0x00222222, 0x01202121, + 0x01212021, 0x01212122, 0x01212221, 0x01222121, 0x02202020, 0x02202022, 0x02202220, 0x02202222, + 0x02212121, 0x02222020, 0x02222022, 
0x02222220, 0x02222222, 0x10000101, 0x10010001, 0x10010102, + 0x10020101, 0x11000201, 0x11010002, 0x11010101, 0x11010200, 0x11010202, 0x11020001, 0x11020100, + 0x11020102, 0x12010100, 0x12010201, 0x12020001, 0x12020102, 0x10000010, 0x10000011, 0x10000110, + 0x10000112, 0x10000211, 0x10010012, 0x10010111, 0x10010112, 0x10010210, 0x10010212, 0x10020011, + 0x10020112, 0x10020211, 0x11000111, 0x11000210, 0x11000212, 0x11010011, 0x11010110, 0x11010111, + 0x11010112, 0x11010211, 0x11010212, 0x11020111, 0x11020210, 0x11020212, 0x12000011, 0x12000110, + 0x12000112, 0x12010010, 0x12010012, 0x12010111, 0x12020010, 0x12020011, 0x12020012, 0x10000121, + 0x10010021, 0x10010120, 0x10010122, 0x10020121, 0x11000021, 0x11010022, 0x11010121, 0x11010222, + 0x11020120, 0x11020221, 0x12000221, 0x12010120, 0x12020121, 0x10001001, 0x10011101, 0x10011201, + 0x10021201, 0x11001101, 0x11001200, 0x11001202, 0x11011001, 0x11011100, 0x11011101, 0x11011102, + 0x11021001, 0x11021002, 0x11021101, 0x11021200, 0x11021202, 0x12001001, 0x12001102, 0x12001201, + 0x12011000, 0x12011002, 0x12011101, 0x12021000, 0x12021001, 0x12021201, 0x10001011, 0x10001012, + 0x10001111, 0x10001212, 0x10011011, 0x10011110, 0x10011111, 0x10011112, 0x10011211, 0x10021010, + 0x10021111, 0x10021212, 0x11001011, 0x11001110, 0x11001111, 0x11001112, 0x11001211, 0x11011010, + 0x11011011, 0x11011110, 0x11011111, 0x11011112, 0x11011210, 0x11011211, 0x11021011, 0x11021110, + 0x11021111, 0x11021112, 0x11021211, 0x12001012, 0x12001110, 0x12001111, 0x12001210, 0x12011011, + 0x12011110, 0x12011111, 0x12011112, 0x12011211, 0x12011212, 0x12021111, 0x12021210, 0x12021212, + 0x10001021, 0x10001121, 0x10001221, 0x10011120, 0x10011121, 0x10011220, 0x10011222, 0x10021021, + 0x10021120, 0x10021221, 0x11001020, 0x11001022, 0x11001121, 0x11001220, 0x11011020, 0x11011021, + 0x11011022, 0x11011121, 0x11011122, 0x11011221, 0x11021022, 0x11021121, 0x11021220, 0x12001021, + 0x12001121, 0x12001222, 0x12011120, 0x12011121, 0x12021021, 0x12021120, 
0x12021122, 0x10002101, + 0x10012001, 0x10012101, 0x10012202, 0x10022101, 0x11002002, 0x11002201, 0x11012000, 0x11012101, + 0x11012200, 0x11022001, 0x11022100, 0x11022102, 0x11022201, 0x12002101, 0x12012001, 0x12012100, + 0x12012102, 0x12012201, 0x12022101, 0x10002011, 0x10002111, 0x10002112, 0x10002212, 0x10012010, + 0x10012110, 0x10012111, 0x10012210, 0x10022011, 0x10022110, 0x10022112, 0x11002010, 0x11002111, + 0x11002212, 0x11012011, 0x11012012, 0x11012110, 0x11012111, 0x11012112, 0x11012211, 0x11022010, + 0x11022012, 0x11022111, 0x11022112, 0x11022212, 0x12002112, 0x12002211, 0x12012012, 0x12012111, + 0x12012112, 0x12012210, 0x12022011, 0x12022110, 0x12022112, 0x12022211, 0x10012122, 0x11002120, + 0x11002122, 0x11002221, 0x11012121, 0x11012220, 0x11012222, 0x11022120, 0x11022221, 0x12012120, + 0x12022121, 0x10100001, 0x10100100, 0x10100101, 0x10100102, 0x10100201, 0x10110002, 0x10110101, + 0x10110202, 0x10120001, 0x10120100, 0x10120201, 0x11100000, 0x11100101, 0x11100200, 0x11110001, + 0x11110100, 0x11110101, 0x11110102, 0x11110201, 0x11120101, 0x11120200, 0x12100102, 0x12100201, + 0x12110101, 0x12110200, 0x12120000, 0x12120001, 0x12120102, 0x12120201, 0x10100111, 0x10100210, + 0x10100211, 0x10100212, 0x10110011, 0x10110110, 0x10110111, 0x10110112, 0x10110210, 0x10110211, + 0x10120010, 0x10120111, 0x10120112, 0x10120210, 0x10120212, 0x11100011, 0x11100110, 0x11100111, + 0x11100112, 0x11100211, 0x11110010, 0x11110011, 0x11110012, 0x11110110, 0x11110111, 0x11110112, + 0x11110210, 0x11110211, 0x11110212, 0x11120011, 0x11120110, 0x11120111, 0x11120112, 0x11120211, + 0x12100012, 0x12100111, 0x12110011, 0x12110110, 0x12110111, 0x12110112, 0x12110211, 0x12120010, + 0x12120111, 0x12120212, 0x10100021, 0x10100122, 0x10110022, 0x10110121, 0x10110222, 0x10120021, + 0x10120120, 0x11100022, 0x11100121, 0x11100222, 0x11110021, 0x11110120, 0x11110121, 0x11110122, + 0x11110221, 0x11120022, 0x11120121, 0x12100121, 0x12110020, 0x12110022, 0x12110121, 0x12110221, + 0x12110222, 
0x12120120, 0x10101100, 0x10101101, 0x10111001, 0x10111100, 0x10111101, 0x10111102, + 0x10111200, 0x10111201, 0x10121001, 0x10121101, 0x10121200, 0x10121202, 0x11101001, 0x11101100, + 0x11101101, 0x11101102, 0x11101201, 0x11101202, 0x11111000, 0x11111001, 0x11111100, 0x11111101, + 0x11111102, 0x11111200, 0x11111201, 0x11111202, 0x11121001, 0x11121002, 0x11121100, 0x11121101, + 0x11121102, 0x11121201, 0x12101000, 0x12101200, 0x12101202, 0x12111001, 0x12111100, 0x12111101, + 0x12111102, 0x12111201, 0x12121001, 0x12121100, 0x12121101, 0x12121202, 0x10101011, 0x10101012, + 0x10101110, 0x10101111, 0x10101112, 0x10101211, 0x10111010, 0x10111011, 0x10111012, 0x10111110, + 0x10111111, 0x10111112, 0x10111211, 0x10111212, 0x10121011, 0x10121110, 0x10121111, 0x10121112, + 0x10121211, 0x11101010, 0x11101011, 0x11101012, 0x11101110, 0x11101111, 0x11101112, 0x11101210, + 0x11101211, 0x11111010, 0x11111011, 0x11111012, 0x11111110, 0x11111111, 0x11111112, 0x11111210, + 0x11111211, 0x11111212, 0x11121010, 0x11121011, 0x11121110, 0x11121111, 0x11121112, 0x11121210, + 0x11121211, 0x11121212, 0x12101011, 0x12101110, 0x12101111, 0x12101211, 0x12101212, 0x12111010, + 0x12111011, 0x12111110, 0x12111111, 0x12111112, 0x12111210, 0x12111211, 0x12121011, 0x12121110, + 0x12121111, 0x12121112, 0x12121211, 0x10101020, 0x10101021, 0x10101022, 0x10101120, 0x10101122, + 0x10101220, 0x10101221, 0x10111021, 0x10111120, 0x10111121, 0x10111220, 0x10111221, 0x10121020, + 0x10121021, 0x10121022, 0x10121120, 0x10121121, 0x10121122, 0x10121220, 0x10121221, 0x11101021, + 0x11101121, 0x11101122, 0x11101220, 0x11101221, 0x11101222, 0x11111020, 0x11111021, 0x11111022, + 0x11111120, 0x11111121, 0x11111122, 0x11111220, 0x11111221, 0x11111222, 0x11121021, 0x11121120, + 0x11121121, 0x11121221, 0x12101022, 0x12101121, 0x12101122, 0x12101220, 0x12101221, 0x12101222, + 0x12111021, 0x12111121, 0x12111222, 0x12121022, 0x12121121, 0x12121122, 0x12121220, 0x12121221, + 0x10102100, 0x10102101, 0x10102102, 0x10102201, 
0x10112000, 0x10112101, 0x10112200, 0x10122001, + 0x10122202, 0x11102101, 0x11102200, 0x11102202, 0x11112001, 0x11112100, 0x11112101, 0x11112102, + 0x11112200, 0x11112201, 0x11122000, 0x11122002, 0x11122100, 0x11122101, 0x12102002, 0x12102201, + 0x12112000, 0x12112002, 0x12112101, 0x12112200, 0x12122001, 0x12122201, 0x10102011, 0x10102012, + 0x10102111, 0x10102212, 0x10112011, 0x10112110, 0x10112111, 0x10112112, 0x10112211, 0x10122111, + 0x11102011, 0x11102110, 0x11102111, 0x11102112, 0x11102211, 0x11112010, 0x11112011, 0x11112012, + 0x11112110, 0x11112111, 0x11112112, 0x11112210, 0x11112211, 0x11112212, 0x11122011, 0x11122110, + 0x11122111, 0x11122112, 0x11122211, 0x12102011, 0x12102111, 0x12102211, 0x12112011, 0x12112110, + 0x12112111, 0x12112112, 0x12112210, 0x12112211, 0x12122111, 0x10102120, 0x10102220, 0x10112121, + 0x10112222, 0x10122020, 0x10122121, 0x10122122, 0x10122221, 0x11102121, 0x11102220, 0x11102221, + 0x11112021, 0x11112121, 0x11112122, 0x11112220, 0x11112221, 0x11122022, 0x11122121, 0x11122220, + 0x11122222, 0x12102021, 0x12102222, 0x12112022, 0x12112121, 0x12112122, 0x12112220, 0x12112222, + 0x12122021, 0x10200101, 0x10210100, 0x10210102, 0x10210201, 0x10220101, 0x11200100, 0x11210000, + 0x11210101, 0x11210102, 0x11210200, 0x11210202, 0x11220001, 0x11220100, 0x11220102, 0x11220201, + 0x12200001, 0x12210102, 0x12220101, 0x10200011, 0x10200110, 0x10200112, 0x10200211, 0x10210012, + 0x10210111, 0x10220011, 0x10220012, 0x10220112, 0x10220211, 0x11200111, 0x11200211, 0x11210011, + 0x11210111, 0x11210112, 0x11210211, 0x11220111, 0x11220112, 0x11220212, 0x12200110, 0x12200212, + 0x12210012, 0x12210111, 0x12220011, 0x12220112, 0x12220211, 0x10210021, 0x10210122, 0x10210221, + 0x11200020, 0x11200021, 0x11200122, 0x11210121, 0x11210122, 0x11210220, 0x11220020, 0x12200121, + 0x12210021, 0x12210122, 0x12220121, 0x10211001, 0x10211002, 0x10211101, 0x10211102, 0x10211202, + 0x10221001, 0x10221102, 0x10221201, 0x11201000, 0x11201002, 0x11201101, 0x11201200, 
0x11201202, + 0x11211001, 0x11211100, 0x11211101, 0x11211102, 0x11211201, 0x11211202, 0x11221000, 0x11221002, + 0x11221101, 0x12201100, 0x12201101, 0x12201201, 0x12211000, 0x12211002, 0x12211100, 0x12211101, + 0x12211102, 0x12211200, 0x12211202, 0x12221001, 0x12221100, 0x12221201, 0x10201111, 0x10201210, + 0x10201212, 0x10211011, 0x10211111, 0x10211112, 0x10211211, 0x11201110, 0x11201111, 0x11201112, + 0x11201211, 0x11211010, 0x11211011, 0x11211110, 0x11211111, 0x11211112, 0x11211211, 0x11221011, + 0x11221110, 0x11221111, 0x11221112, 0x11221211, 0x12201112, 0x12201211, 0x12201212, 0x12211011, + 0x12211111, 0x12211112, 0x12211211, 0x12211212, 0x12221012, 0x12221111, 0x12221112, 0x12221210, + 0x10201022, 0x10201221, 0x10211121, 0x10221020, 0x10221122, 0x10221220, 0x10221221, 0x11201020, + 0x11201121, 0x11201220, 0x11201222, 0x11211021, 0x11211120, 0x11211121, 0x11211122, 0x11211220, + 0x11211222, 0x11221020, 0x11221121, 0x11221220, 0x12201020, 0x12201022, 0x12201121, 0x12201222, + 0x12211120, 0x12211122, 0x12211220, 0x12211221, 0x12221020, 0x12221120, 0x12221122, 0x12221222, + 0x10212102, 0x10212201, 0x10222101, 0x11202001, 0x11212002, 0x11212101, 0x11212202, 0x11222001, + 0x11222201, 0x12202101, 0x12212001, 0x12212200, 0x12222102, 0x10202011, 0x10202110, 0x10212010, + 0x10212111, 0x10222011, 0x10222110, 0x10222112, 0x10222211, 0x11202010, 0x11202011, 0x11202111, + 0x11202112, 0x11202210, 0x11212011, 0x11212110, 0x11212111, 0x11212112, 0x11212211, 0x11222010, + 0x11222111, 0x11222212, 0x12202012, 0x12202110, 0x12202212, 0x12212111, 0x12222011, 0x12222110, + 0x12222111, 0x12222211, 0x10212021, 0x10212122, 0x10212220, 0x11202021, 0x11202120, 0x11202221, + 0x11212020, 0x11212121, 0x11212220, 0x11212222, 0x11222120, 0x11222121, 0x11222221, 0x12202122, + 0x12212120, 0x12212220, 0x12212222, 0x12222122, 0x20000000, 0x20000002, 0x20000200, 0x20000202, + 0x20020000, 0x20020002, 0x20020200, 0x20020202, 0x21000101, 0x21010000, 0x21010001, 0x21010100, + 0x21010102, 0x21010201, 
0x21020101, 0x22000000, 0x22000002, 0x22000200, 0x22000202, 0x22010101, + 0x22020000, 0x22020002, 0x22020200, 0x22020202, 0x20000111, 0x20010011, 0x20010110, 0x20010112, + 0x20010211, 0x20020111, 0x21000011, 0x21000110, 0x21000211, 0x21010010, 0x21010012, 0x21010111, + 0x21010112, 0x21010210, 0x21010211, 0x21020110, 0x21020112, 0x21020211, 0x22000111, 0x22000211, + 0x22010110, 0x22010112, 0x22010211, 0x22020111, 0x20000020, 0x20000022, 0x20000220, 0x20000222, + 0x20010121, 0x20020020, 0x20020022, 0x20020220, 0x20020222, 0x21010021, 0x21010120, 0x21010221, + 0x21020121, 0x22000020, 0x22000022, 0x22000220, 0x22000222, 0x22010121, 0x22020020, 0x22020022, + 0x22020220, 0x22020222, 0x20011100, 0x20011201, 0x21001001, 0x21001100, 0x21011001, 0x21011101, + 0x21011202, 0x21021001, 0x21021100, 0x21021201, 0x22011100, 0x22011201, 0x20001011, 0x20001211, + 0x20011012, 0x20011111, 0x20011212, 0x20021112, 0x20021211, 0x21001010, 0x21001011, 0x21001111, + 0x21001210, 0x21011011, 0x21011110, 0x21011111, 0x21011112, 0x21011211, 0x21011212, 0x21021111, + 0x21021112, 0x21021210, 0x21021212, 0x22001011, 0x22001110, 0x22001112, 0x22001211, 0x22011010, + 0x22011012, 0x22011111, 0x22011210, 0x22021112, 0x20011021, 0x20011122, 0x20011221, 0x20021121, + 0x21001021, 0x21001120, 0x21001221, 0x21001222, 0x21011020, 0x21011121, 0x21011221, 0x21011222, + 0x21021021, 0x21021122, 0x21021222, 0x22001121, 0x22011021, 0x22011222, 0x22021120, 0x20002000, + 0x20002002, 0x20002200, 0x20002202, 0x20012101, 0x20022000, 0x20022002, 0x20022200, 0x20022202, + 0x21002001, 0x21002101, 0x21012001, 0x21012100, 0x21012201, 0x21022101, 0x21022201, 0x22002000, + 0x22002002, 0x22002200, 0x22002202, 0x22012101, 0x22022000, 0x22022002, 0x22022200, 0x22022202, + 0x20002111, 0x20002112, 0x20012011, 0x20012110, 0x20012112, 0x20022111, 0x21002011, 0x21002110, + 0x21002112, 0x21002211, 0x21012010, 0x21012012, 0x21012111, 0x21012212, 0x21022011, 0x21022110, + 0x22002111, 0x22012112, 0x22012211, 0x22022111, 0x20002020, 
0x20002022, 0x20002220, 0x20002222, + 0x20012121, 0x20022020, 0x20022022, 0x20022220, 0x20022222, 0x21002121, 0x21012021, 0x21012120, + 0x21012122, 0x22002020, 0x22002022, 0x22002220, 0x22002222, 0x22012121, 0x22022020, 0x22022022, + 0x22022220, 0x22022222, 0x20100101, 0x20110001, 0x20110102, 0x20110200, 0x20110201, 0x20120101, + 0x21100001, 0x21100102, 0x21100201, 0x21110101, 0x21110200, 0x21110202, 0x21120201, 0x21120202, + 0x22100101, 0x22110001, 0x22110100, 0x22110102, 0x22110201, 0x22120101, 0x20100011, 0x20100110, + 0x20100112, 0x20100211, 0x20110010, 0x20110111, 0x20110210, 0x20110212, 0x20120011, 0x20120110, + 0x20120112, 0x20120211, 0x21100010, 0x21100111, 0x21110010, 0x21110011, 0x21110110, 0x21110111, + 0x21110112, 0x21110211, 0x21120012, 0x21120111, 0x22100110, 0x22100112, 0x22110012, 0x22110111, + 0x22110210, 0x22120011, 0x22120110, 0x22120112, 0x22120211, 0x20100121, 0x20110021, 0x20110120, + 0x20110221, 0x20120121, 0x21100120, 0x21100122, 0x21100221, 0x21110020, 0x21110022, 0x21110121, + 0x21110220, 0x21120122, 0x21120221, 0x22100121, 0x22110120, 0x22110122, 0x22120221, 0x20101001, + 0x20101100, 0x20101102, 0x20111000, 0x20111101, 0x20111200, 0x20121102, 0x21101000, 0x21101202, + 0x21111001, 0x21111100, 0x21111101, 0x21111102, 0x21111200, 0x21111201, 0x21121000, 0x21121001, + 0x21121002, 0x21121101, 0x22101100, 0x22101102, 0x22111002, 0x22111100, 0x22111101, 0x22111200, + 0x22121001, 0x22121201, 0x20101010, 0x20101111, 0x20101210, 0x20101212, 0x20111010, 0x20111011, + 0x20111110, 0x20111111, 0x20111112, 0x20111211, 0x20121011, 0x20121111, 0x20121211, 0x20121212, + 0x21101011, 0x21101110, 0x21101111, 0x21101112, 0x21101211, 0x21111010, 0x21111011, 0x21111012, + 0x21111110, 0x21111111, 0x21111112, 0x21111210, 0x21111211, 0x21111212, 0x21121011, 0x21121110, + 0x21121111, 0x21121112, 0x21121211, 0x22101011, 0x22101111, 0x22101210, 0x22111011, 0x22111012, + 0x22111110, 0x22111111, 0x22111112, 0x22111211, 0x22111212, 0x22121010, 0x22121012, 0x22121111, + 
0x22121210, 0x22121212, 0x20101021, 0x20101120, 0x20111020, 0x20111121, 0x20111221, 0x20121020, + 0x20121122, 0x20121221, 0x21101121, 0x21101220, 0x21101221, 0x21111021, 0x21111022, 0x21111121, + 0x21111122, 0x21111221, 0x21121121, 0x21121220, 0x22101022, 0x22101120, 0x22101221, 0x22101222, + 0x22111022, 0x22111120, 0x22111121, 0x22121120, 0x22121122, 0x22121221, 0x20102101, 0x20112102, + 0x20112201, 0x20122101, 0x21102001, 0x21102102, 0x21112000, 0x21112002, 0x21112101, 0x21112102, + 0x21112202, 0x21122100, 0x21122101, 0x22102101, 0x22112001, 0x22112102, 0x22112201, 0x22122101, + 0x20102110, 0x20102112, 0x20102211, 0x20112010, 0x20112012, 0x20112111, 0x20112210, 0x20112212, + 0x20122010, 0x20122011, 0x20122110, 0x20122112, 0x21102010, 0x21102012, 0x21102111, 0x21102210, + 0x21102212, 0x21112011, 0x21112110, 0x21112111, 0x21112112, 0x21112211, 0x21122012, 0x21122111, + 0x21122112, 0x21122212, 0x22102011, 0x22102110, 0x22112010, 0x22112012, 0x22112111, 0x22112212, + 0x22122011, 0x22122112, 0x20102121, 0x20112121, 0x20122121, 0x21102120, 0x21102122, 0x21102221, + 0x21112020, 0x21112121, 0x21112220, 0x21122021, 0x22102121, 0x22112021, 0x22112120, 0x22112121, + 0x22112122, 0x20200000, 0x20200002, 0x20200200, 0x20200202, 0x20210101, 0x20220000, 0x20220002, + 0x20220200, 0x20220202, 0x21200101, 0x21210001, 0x21210100, 0x21210102, 0x21210201, 0x22200000, + 0x22200002, 0x22200200, 0x22200202, 0x22210101, 0x22220000, 0x22220002, 0x22220200, 0x22220202, + 0x20200111, 0x20200211, 0x20210011, 0x20210110, 0x20210112, 0x20210211, 0x20210212, 0x21200112, + 0x21200211, 0x21210011, 0x21210111, 0x21210210, 0x21210212, 0x21220011, 0x21220110, 0x22200111, + 0x22210010, 0x22210012, 0x22210112, 0x22210211, 0x20200022, 0x20200220, 0x20200222, 0x20210020, + 0x20210221, 0x20220022, 0x20220220, 0x20220222, 0x21200121, 0x21210021, 0x21210122, 0x21210221, + 0x21220121, 0x22200020, 0x22200022, 0x22200220, 0x22200222, 0x22210121, 0x22220020, 0x22220022, + 0x22220220, 0x22220222, 0x20211201, 
0x20221101, 0x21201001, 0x21201100, 0x21211000, 0x21211100, + 0x21211101, 0x21211200, 0x21211202, 0x21221001, 0x21221101, 0x21221102, 0x21221200, 0x21221201, + 0x22201101, 0x20201112, 0x20201211, 0x20211010, 0x20211012, 0x20211111, 0x20211210, 0x20221112, + 0x20221211, 0x21201012, 0x21201111, 0x21211011, 0x21211110, 0x21211111, 0x21211112, 0x21211211, + 0x21221111, 0x21221212, 0x22201011, 0x22201110, 0x22201111, 0x22201112, 0x22201211, 0x22211012, + 0x22211111, 0x22211210, 0x20201121, 0x20211021, 0x20211122, 0x20211222, 0x20221021, 0x20221121, + 0x21201120, 0x21201122, 0x21201222, 0x21211022, 0x21211121, 0x21211122, 0x21211220, 0x21221020, + 0x21221022, 0x22201122, 0x22211020, 0x22211121, 0x22211122, 0x22211221, 0x22221021, 0x22221120, + 0x22221122, 0x20202000, 0x20202002, 0x20202200, 0x20202202, 0x20222000, 0x20222002, 0x20222200, + 0x20222202, 0x21212001, 0x21212100, 0x21212102, 0x21212201, 0x22202000, 0x22202002, 0x22202200, + 0x22202202, 0x22212101, 0x22222000, 0x22222002, 0x22222200, 0x22222202, 0x20202111, 0x20212110, + 0x20212211, 0x20222011, 0x20222111, 0x21202011, 0x21212010, 0x21212111, 0x21212212, 0x21222011, + 0x21222112, 0x21222211, 0x22212010, 0x22212112, 0x20202020, 0x20202022, 0x20202220, 0x20202222, + 0x20222020, 0x20222022, 0x20222220, 0x20222222, 0x21212021, 0x21212120, 0x21212122, 0x22202020, + 0x22202022, 0x22202220, 0x22202222, 0x22212121, 0x22222020, 0x22222022, 0x22222220, 0x22222222, +GGML_TABLE_END() +#endif + +#endif // GGML_COMMON_IMPL +#endif // GGML_COMMON_IMPL diff --git a/ml/backend/ggml/ggml-cpu-impl.h b/ml/backend/ggml/ggml-cpu-impl.h new file mode 100644 index 000000000..93dfb83a8 --- /dev/null +++ b/ml/backend/ggml/ggml-cpu-impl.h @@ -0,0 +1,640 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated 
documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +#pragma once + +// GGML CPU internal header + +#include "ggml.h" +#include "ggml-impl.h" +#include // load `stdlib.h` before other headers to work around MinGW bug: https://sourceforge.net/p/mingw-w64/bugs/192/ +//#include +#include +#include // memcpy +#include // fabsf + + +#ifdef __cplusplus +extern "C" { +#endif + +#if defined(_MSC_VER) + +#define m512bh(p) p +#define m512i(p) p + +#else + +#define m512bh(p) (__m512bh)(p) +#define m512i(p) (__m512i)(p) + +#endif + +/** + * Converts brain16 to float32. + * + * The bfloat16 floating point format has the following structure: + * + * ┌sign + * │ + * │ ┌exponent + * │ │ + * │ │ ┌mantissa + * │ │ │ + * │┌──┴───┐┌─┴───┐ + * 0b0000000000000000 brain16 + * + * Since bf16 has the same number of exponent bits as a 32bit float, + * encoding and decoding numbers becomes relatively straightforward. 
+ * + * ┌sign + * │ + * │ ┌exponent + * │ │ + * │ │ ┌mantissa + * │ │ │ + * │┌──┴───┐┌─┴───────────────────┐ + * 0b00000000000000000000000000000000 IEEE binary32 + * + * For comparison, the standard fp16 format has fewer exponent bits. + * + * ┌sign + * │ + * │ ┌exponent + * │ │ + * │ │ ┌mantissa + * │ │ │ + * │┌─┴─┐┌─┴──────┐ + * 0b0000000000000000 IEEE binary16 + * + * @see IEEE 754-2008 + */ +static inline float ggml_compute_bf16_to_fp32(ggml_bf16_t h) { + union { + float f; + uint32_t i; + } u; + u.i = (uint32_t)h.bits << 16; + return u.f; +} + +/** + * Converts float32 to brain16. + * + * This is binary identical with Google Brain float conversion. + * Floats shall round to nearest even, and NANs shall be quiet. + * Subnormals aren't flushed to zero, except perhaps when used. + * This code should vectorize nicely if using modern compilers. + */ +static inline ggml_bf16_t ggml_compute_fp32_to_bf16(float s) { + ggml_bf16_t h; + union { + float f; + uint32_t i; + } u; + u.f = s; + if ((u.i & 0x7fffffff) > 0x7f800000) { /* nan */ + h.bits = (u.i >> 16) | 64; /* force to quiet */ + return h; + } + h.bits = (u.i + (0x7fff + ((u.i >> 16) & 1))) >> 16; + return h; +} + +#define GGML_FP32_TO_BF16(x) ggml_compute_fp32_to_bf16(x) +#define GGML_BF16_TO_FP32(x) ggml_compute_bf16_to_fp32(x) + +// __FMA__ and __F16C__ are not defined in MSVC, however they are implied with AVX2/AVX512 +#if defined(_MSC_VER) && (defined(__AVX2__) || defined(__AVX512F__)) +#ifndef __FMA__ +#define __FMA__ +#endif +#ifndef __F16C__ +#define __F16C__ +#endif +#endif + +// __SSE3__ and __SSSE3__ are not defined in MSVC, but SSE3/SSSE3 are present when AVX/AVX2/AVX512 are available +#if defined(_MSC_VER) && (defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__)) +#ifndef __SSE3__ +#define __SSE3__ +#endif +#ifndef __SSSE3__ +#define __SSSE3__ +#endif +#endif + +#if defined(__ARM_FEATURE_SVE) +#include +#include +#endif + +// 16-bit float +// on Arm, we use __fp16 +// on x86, we use uint16_t 
+#if defined(__ARM_NEON) + +// if YCM cannot find , make a symbolic link to it, for example: +// +// $ ln -sfn /Library/Developer/CommandLineTools/usr/lib/clang/13.1.6/include/arm_neon.h ./src/ +// +#include + +#ifdef _MSC_VER + +typedef uint16_t ggml_fp16_internal_t; + +#define ggml_vld1q_u32(w,x,y,z) { ((w) + ((uint64_t)(x) << 32)), ((y) + ((uint64_t)(z) << 32)) } + +#else + +typedef __fp16 ggml_fp16_internal_t; + +#define ggml_vld1q_u32(w,x,y,z) { (w), (x), (y), (z) } + +#endif // _MSC_VER + +#if !defined(__aarch64__) + +// 32-bit ARM compatibility + +// vaddlvq_s16 +// vpaddq_s16 +// vpaddq_s32 +// vaddvq_s32 +// vaddvq_f32 +// vmaxvq_f32 +// vcvtnq_s32_f32 +// vzip1_u8 +// vzip2_u8 + +inline static int32_t vaddlvq_s16(int16x8_t v) { + int32x4_t v0 = vreinterpretq_s32_s64(vpaddlq_s32(vpaddlq_s16(v))); + return vgetq_lane_s32(v0, 0) + vgetq_lane_s32(v0, 2); +} + +inline static int16x8_t vpaddq_s16(int16x8_t a, int16x8_t b) { + int16x4_t a0 = vpadd_s16(vget_low_s16(a), vget_high_s16(a)); + int16x4_t b0 = vpadd_s16(vget_low_s16(b), vget_high_s16(b)); + return vcombine_s16(a0, b0); +} + +inline static int32x4_t vpaddq_s32(int32x4_t a, int32x4_t b) { + int32x2_t a0 = vpadd_s32(vget_low_s32(a), vget_high_s32(a)); + int32x2_t b0 = vpadd_s32(vget_low_s32(b), vget_high_s32(b)); + return vcombine_s32(a0, b0); +} + +inline static int32_t vaddvq_s32(int32x4_t v) { + return vgetq_lane_s32(v, 0) + vgetq_lane_s32(v, 1) + vgetq_lane_s32(v, 2) + vgetq_lane_s32(v, 3); +} + +inline static float vaddvq_f32(float32x4_t v) { + return vgetq_lane_f32(v, 0) + vgetq_lane_f32(v, 1) + vgetq_lane_f32(v, 2) + vgetq_lane_f32(v, 3); +} + +inline static float vmaxvq_f32(float32x4_t v) { + return + MAX(MAX(vgetq_lane_f32(v, 0), vgetq_lane_f32(v, 1)), + MAX(vgetq_lane_f32(v, 2), vgetq_lane_f32(v, 3))); +} + +inline static int32x4_t vcvtnq_s32_f32(float32x4_t v) { + int32x4_t res; + + res[0] = roundf(vgetq_lane_f32(v, 0)); + res[1] = roundf(vgetq_lane_f32(v, 1)); + res[2] = 
roundf(vgetq_lane_f32(v, 2)); + res[3] = roundf(vgetq_lane_f32(v, 3)); + + return res; +} + +inline static uint8x8_t vzip1_u8(uint8x8_t a, uint8x8_t b) { + uint8x8_t res; + + res[0] = a[0]; res[1] = b[0]; + res[2] = a[1]; res[3] = b[1]; + res[4] = a[2]; res[5] = b[2]; + res[6] = a[3]; res[7] = b[3]; + + return res; +} + +inline static uint8x8_t vzip2_u8(uint8x8_t a, uint8x8_t b) { + uint8x8_t res; + + res[0] = a[4]; res[1] = b[4]; + res[2] = a[5]; res[3] = b[5]; + res[4] = a[6]; res[5] = b[6]; + res[6] = a[7]; res[7] = b[7]; + + return res; +} + +// vld1q_s16_x2 +// vld1q_u8_x2 +// vld1q_u8_x4 +// vld1q_s8_x2 +// vld1q_s8_x4 +// TODO: double-check these work correctly + +typedef struct ggml_int16x8x2_t { + int16x8_t val[2]; +} ggml_int16x8x2_t; + +inline static ggml_int16x8x2_t ggml_vld1q_s16_x2(const int16_t * ptr) { + ggml_int16x8x2_t res; + + res.val[0] = vld1q_s16(ptr + 0); + res.val[1] = vld1q_s16(ptr + 8); + + return res; +} + +typedef struct ggml_uint8x16x2_t { + uint8x16_t val[2]; +} ggml_uint8x16x2_t; + +inline static ggml_uint8x16x2_t ggml_vld1q_u8_x2(const uint8_t * ptr) { + ggml_uint8x16x2_t res; + + res.val[0] = vld1q_u8(ptr + 0); + res.val[1] = vld1q_u8(ptr + 16); + + return res; +} + +typedef struct ggml_uint8x16x4_t { + uint8x16_t val[4]; +} ggml_uint8x16x4_t; + +inline static ggml_uint8x16x4_t ggml_vld1q_u8_x4(const uint8_t * ptr) { + ggml_uint8x16x4_t res; + + res.val[0] = vld1q_u8(ptr + 0); + res.val[1] = vld1q_u8(ptr + 16); + res.val[2] = vld1q_u8(ptr + 32); + res.val[3] = vld1q_u8(ptr + 48); + + return res; +} + +typedef struct ggml_int8x16x2_t { + int8x16_t val[2]; +} ggml_int8x16x2_t; + +inline static ggml_int8x16x2_t ggml_vld1q_s8_x2(const int8_t * ptr) { + ggml_int8x16x2_t res; + + res.val[0] = vld1q_s8(ptr + 0); + res.val[1] = vld1q_s8(ptr + 16); + + return res; +} + +typedef struct ggml_int8x16x4_t { + int8x16_t val[4]; +} ggml_int8x16x4_t; + +inline static ggml_int8x16x4_t ggml_vld1q_s8_x4(const int8_t * ptr) { + ggml_int8x16x4_t res; + 
+ res.val[0] = vld1q_s8(ptr + 0); + res.val[1] = vld1q_s8(ptr + 16); + res.val[2] = vld1q_s8(ptr + 32); + res.val[3] = vld1q_s8(ptr + 48); + + return res; +} + +// NOTE: not tested +inline static int8x16_t ggml_vqtbl1q_s8(int8x16_t a, uint8x16_t b) { + int8x16_t res; + + res[ 0] = a[b[ 0]]; + res[ 1] = a[b[ 1]]; + res[ 2] = a[b[ 2]]; + res[ 3] = a[b[ 3]]; + res[ 4] = a[b[ 4]]; + res[ 5] = a[b[ 5]]; + res[ 6] = a[b[ 6]]; + res[ 7] = a[b[ 7]]; + res[ 8] = a[b[ 8]]; + res[ 9] = a[b[ 9]]; + res[10] = a[b[10]]; + res[11] = a[b[11]]; + res[12] = a[b[12]]; + res[13] = a[b[13]]; + res[14] = a[b[14]]; + res[15] = a[b[15]]; + + return res; +} + +// NOTE: not tested +inline static uint8x16_t ggml_vqtbl1q_u8(uint8x16_t a, uint8x16_t b) { + uint8x16_t res; + + res[ 0] = a[b[ 0]]; + res[ 1] = a[b[ 1]]; + res[ 2] = a[b[ 2]]; + res[ 3] = a[b[ 3]]; + res[ 4] = a[b[ 4]]; + res[ 5] = a[b[ 5]]; + res[ 6] = a[b[ 6]]; + res[ 7] = a[b[ 7]]; + res[ 8] = a[b[ 8]]; + res[ 9] = a[b[ 9]]; + res[10] = a[b[10]]; + res[11] = a[b[11]]; + res[12] = a[b[12]]; + res[13] = a[b[13]]; + res[14] = a[b[14]]; + res[15] = a[b[15]]; + + return res; +} + +#else + +#define ggml_int16x8x2_t int16x8x2_t +#define ggml_uint8x16x2_t uint8x16x2_t +#define ggml_uint8x16x4_t uint8x16x4_t +#define ggml_int8x16x2_t int8x16x2_t +#define ggml_int8x16x4_t int8x16x4_t + +#define ggml_vld1q_s16_x2 vld1q_s16_x2 +#define ggml_vld1q_u8_x2 vld1q_u8_x2 +#define ggml_vld1q_u8_x4 vld1q_u8_x4 +#define ggml_vld1q_s8_x2 vld1q_s8_x2 +#define ggml_vld1q_s8_x4 vld1q_s8_x4 +#define ggml_vqtbl1q_s8 vqtbl1q_s8 +#define ggml_vqtbl1q_u8 vqtbl1q_u8 + +#endif // !defined(__aarch64__) + +#if !defined(__ARM_FEATURE_DOTPROD) + +inline static int32x4_t ggml_vdotq_s32(int32x4_t acc, int8x16_t a, int8x16_t b) { + const int16x8_t p0 = vmull_s8(vget_low_s8 (a), vget_low_s8 (b)); + const int16x8_t p1 = vmull_s8(vget_high_s8(a), vget_high_s8(b)); + + return vaddq_s32(acc, vaddq_s32(vpaddlq_s16(p0), vpaddlq_s16(p1))); +} + +#else + +#define 
ggml_vdotq_s32(a, b, c) vdotq_s32(a, b, c) + +#endif // !defined(__ARM_FEATURE_DOTPROD) + +#endif // defined(__ARM_NEON) + +#if defined(__ARM_NEON) && !defined(_MSC_VER) + +#define GGML_COMPUTE_FP16_TO_FP32(x) ggml_compute_fp16_to_fp32(x) +#define GGML_COMPUTE_FP32_TO_FP16(x) ggml_compute_fp32_to_fp16(x) + +#define GGML_FP16_TO_FP32(x) ggml_compute_fp16_to_fp32(x) + +static inline float ggml_compute_fp16_to_fp32(ggml_fp16_t h) { + ggml_fp16_internal_t tmp; + memcpy(&tmp, &h, sizeof(ggml_fp16_t)); + return (float)tmp; +} + +static inline ggml_fp16_t ggml_compute_fp32_to_fp16(float f) { + ggml_fp16_t res; + ggml_fp16_internal_t tmp = f; + memcpy(&res, &tmp, sizeof(ggml_fp16_t)); + return res; +} + +#else + +#ifdef __wasm_simd128__ +#include +#else +#ifdef __POWER9_VECTOR__ +#include +#undef bool +#define bool _Bool +#else +#if defined(_MSC_VER) || defined(__MINGW32__) +#include +#else +#if defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) || defined(__SSE3__) || defined(__SSE__) +#if !defined(__riscv) +#include +#endif +#endif +#endif +#endif +#endif + +#ifdef __riscv_v_intrinsic +#include +#endif + +#if defined(__loongarch64) +#if defined(__loongarch_asx) +#include +#endif +#if defined(__loongarch_sx) +#include +#endif +#endif + +#if defined(__loongarch_asx) + +typedef union { + int32_t i; + float f; +} ft_union; + +/* float type data load instructions */ +static __m128 __lsx_vreplfr2vr_s(float val) { + ft_union fi_tmpval = {.f = val}; + return (__m128)__lsx_vreplgr2vr_w(fi_tmpval.i); +} + +static __m256 __lasx_xvreplfr2vr_s(float val) { + ft_union fi_tmpval = {.f = val}; + return (__m256)__lasx_xvreplgr2vr_w(fi_tmpval.i); +} +#endif + +#ifdef __F16C__ + +#ifdef _MSC_VER +#define GGML_COMPUTE_FP16_TO_FP32(x) _mm_cvtss_f32(_mm_cvtph_ps(_mm_cvtsi32_si128(x))) +#define GGML_COMPUTE_FP32_TO_FP16(x) _mm_extract_epi16(_mm_cvtps_ph(_mm_set_ss(x), 0), 0) +#else +#define GGML_COMPUTE_FP16_TO_FP32(x) _cvtsh_ss(x) +#define 
GGML_COMPUTE_FP32_TO_FP16(x) _cvtss_sh(x, 0) +#endif + +#elif defined(__POWER9_VECTOR__) + +#define GGML_COMPUTE_FP16_TO_FP32(x) ggml_compute_fp16_to_fp32(x) +#define GGML_COMPUTE_FP32_TO_FP16(x) ggml_compute_fp32_to_fp16(x) +/* the inline asm below is about 12% faster than the lookup method */ +#define GGML_FP16_TO_FP32(x) GGML_COMPUTE_FP16_TO_FP32(x) +#define GGML_FP32_TO_FP16(x) GGML_COMPUTE_FP32_TO_FP16(x) + +static inline float ggml_compute_fp16_to_fp32(ggml_fp16_t h) { + register float f; + register double d; + __asm__( + "mtfprd %0,%2\n" + "xscvhpdp %0,%0\n" + "frsp %1,%0\n" : + /* temp */ "=d"(d), + /* out */ "=f"(f): + /* in */ "r"(h)); + return f; +} + +static inline ggml_fp16_t ggml_compute_fp32_to_fp16(float f) { + register double d; + register ggml_fp16_t r; + __asm__( /* xscvdphp can work on double or single precision */ + "xscvdphp %0,%2\n" + "mffprd %1,%0\n" : + /* temp */ "=d"(d), + /* out */ "=r"(r): + /* in */ "f"(f)); + return r; +} + +#else + +// FP16 <-> FP32 +// ref: https://github.com/Maratyszcza/FP16 + +static inline float fp32_from_bits(uint32_t w) { + union { + uint32_t as_bits; + float as_value; + } fp32; + fp32.as_bits = w; + return fp32.as_value; +} + +static inline uint32_t fp32_to_bits(float f) { + union { + float as_value; + uint32_t as_bits; + } fp32; + fp32.as_value = f; + return fp32.as_bits; +} + +static inline float ggml_compute_fp16_to_fp32(ggml_fp16_t h) { + const uint32_t w = (uint32_t) h << 16; + const uint32_t sign = w & UINT32_C(0x80000000); + const uint32_t two_w = w + w; + + const uint32_t exp_offset = UINT32_C(0xE0) << 23; +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) || defined(__GNUC__) && !defined(__STRICT_ANSI__) + const float exp_scale = 0x1.0p-112f; +#else + const float exp_scale = fp32_from_bits(UINT32_C(0x7800000)); +#endif + const float normalized_value = fp32_from_bits((two_w >> 4) + exp_offset) * exp_scale; + + const uint32_t magic_mask = UINT32_C(126) << 23; + const float magic_bias = 
0.5f; + const float denormalized_value = fp32_from_bits((two_w >> 17) | magic_mask) - magic_bias; + + const uint32_t denormalized_cutoff = UINT32_C(1) << 27; + const uint32_t result = sign | + (two_w < denormalized_cutoff ? fp32_to_bits(denormalized_value) : fp32_to_bits(normalized_value)); + return fp32_from_bits(result); +} + +static inline ggml_fp16_t ggml_compute_fp32_to_fp16(float f) { +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) || defined(__GNUC__) && !defined(__STRICT_ANSI__) + const float scale_to_inf = 0x1.0p+112f; + const float scale_to_zero = 0x1.0p-110f; +#else + const float scale_to_inf = fp32_from_bits(UINT32_C(0x77800000)); + const float scale_to_zero = fp32_from_bits(UINT32_C(0x08800000)); +#endif + float base = (fabsf(f) * scale_to_inf) * scale_to_zero; + + const uint32_t w = fp32_to_bits(f); + const uint32_t shl1_w = w + w; + const uint32_t sign = w & UINT32_C(0x80000000); + uint32_t bias = shl1_w & UINT32_C(0xFF000000); + if (bias < UINT32_C(0x71000000)) { + bias = UINT32_C(0x71000000); + } + + base = fp32_from_bits((bias >> 1) + UINT32_C(0x07800000)) + base; + const uint32_t bits = fp32_to_bits(base); + const uint32_t exp_bits = (bits >> 13) & UINT32_C(0x00007C00); + const uint32_t mantissa_bits = bits & UINT32_C(0x00000FFF); + const uint32_t nonsign = exp_bits + mantissa_bits; + return (sign >> 16) | (shl1_w > UINT32_C(0xFF000000) ? 
UINT16_C(0x7E00) : nonsign); +} + +#define GGML_COMPUTE_FP16_TO_FP32(x) ggml_compute_fp16_to_fp32(x) +#define GGML_COMPUTE_FP32_TO_FP16(x) ggml_compute_fp32_to_fp16(x) + +#endif // __F16C__ + +#endif // defined(__ARM_NEON) && (!defined(__MSC_VER) + +#ifdef __ARM_FEATURE_SVE +#include +#endif // __ARM_FEATURE_SVE + +// precomputed f32 table for f16 (256 KB) +// defined in ggml.c, initialized in ggml_init() +extern float ggml_table_f32_f16[1 << 16]; + +// On ARM NEON, it's quicker to directly convert x -> x instead of calling into ggml_lookup_fp16_to_fp32, +// so we define GGML_FP16_TO_FP32 and GGML_FP32_TO_FP16 elsewhere for NEON. +// This is also true for POWER9. +#if !defined(GGML_FP16_TO_FP32) +inline static float ggml_lookup_fp16_to_fp32(ggml_fp16_t f) { + uint16_t s; + memcpy(&s, &f, sizeof(uint16_t)); + return ggml_table_f32_f16[s]; +} + +#define GGML_FP16_TO_FP32(x) ggml_lookup_fp16_to_fp32(x) +#endif + +#if !defined(GGML_FP32_TO_FP16) +#define GGML_FP32_TO_FP16(x) GGML_COMPUTE_FP32_TO_FP16(x) +#endif + +#ifdef __cplusplus +} +#endif diff --git a/ml/backend/ggml/ggml-cuda.h b/ml/backend/ggml/ggml-cuda.h new file mode 100644 index 000000000..023c62b82 --- /dev/null +++ b/ml/backend/ggml/ggml-cuda.h @@ -0,0 +1,75 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the 
Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +#pragma once + +#include "ggml.h" +#include "ggml-backend.h" + +#ifdef GGML_USE_HIPBLAS +#define GGML_CUDA_NAME "ROCm" +#define GGML_CUBLAS_NAME "hipBLAS" +#elif defined(GGML_USE_MUSA) +#define GGML_CUDA_NAME "MUSA" +#define GGML_CUBLAS_NAME "muBLAS" +#else +#define GGML_CUDA_NAME "CUDA" +#define GGML_CUBLAS_NAME "cuBLAS" +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +#define GGML_CUDA_MAX_DEVICES 16 + +// backend API +GGML_API GGML_CALL ggml_backend_t ggml_backend_cuda_init(int device); + +GGML_API GGML_CALL bool ggml_backend_is_cuda(ggml_backend_t backend); + +// device buffer +GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device); + +// split tensor buffer that splits matrices by rows across multiple devices +GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_cuda_split_buffer_type(const float * tensor_split); + +// pinned host buffer for use with the CPU backend for faster copies between CPU and GPU +GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type(void); + +GGML_API GGML_CALL int ggml_backend_cuda_reg_devices(); + +GGML_API GGML_CALL int ggml_backend_cuda_get_device_count(void); +GGML_API GGML_CALL void ggml_backend_cuda_get_device_description(int device, char * description, size_t description_size); +GGML_API GGML_CALL void ggml_backend_cuda_get_device_memory(int device, size_t * free, size_t * total); + +GGML_API GGML_CALL bool ggml_backend_cuda_register_host_buffer(void * 
buffer, size_t size); +GGML_API GGML_CALL void ggml_backend_cuda_unregister_host_buffer(void * buffer); + +GGML_API void ggml_backend_cuda_log_set_callback(ggml_log_callback log_callback, void * user_data); +#ifdef __cplusplus +} +#endif diff --git a/ml/backend/ggml/ggml-debug.c b/ml/backend/ggml/ggml-debug.c new file mode 100644 index 000000000..5d5054659 --- /dev/null +++ b/ml/backend/ggml/ggml-debug.c @@ -0,0 +1,110 @@ +#include + +#include "ggml-debug.h" + +static int mul(int64_t *dims, int ndims) { + int result = 1; + for (int i = 0; i < ndims; i++) { + result *= dims[i]; + } + + return result; +} + +static void repeat(char c, int n) { + for (int i = 0; i < n; i++) { + fprintf(stderr, "%c", c); + } +} + +static void print_tensor(const void *tensor, void (*cb)(const void *, int), + int shape, + int64_t *dims, int ndims, int stride, + int nitems, int pad) { + fprintf(stderr, "["); + for (int i = 0; i < dims[0]; i++) { + if (i >= nitems && i < dims[0] - nitems) { + fprintf(stderr, "... 
(%lld more), ", dims[0] - 2 * nitems); + int skip = dims[0] - 2 * nitems; + if (ndims > 1) { + stride += mul(dims + 1, ndims - 1) * skip; + repeat('\n', ndims - 1); + repeat(' ', shape - ndims + 1 + pad); + } + i += skip - 1; + } else if (ndims > 1) { + print_tensor(tensor, cb, shape, dims + 1, ndims - 1, stride, + nitems, pad); + stride += mul(dims + 1, ndims - 1); + if (i < dims[0] - 1) { + fprintf(stderr, ", "); + repeat('\n', ndims - 1); + repeat(' ', shape - ndims + 1 + pad); + } + } else { + cb(tensor, stride + i); + if (i < dims[0] - 1) { + fprintf(stderr, ", "); + } + } + } + fprintf(stderr, "]"); +} + +static void print_tensor_f16(const void *tensor, int i) { + fprintf(stderr, "%f", ggml_fp16_to_fp32(((const ggml_fp16_t *)tensor)[i])); +} + +static void print_tensor_f32(const void *tensor, int i) { + fprintf(stderr, "%f", ((const float *)tensor)[i]); +} + +static void print_tensor_i32(const void *tensor, int i) { + fprintf(stderr, "%d", ((const int32_t *)tensor)[i]); +} + +static void ggml_debug_tensor(const struct ggml_tensor *tensor, bool verbose, const char *prefix, int indent) { + fprintf(stderr, "%s%s %s (%s): [%lld %lld %lld %lld]\n", prefix, tensor->name, + ggml_op_name(tensor->op), ggml_type_name(tensor->type), tensor->ne[0], + tensor->ne[1], tensor->ne[2], tensor->ne[3]); + + if (!verbose) { + return; + } + + for (int i = 0; i < indent; i++) { + fprintf(stderr, " "); + } + + switch (tensor->type) { + case GGML_TYPE_F16: + print_tensor(ggml_get_data(tensor), print_tensor_f16, ggml_n_dims(tensor), + (int64_t *)tensor->ne, ggml_n_dims(tensor), 0, 3, indent); + break; + case GGML_TYPE_F32: + print_tensor(ggml_get_data(tensor), print_tensor_f32, ggml_n_dims(tensor), + (int64_t *)tensor->ne, ggml_n_dims(tensor), 0, 3, indent); + break; + case GGML_TYPE_I32: + print_tensor(ggml_get_data(tensor), print_tensor_i32, ggml_n_dims(tensor), + (int64_t *)tensor->ne, ggml_n_dims(tensor), 0, 3, indent); + break; + default: + fprintf(stderr, "\n"); + return; + } + 
+ fprintf(stderr, "\n"); +} + +void ggml_debug(const struct ggml_tensor *tensor, bool verbose) { + ggml_debug_tensor(tensor, verbose, ">>> ", 4); + + if (tensor->src[0] != NULL) { + ggml_debug_tensor(tensor->src[0], verbose, " ?? ", 4); + } + + if (tensor->src[1] != NULL) { + ggml_debug_tensor(tensor->src[1], verbose, " ?? ", 4); + } +} diff --git a/ml/backend/ggml/ggml-debug.h b/ml/backend/ggml/ggml-debug.h new file mode 100644 index 000000000..960ca7df9 --- /dev/null +++ b/ml/backend/ggml/ggml-debug.h @@ -0,0 +1,3 @@ +#include "ggml.h" + +void ggml_debug(const struct ggml_tensor *tensor, bool verbose); diff --git a/ml/backend/ggml/ggml-impl.h b/ml/backend/ggml/ggml-impl.h new file mode 100644 index 000000000..8cc8c3bde --- /dev/null +++ b/ml/backend/ggml/ggml-impl.h @@ -0,0 +1,212 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +#pragma once + +// GGML internal header + +#include "ggml.h" + +#include +#include // load `stdlib.h` before other headers to work around MinGW bug: https://sourceforge.net/p/mingw-w64/bugs/192/ +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + +#undef MIN +#undef MAX + +#define MIN(a, b) ((a) < (b) ? (a) : (b)) +#define MAX(a, b) ((a) > (b) ? (a) : (b)) + +// static_assert should be a #define, but if it's not, +// fall back to the _Static_assert C11 keyword. +// if C99 - static_assert is noop +// ref: https://stackoverflow.com/a/53923785/4039976 +#ifndef __cplusplus +#ifndef static_assert +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201100L) +#define static_assert(cond, msg) _Static_assert(cond, msg) +#else +#define static_assert(cond, msg) struct global_scope_noop_trick +#endif +#endif +#endif + +// bitset + +typedef uint32_t ggml_bitset_t; + +static_assert(sizeof(ggml_bitset_t) == 4, "bitset_t constants must be updated"); +#define BITSET_SHR 5 // log2(sizeof(ggml_bitset_t)*8) +#define BITSET_MASK (sizeof(ggml_bitset_t)*8 - 1) + +static size_t ggml_bitset_size(size_t n) { + return (n + BITSET_MASK) >> BITSET_SHR; +} + +static inline bool ggml_bitset_get(const ggml_bitset_t * bitset, size_t i) { + return !!(bitset[i >> BITSET_SHR] & (1u << (i & BITSET_MASK))); +} + +static inline void ggml_bitset_set(ggml_bitset_t * bitset, size_t i) { + bitset[i >> BITSET_SHR] |= (1u << (i & BITSET_MASK)); +} + +static inline void ggml_bitset_clear(ggml_bitset_t * bitset, size_t i) { + bitset[i >> BITSET_SHR] &= ~(1u << (i & BITSET_MASK)); +} + +// hash set + +#define GGML_HASHSET_FULL ((size_t)-1) +#define GGML_HASHSET_ALREADY_EXISTS ((size_t)-2) + +struct ggml_hash_set { + 
size_t size; + ggml_bitset_t * used; // whether or not the keys are in use i.e. set + struct ggml_tensor ** keys; // actual tensors in the set, keys[i] is only defined if ggml_bitset_get(used, i) +}; + +struct ggml_hash_set ggml_hash_set_new(size_t size); +void ggml_hash_set_free(struct ggml_hash_set * hash_set); + +// returns the minimum size for a hash set that can hold min_sz elements +size_t ggml_hash_size(size_t min_sz); + +// remove all elements from the hash set +void ggml_hash_set_reset(struct ggml_hash_set * hash_set); + +// returns true if key is in the hash set +static bool ggml_hash_contains(const struct ggml_hash_set * hash_set, struct ggml_tensor * key); + +// returns GGML_HASHSET_FULL if table is full, otherwise the current index of the key or where it should be inserted +static size_t ggml_hash_find(const struct ggml_hash_set * hash_set, struct ggml_tensor * key); + +// returns GGML_HASHSET_ALREADY_EXISTS if key already exists, index otherwise, asserts if table is full +static size_t ggml_hash_insert(struct ggml_hash_set * hash_set, struct ggml_tensor * key); + +// return index, asserts if table is full +static size_t ggml_hash_find_or_insert(struct ggml_hash_set * hash_set, struct ggml_tensor * key); + +// hash function for ggml_tensor +static inline size_t ggml_hash(const struct ggml_tensor * p) { + // the last 4 bits are always zero due to alignment + return (size_t)(uintptr_t)p >> 4; +} + +static size_t ggml_hash_find(const struct ggml_hash_set * hash_set, struct ggml_tensor * key) { + size_t h = ggml_hash(key) % hash_set->size; + + // linear probing + size_t i = h; + while (ggml_bitset_get(hash_set->used, i) && hash_set->keys[i] != key) { + i = (i + 1) % hash_set->size; + if (i == h) { + // visited all hash table entries -> not found + return GGML_HASHSET_FULL; + } + } + return i; +} + +static bool ggml_hash_contains(const struct ggml_hash_set * hash_set, struct ggml_tensor * key) { + size_t i = ggml_hash_find(hash_set, key); + return i != 
GGML_HASHSET_FULL && ggml_bitset_get(hash_set->used, i); +} + +static size_t ggml_hash_insert(struct ggml_hash_set * hash_set, struct ggml_tensor * key) { + size_t h = ggml_hash(key) % hash_set->size; + + // linear probing + size_t i = h; + do { + if (!ggml_bitset_get(hash_set->used, i)) { + ggml_bitset_set(hash_set->used, i); + hash_set->keys[i] = key; + return i; + } + if (hash_set->keys[i] == key) { + return GGML_HASHSET_ALREADY_EXISTS; + } + i = (i + 1) % hash_set->size; + } while (i != h); + + // visited all hash table entries -> not found + GGML_ABORT("fatal error"); +} + +static size_t ggml_hash_find_or_insert(struct ggml_hash_set * hash_set, struct ggml_tensor * key) { + size_t h = ggml_hash(key) % hash_set->size; + + // linear probing + size_t i = h; + do { + if (!ggml_bitset_get(hash_set->used, i)) { + ggml_bitset_set(hash_set->used, i); + hash_set->keys[i] = key; + return i; + } + if (hash_set->keys[i] == key) { + return i; + } + i = (i + 1) % hash_set->size; + } while (i != h); + + // visited all hash table entries -> not found + GGML_ABORT("fatal error"); +} + +// computation graph + +enum ggml_cgraph_eval_order { + GGML_CGRAPH_EVAL_ORDER_LEFT_TO_RIGHT = 0, + GGML_CGRAPH_EVAL_ORDER_RIGHT_TO_LEFT, + GGML_CGRAPH_EVAL_ORDER_COUNT +}; + +struct ggml_cgraph { + int size; + int n_nodes; + int n_leafs; + + struct ggml_tensor ** nodes; + struct ggml_tensor ** grads; + struct ggml_tensor ** leafs; + + struct ggml_hash_set visited_hash_set; + + enum ggml_cgraph_eval_order order; +}; + +struct ggml_cgraph ggml_graph_view(struct ggml_cgraph * cgraph, int i0, int i1); + +#ifdef __cplusplus +} +#endif diff --git a/ml/backend/ggml/ggml-metal-embed.metal b/ml/backend/ggml/ggml-metal-embed.metal new file mode 100644 index 000000000..acb91ca1d --- /dev/null +++ b/ml/backend/ggml/ggml-metal-embed.metal @@ -0,0 +1,8325 @@ +// Code generated Tue Oct 22 12:48:58 PDT 2024. DO NOT EDIT. 
+ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +#define GGML_COMMON_DECL_METAL +#define GGML_COMMON_IMPL_METAL +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +#ifndef GGML_COMMON_DECL + +#if defined(GGML_COMMON_DECL_C) +#include + +typedef uint16_t ggml_half; +typedef uint32_t ggml_half2; + +#define GGML_COMMON_AGGR + +#define GGML_COMMON_DECL +#elif defined(GGML_COMMON_DECL_METAL) +#include + +typedef half ggml_half; +typedef half2 ggml_half2; + +#define GGML_COMMON_AGGR + +#define GGML_COMMON_DECL +#elif defined(GGML_COMMON_DECL_CUDA) +#if defined(GGML_COMMON_DECL_MUSA) +#include +#else +#include +#endif +#include + +typedef half ggml_half; +typedef half2 ggml_half2; + +#define GGML_COMMON_AGGR data + +#define GGML_COMMON_DECL +#elif defined(GGML_COMMON_DECL_HIP) +#include +#include + +typedef half ggml_half; +typedef half2 ggml_half2; + +#define GGML_COMMON_AGGR data + +#define GGML_COMMON_DECL +#elif defined(GGML_COMMON_DECL_SYCL) +#include +#include + +typedef sycl::half ggml_half; +typedef sycl::half2 ggml_half2; + +#define GGML_COMMON_AGGR data + +#define GGML_COMMON_DECL +#endif + +#if defined(GGML_COMMON_DECL) + +#ifndef __cplusplus +#ifndef static_assert +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201100L) +#define static_assert(cond, msg) _Static_assert(cond, msg) +#else +#define static_assert(cond, msg) struct global_scope_noop_trick +#endif +#endif +#endif // __cplusplus + +// QK = number of values after dequantization +// QK_K = super-block size + +#define QK_K 256 +#define K_SCALE_SIZE 12 + +#if defined(GGML_COMMON_DECL_CUDA) || defined(GGML_COMMON_DECL_HIP) || defined(GGML_COMMON_DECL_SYCL) +// QR = QK / number of values before dequantization +// QI = number of 32 bit integers before dequantization + +#define QI4_0 (QK4_0 / (4 * QR4_0)) +#define QR4_0 2 + +#define QI4_1 (QK4_1 / (4 * QR4_1)) +#define QR4_1 2 + +#define QI5_0 (QK5_0 / (4 * QR5_0)) +#define QR5_0 2 + +#define QI5_1 (QK5_1 / (4 * QR5_1)) +#define QR5_1 2 + +#define QI8_0 (QK8_0 / (4 * QR8_0)) +#define QR8_0 1 + +#define QI8_1 (QK8_1 / (4 * QR8_1)) +#define QR8_1 1 + +#define QI2_K (QK_K / (4*QR2_K)) +#define QR2_K 4 + 
+#define QI3_K (QK_K / (4*QR3_K)) +#define QR3_K 4 + +#define QI4_K (QK_K / (4*QR4_K)) +#define QR4_K 2 + +#define QI5_K (QK_K / (4*QR5_K)) +#define QR5_K 2 + +#define QI6_K (QK_K / (4*QR6_K)) +#define QR6_K 2 + +#define QI2_XXS (QK_K / (4*QR2_XXS)) +#define QR2_XXS 4 + +#define QI2_XS (QK_K / (4*QR2_XS)) +#define QR2_XS 4 + +#define QI2_S (QK_K / (4*QR2_S)) +#define QR2_S 4 + +#define QI3_XXS (QK_K / (4*QR3_XXS)) +#define QR3_XXS 4 + +#define QI3_XS (QK_K / (4*QR3_XS)) +#define QR3_XS 4 + +#define QI1_S (QK_K / (4*QR1_S)) +#define QR1_S 8 + +#define QI1_M (QK_K / (4*QR1_M)) +#define QR1_M 8 + +#define QI4_NL (QK4_NL / (4*QR4_NL)) +#define QR4_NL 2 + +#define QI4_XS (QK_K / (4*QR4_XS)) +#define QR4_XS 2 + +#define QI3_S (QK_K / (4*QR3_S)) +#define QR3_S 4 + +#endif // GGML_COMMON_DECL_CUDA || GGML_COMMON_DECL_HIP + +#define QK4_0 32 +typedef struct { + ggml_half d; // delta + uint8_t qs[QK4_0 / 2]; // nibbles / quants +} block_q4_0; +static_assert(sizeof(block_q4_0) == sizeof(ggml_half) + QK4_0 / 2, "wrong q4_0 block size/padding"); + +#define QK4_1 32 +typedef struct { + union { + struct { + ggml_half d; // delta + ggml_half m; // min + } GGML_COMMON_AGGR; + ggml_half2 dm; + }; + uint8_t qs[QK4_1 / 2]; // nibbles / quants +} block_q4_1; +static_assert(sizeof(block_q4_1) == 2 * sizeof(ggml_half) + QK4_1 / 2, "wrong q4_1 block size/padding"); + +#define QK5_0 32 +typedef struct { + ggml_half d; // delta + uint8_t qh[4]; // 5-th bit of quants + uint8_t qs[QK5_0 / 2]; // nibbles / quants +} block_q5_0; +static_assert(sizeof(block_q5_0) == sizeof(ggml_half) + sizeof(uint32_t) + QK5_0 / 2, "wrong q5_0 block size/padding"); + +#define QK5_1 32 +typedef struct { + union { + struct { + ggml_half d; // delta + ggml_half m; // min + } GGML_COMMON_AGGR; + ggml_half2 dm; + }; + uint8_t qh[4]; // 5-th bit of quants + uint8_t qs[QK5_1 / 2]; // nibbles / quants +} block_q5_1; +static_assert(sizeof(block_q5_1) == 2 * sizeof(ggml_half) + sizeof(uint32_t) + QK5_1 / 2, "wrong q5_1 
block size/padding"); + +#define QK8_0 32 +typedef struct { + ggml_half d; // delta + int8_t qs[QK8_0]; // quants +} block_q8_0; +static_assert(sizeof(block_q8_0) == sizeof(ggml_half) + QK8_0, "wrong q8_0 block size/padding"); + +#define QK8_1 32 +typedef struct { + union { + struct { + ggml_half d; // delta + ggml_half s; // d * sum(qs[i]) + } GGML_COMMON_AGGR; + ggml_half2 ds; + }; + int8_t qs[QK8_1]; // quants +} block_q8_1; +static_assert(sizeof(block_q8_1) == 2*sizeof(ggml_half) + QK8_1, "wrong q8_1 block size/padding"); + +typedef struct { + ggml_half d[4]; // deltas for 4 q4_0 blocks + uint8_t qs[QK4_0 * 2]; // nibbles / quants for 4 q4_0 blocks +} block_q4_0x4; +static_assert(sizeof(block_q4_0x4) == 4 * sizeof(ggml_half) + QK4_0 * 2, "wrong q4_0x4 block size/padding"); + +typedef struct { + ggml_half d[8]; // deltas for 8 q4_0 blocks + uint8_t qs[QK4_0 * 4]; // nibbles / quants for 8 q4_0 blocks +} block_q4_0x8; +static_assert(sizeof(block_q4_0x8) == 8 * sizeof(ggml_half) + QK4_0 * 4, "wrong q4_0x8 block size/padding"); + +typedef struct { + ggml_half d[4]; // deltas for 4 q8_0 blocks + int8_t qs[QK8_0 * 4]; // quants for 4 q8_0 blocks +} block_q8_0x4; +static_assert(sizeof(block_q8_0x4) == 4 * sizeof(ggml_half) + QK8_0 * 4, "wrong q8_0x4 block size/padding"); + +typedef struct { + ggml_half d[8]; // deltas for 8 q8_0 blocks + int8_t qs[QK8_0 * 8]; // quants for 8 q8_0 blocks +} block_q8_0x8; +static_assert(sizeof(block_q8_0x8) == 8 * sizeof(ggml_half) + QK8_0 * 8, "wrong q8_0x8 block size/padding"); + +// +// Ternary quantization +// + +// 1.6875 bpw +typedef struct { + uint8_t qs[(QK_K - 4 * QK_K / 64) / 5]; // 5 elements per byte (3^5 = 243 < 256) + uint8_t qh[QK_K/64]; // 4 elements per byte + ggml_half d; +} block_tq1_0; +static_assert(sizeof(block_tq1_0) == sizeof(ggml_half) + QK_K / 64 + (QK_K - 4 * QK_K / 64) / 5, "wrong tq1_0 block size/padding"); + +// 2.0625 bpw +typedef struct { + uint8_t qs[QK_K/4]; // 2 bits per element + ggml_half d; +} 
block_tq2_0; +static_assert(sizeof(block_tq2_0) == sizeof(ggml_half) + QK_K / 4, "wrong tq2_0 block size/padding"); + +// +// Super-block quantization structures +// + +// 2-bit quantization +// weight is represented as x = a * q + b +// 16 blocks of 16 elements each +// Effectively 2.625 bits per weight +typedef struct { + uint8_t scales[QK_K/16]; // scales and mins, quantized with 4 bits + uint8_t qs[QK_K/4]; // quants + union { + struct { + ggml_half d; // super-block scale for quantized scales + ggml_half dmin; // super-block scale for quantized mins + } GGML_COMMON_AGGR; + ggml_half2 dm; + }; +} block_q2_K; +static_assert(sizeof(block_q2_K) == 2*sizeof(ggml_half) + QK_K/16 + QK_K/4, "wrong q2_K block size/padding"); + +// 3-bit quantization +// weight is represented as x = a * q +// 16 blocks of 16 elements each +// Effectively 3.4375 bits per weight +typedef struct { + uint8_t hmask[QK_K/8]; // quants - high bit + uint8_t qs[QK_K/4]; // quants - low 2 bits + uint8_t scales[12]; // scales, quantized with 6 bits + ggml_half d; // super-block scale +} block_q3_K; +static_assert(sizeof(block_q3_K) == sizeof(ggml_half) + QK_K / 4 + QK_K / 8 + 12, "wrong q3_K block size/padding"); + +// 4-bit quantization +// 8 blocks of 32 elements each +// weight is represented as x = a * q + b +// Effectively 4.5 bits per weight +typedef struct { + union { + struct { + ggml_half d; // super-block scale for quantized scales + ggml_half dmin; // super-block scale for quantized mins + } GGML_COMMON_AGGR; + ggml_half2 dm; + }; + uint8_t scales[K_SCALE_SIZE]; // scales and mins, quantized with 6 bits + uint8_t qs[QK_K/2]; // 4--bit quants +} block_q4_K; +static_assert(sizeof(block_q4_K) == 2*sizeof(ggml_half) + K_SCALE_SIZE + QK_K/2, "wrong q4_K block size/padding"); + +// 5-bit quantization +// 8 blocks of 32 elements each +// weight is represented as x = a * q + b +// Effectively 5.5 bits per weight +typedef struct { + union { + struct { + ggml_half d; // super-block scale for 
quantized scales + ggml_half dmin; // super-block scale for quantized mins + } GGML_COMMON_AGGR; + ggml_half2 dm; + }; + uint8_t scales[K_SCALE_SIZE]; // scales and mins, quantized with 6 bits + uint8_t qh[QK_K/8]; // quants, high bit + uint8_t qs[QK_K/2]; // quants, low 4 bits +} block_q5_K; +static_assert(sizeof(block_q5_K) == 2*sizeof(ggml_half) + K_SCALE_SIZE + QK_K/2 + QK_K/8, "wrong q5_K block size/padding"); + +// 6-bit quantization +// weight is represented as x = a * q +// 16 blocks of 16 elements each +// Effectively 6.5625 bits per weight +typedef struct { + uint8_t ql[QK_K/2]; // quants, lower 4 bits + uint8_t qh[QK_K/4]; // quants, upper 2 bits + int8_t scales[QK_K/16]; // scales, quantized with 8 bits + ggml_half d; // super-block scale +} block_q6_K; +static_assert(sizeof(block_q6_K) == sizeof(ggml_half) + QK_K / 16 + 3*QK_K/4, "wrong q6_K block size/padding"); + +// This is only used for intermediate quantization and dot products +typedef struct { + float d; // delta + int8_t qs[QK_K]; // quants + int16_t bsums[QK_K/16]; // sum of quants in groups of 16 +} block_q8_K; +static_assert(sizeof(block_q8_K) == sizeof(float) + QK_K + QK_K/16*sizeof(int16_t), "wrong q8_K block size/padding"); + +// (Almost) "true" 2-bit quantization. +// Due to the need to use blocks as per ggml design, it ends up using +// 2.0625 bpw because of the 16-bit scale for each block of 256. 
+typedef struct { + ggml_half d; + uint16_t qs[QK_K/8]; +} block_iq2_xxs; +static_assert(sizeof(block_iq2_xxs) == sizeof(ggml_half) + QK_K/8*sizeof(uint16_t), "wrong iq2_xxs block size/padding"); + +// 2.3125 bpw quants +typedef struct { + ggml_half d; + uint16_t qs[QK_K/8]; + uint8_t scales[QK_K/32]; +} block_iq2_xs; +static_assert(sizeof(block_iq2_xs) == sizeof(ggml_half) + QK_K/8*sizeof(uint16_t) + QK_K/32, "wrong iq2_xs block size/padding"); + +// 2.5625 bpw quants +typedef struct { + ggml_half d; + uint8_t qs[QK_K/4]; + uint8_t qh[QK_K/32]; + uint8_t scales[QK_K/32]; +} block_iq2_s; +static_assert(sizeof(block_iq2_s) == sizeof(ggml_half) + QK_K/4 + QK_K/16, "wrong iq2_s block size/padding"); + +// (Almost) "true" 3-bit quantization. +// Due to the need to use blocks as per ggml design, it ends up using +// 3.0625 bpw because of the 16-bit scale for each block of 256. +typedef struct { + ggml_half d; + uint8_t qs[3*QK_K/8]; +} block_iq3_xxs; +static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_half) + 3*(QK_K/8), "wrong iq3_xxs block size/padding"); + +// 3.4375 bpw +#define IQ3S_N_SCALE QK_K/64 +typedef struct { + ggml_half d; + uint8_t qs[QK_K/4]; + uint8_t qh[QK_K/32]; + uint8_t signs[QK_K/8]; + uint8_t scales[IQ3S_N_SCALE]; +} block_iq3_s; +static_assert(sizeof(block_iq3_s) == sizeof(ggml_half) + 13*(QK_K/32) + IQ3S_N_SCALE, "wrong iq3_s block size/padding"); + +// 1.5625 bpw +typedef struct { + ggml_half d; + uint8_t qs[QK_K/8]; + uint16_t qh[QK_K/32]; +} block_iq1_s; +static_assert(sizeof(block_iq1_s) == sizeof(ggml_half) + QK_K/8 + QK_K/16, "wrong iq1_s block size/padding"); + +// 1.75 bpw +typedef struct { + uint8_t qs[QK_K/8]; // grid index, low 8 bits + uint8_t qh[QK_K/16]; // grid index, high 3 bits + grid shift bit (for two groups of 8) + uint8_t scales[QK_K/32]; // 3-bit block scales (4-bit if QK_K == 64) +} block_iq1_m; +static_assert(sizeof(block_iq1_m) == QK_K/8 + QK_K/16 + QK_K/32, "wrong iq1_m block size/padding"); + +// Used by IQ1_M quants 
+typedef union { + ggml_half f16; + uint16_t u16; +} iq1m_scale_t; + +// Non-linear quants +#define QK4_NL 32 +typedef struct { + ggml_half d; + uint8_t qs[QK4_NL/2]; +} block_iq4_nl; +static_assert(sizeof(block_iq4_nl) == sizeof(ggml_half) + QK4_NL/2, "wrong iq4_nl block size/padding"); + +typedef struct { + ggml_half d; + uint16_t scales_h; + uint8_t scales_l[QK_K/64]; + uint8_t qs[QK_K/2]; +} block_iq4_xs; +static_assert(sizeof(block_iq4_xs) == sizeof(ggml_half) + sizeof(uint16_t) + QK_K/64 + QK_K/2, "wrong iq4_xs block size/padding"); + +#endif // GGML_COMMON_DECL +#endif // GGML_COMMON_DECL + +//////////////////////////////////////////////////////////////////////////////// + +#ifndef GGML_COMMON_IMPL + +#if defined(GGML_COMMON_IMPL_C) +#include + +#define GGML_TABLE_BEGIN(type, name, size) static const type name[size] = { +#define GGML_TABLE_END() }; + +#define GGML_COMMON_IMPL +#elif defined(GGML_COMMON_IMPL_METAL) +#include + +#define GGML_TABLE_BEGIN(type, name, size) static const constant type name[size] = { +#define GGML_TABLE_END() }; + +#define GGML_COMMON_IMPL +#elif defined(GGML_COMMON_IMPL_CUDA) || defined(GGML_COMMON_IMPL_HIP) || defined(GGML_COMMON_IMPL_MUSA) +#include + +#define GGML_TABLE_BEGIN(type, name, size) static const __device__ type name[size] = { +#define GGML_TABLE_END() }; + +#define GGML_COMMON_IMPL +#elif defined(GGML_COMMON_IMPL_SYCL) + +#include + +#define GGML_TABLE_BEGIN(type, name, size) static const type name[size] = { +#define GGML_TABLE_END() }; + +#define GGML_COMMON_IMPL +#endif + +#if defined(GGML_COMMON_IMPL) + +GGML_TABLE_BEGIN(uint8_t, kmask_iq2xs, 8) + 1, 2, 4, 8, 16, 32, 64, 128 +GGML_TABLE_END() + +GGML_TABLE_BEGIN(uint8_t, ksigns_iq2xs, 128) + 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, + 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, + 160, 33, 34, 163, 36, 165, 166, 39, 40, 169, 170, 43, 172, 45, 46, 175, + 48, 177, 178, 51, 180, 53, 54, 183, 184, 57, 58, 187, 60, 
189, 190, 63, + 192, 65, 66, 195, 68, 197, 198, 71, 72, 201, 202, 75, 204, 77, 78, 207, + 80, 209, 210, 83, 212, 85, 86, 215, 216, 89, 90, 219, 92, 221, 222, 95, + 96, 225, 226, 99, 228, 101, 102, 231, 232, 105, 106, 235, 108, 237, 238, 111, + 240, 113, 114, 243, 116, 245, 246, 119, 120, 249, 250, 123, 252, 125, 126, 255, +GGML_TABLE_END() + +//#if __CUDA_ARCH__ >= MIN_CC_DP4A // lowest compute capability for integer intrinsics +GGML_TABLE_BEGIN(uint64_t, ksigns64, 128) + 0x0000000000000000, 0xff000000000000ff, 0xff0000000000ff00, 0x000000000000ffff, + 0xff00000000ff0000, 0x0000000000ff00ff, 0x0000000000ffff00, 0xff00000000ffffff, + 0xff000000ff000000, 0x00000000ff0000ff, 0x00000000ff00ff00, 0xff000000ff00ffff, + 0x00000000ffff0000, 0xff000000ffff00ff, 0xff000000ffffff00, 0x00000000ffffffff, + 0xff0000ff00000000, 0x000000ff000000ff, 0x000000ff0000ff00, 0xff0000ff0000ffff, + 0x000000ff00ff0000, 0xff0000ff00ff00ff, 0xff0000ff00ffff00, 0x000000ff00ffffff, + 0x000000ffff000000, 0xff0000ffff0000ff, 0xff0000ffff00ff00, 0x000000ffff00ffff, + 0xff0000ffffff0000, 0x000000ffffff00ff, 0x000000ffffffff00, 0xff0000ffffffffff, + 0xff00ff0000000000, 0x0000ff00000000ff, 0x0000ff000000ff00, 0xff00ff000000ffff, + 0x0000ff0000ff0000, 0xff00ff0000ff00ff, 0xff00ff0000ffff00, 0x0000ff0000ffffff, + 0x0000ff00ff000000, 0xff00ff00ff0000ff, 0xff00ff00ff00ff00, 0x0000ff00ff00ffff, + 0xff00ff00ffff0000, 0x0000ff00ffff00ff, 0x0000ff00ffffff00, 0xff00ff00ffffffff, + 0x0000ffff00000000, 0xff00ffff000000ff, 0xff00ffff0000ff00, 0x0000ffff0000ffff, + 0xff00ffff00ff0000, 0x0000ffff00ff00ff, 0x0000ffff00ffff00, 0xff00ffff00ffffff, + 0xff00ffffff000000, 0x0000ffffff0000ff, 0x0000ffffff00ff00, 0xff00ffffff00ffff, + 0x0000ffffffff0000, 0xff00ffffffff00ff, 0xff00ffffffffff00, 0x0000ffffffffffff, + 0xffff000000000000, 0x00ff0000000000ff, 0x00ff00000000ff00, 0xffff00000000ffff, + 0x00ff000000ff0000, 0xffff000000ff00ff, 0xffff000000ffff00, 0x00ff000000ffffff, + 0x00ff0000ff000000, 0xffff0000ff0000ff, 
0xffff0000ff00ff00, 0x00ff0000ff00ffff, + 0xffff0000ffff0000, 0x00ff0000ffff00ff, 0x00ff0000ffffff00, 0xffff0000ffffffff, + 0x00ff00ff00000000, 0xffff00ff000000ff, 0xffff00ff0000ff00, 0x00ff00ff0000ffff, + 0xffff00ff00ff0000, 0x00ff00ff00ff00ff, 0x00ff00ff00ffff00, 0xffff00ff00ffffff, + 0xffff00ffff000000, 0x00ff00ffff0000ff, 0x00ff00ffff00ff00, 0xffff00ffff00ffff, + 0x00ff00ffffff0000, 0xffff00ffffff00ff, 0xffff00ffffffff00, 0x00ff00ffffffffff, + 0x00ffff0000000000, 0xffffff00000000ff, 0xffffff000000ff00, 0x00ffff000000ffff, + 0xffffff0000ff0000, 0x00ffff0000ff00ff, 0x00ffff0000ffff00, 0xffffff0000ffffff, + 0xffffff00ff000000, 0x00ffff00ff0000ff, 0x00ffff00ff00ff00, 0xffffff00ff00ffff, + 0x00ffff00ffff0000, 0xffffff00ffff00ff, 0xffffff00ffffff00, 0x00ffff00ffffffff, + 0xffffffff00000000, 0x00ffffff000000ff, 0x00ffffff0000ff00, 0xffffffff0000ffff, + 0x00ffffff00ff0000, 0xffffffff00ff00ff, 0xffffffff00ffff00, 0x00ffffff00ffffff, + 0x00ffffffff000000, 0xffffffffff0000ff, 0xffffffffff00ff00, 0x00ffffffff00ffff, + 0xffffffffffff0000, 0x00ffffffffff00ff, 0x00ffffffffffff00, 0xffffffffffffffff, +GGML_TABLE_END() +//#endif + + +GGML_TABLE_BEGIN(uint64_t, iq2xxs_grid, 256) + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x08080808082b0808, + 0x08080808082b082b, 0x08080808082b2b08, 0x08080808082b2b2b, 0x0808080819080819, + 0x0808080819081908, 0x0808080819190808, 0x0808080819192b08, 0x08080808192b0819, + 0x08080808192b1908, 0x080808082b080808, 0x080808082b08082b, 0x080808082b082b2b, + 0x080808082b2b082b, 0x0808081908080819, 0x0808081908081908, 0x0808081908190808, + 0x0808081908191919, 0x0808081919080808, 0x080808192b081908, 0x080808192b192b08, + 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b082b082b, 0x0808082b2b08082b, + 0x0808190808080819, 0x0808190808081908, 0x0808190808190808, 0x08081908082b0819, + 0x08081908082b1908, 0x0808190819080808, 0x080819081908082b, 
0x0808190819082b08, + 0x08081908192b0808, 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, + 0x080819082b2b1908, 0x0808191908080808, 0x080819190808082b, 0x0808191908082b08, + 0x08081919082b0808, 0x080819191908192b, 0x08081919192b2b19, 0x080819192b080808, + 0x080819192b190819, 0x0808192b08082b19, 0x0808192b08190808, 0x0808192b19080808, + 0x0808192b2b081908, 0x0808192b2b2b1908, 0x08082b0808080808, 0x08082b0808081919, + 0x08082b0808082b08, 0x08082b0808191908, 0x08082b08082b2b08, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b081919082b, 0x08082b082b082b08, + 0x08082b1908081908, 0x08082b1919080808, 0x08082b2b0808082b, 0x08082b2b08191908, + 0x0819080808080819, 0x0819080808081908, 0x0819080808190808, 0x08190808082b0819, + 0x0819080819080808, 0x08190808192b0808, 0x081908082b081908, 0x081908082b190808, + 0x081908082b191919, 0x0819081908080808, 0x0819081908082b08, 0x08190819082b0808, + 0x0819081919190808, 0x0819081919192b2b, 0x081908192b080808, 0x0819082b082b1908, + 0x0819082b19081919, 0x0819190808080808, 0x0819190808082b08, 0x08191908082b0808, + 0x08191908082b1919, 0x0819190819082b19, 0x081919082b080808, 0x0819191908192b08, + 0x08191919192b082b, 0x0819192b08080808, 0x0819192b0819192b, 0x08192b0808080819, + 0x08192b0808081908, 0x08192b0808190808, 0x08192b0819080808, 0x08192b082b080819, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b192b2b0808, 0x08192b2b19190819, + 0x082b080808080808, 0x082b08080808082b, 0x082b080808082b2b, 0x082b080819081908, + 0x082b0808192b0819, 0x082b08082b080808, 0x082b08082b08082b, 0x082b0819082b2b19, + 0x082b081919082b08, 0x082b082b08080808, 0x082b082b0808082b, 0x082b190808080819, + 0x082b190808081908, 0x082b190808190808, 0x082b190819080808, 0x082b19081919192b, + 0x082b191908080808, 0x082b191919080819, 0x082b1919192b1908, 0x082b192b2b190808, + 0x082b2b0808082b08, 0x082b2b08082b0808, 0x082b2b082b191908, 0x082b2b2b19081908, + 0x1908080808080819, 0x1908080808081908, 0x1908080808190808, 0x1908080808192b08, + 
0x19080808082b0819, 0x19080808082b1908, 0x1908080819080808, 0x1908080819082b08, + 0x190808081919192b, 0x19080808192b0808, 0x190808082b080819, 0x190808082b081908, + 0x190808082b190808, 0x1908081908080808, 0x19080819082b0808, 0x19080819192b0819, + 0x190808192b080808, 0x190808192b081919, 0x1908082b08080819, 0x1908082b08190808, + 0x1908082b19082b08, 0x1908082b1919192b, 0x1908082b192b2b08, 0x1908190808080808, + 0x1908190808082b08, 0x19081908082b0808, 0x190819082b080808, 0x190819082b192b19, + 0x190819190819082b, 0x19081919082b1908, 0x1908192b08080808, 0x19082b0808080819, + 0x19082b0808081908, 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, + 0x19082b1908080808, 0x19082b1919192b08, 0x19082b19192b0819, 0x19082b192b08082b, + 0x19082b2b19081919, 0x19082b2b2b190808, 0x1919080808080808, 0x1919080808082b08, + 0x1919080808190819, 0x1919080808192b19, 0x19190808082b0808, 0x191908082b080808, + 0x191908082b082b08, 0x1919081908081908, 0x191908191908082b, 0x191908192b2b1908, + 0x1919082b2b190819, 0x191919082b190808, 0x191919082b19082b, 0x1919191908082b2b, + 0x1919192b08080819, 0x1919192b19191908, 0x19192b0808080808, 0x19192b0808190819, + 0x19192b0808192b19, 0x19192b08192b1908, 0x19192b1919080808, 0x19192b2b08082b08, + 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, 0x192b0808192b2b08, + 0x192b081908080808, 0x192b081919191919, 0x192b082b08192b08, 0x192b082b192b0808, + 0x192b190808080808, 0x192b190808081919, 0x192b191908190808, 0x192b19190819082b, + 0x192b19192b081908, 0x192b2b081908082b, 0x2b08080808080808, 0x2b0808080808082b, + 0x2b08080808082b2b, 0x2b08080819080819, 0x2b0808082b08082b, 0x2b08081908081908, + 0x2b08081908192b08, 0x2b08081919080808, 0x2b08082b08190819, 0x2b08190808080819, + 0x2b08190808081908, 0x2b08190808190808, 0x2b08190808191919, 0x2b08190819080808, + 0x2b081908192b0808, 0x2b08191908080808, 0x2b0819191908192b, 0x2b0819192b191908, + 0x2b08192b08082b19, 0x2b08192b19080808, 0x2b08192b192b0808, 0x2b082b080808082b, + 0x2b082b1908081908, 
0x2b082b2b08190819, 0x2b19080808081908, 0x2b19080808190808, + 0x2b190808082b1908, 0x2b19080819080808, 0x2b1908082b2b0819, 0x2b1908190819192b, + 0x2b1908192b080808, 0x2b19082b19081919, 0x2b19190808080808, 0x2b191908082b082b, + 0x2b19190819081908, 0x2b19191919190819, 0x2b192b082b080819, 0x2b192b19082b0808, + 0x2b2b08080808082b, 0x2b2b080819190808, 0x2b2b08082b081919, 0x2b2b081908082b19, + 0x2b2b082b08080808, 0x2b2b190808192b08, 0x2b2b2b0819190808, 0x2b2b2b1908081908, +GGML_TABLE_END() + +GGML_TABLE_BEGIN(uint64_t, iq2xs_grid, 512) + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x080808080819192b, + 0x0808080808192b19, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b1919, + 0x08080808082b2b08, 0x0808080819080819, 0x0808080819081908, 0x080808081908192b, + 0x0808080819082b19, 0x0808080819190808, 0x080808081919082b, 0x0808080819191919, + 0x0808080819192b08, 0x08080808192b0819, 0x08080808192b1908, 0x080808082b080808, + 0x080808082b08082b, 0x080808082b081919, 0x080808082b082b08, 0x080808082b190819, + 0x080808082b191908, 0x080808082b192b19, 0x080808082b2b0808, 0x0808081908080819, + 0x0808081908081908, 0x080808190808192b, 0x0808081908082b19, 0x0808081908190808, + 0x080808190819082b, 0x0808081908191919, 0x0808081908192b08, 0x0808081908192b2b, + 0x08080819082b0819, 0x08080819082b1908, 0x0808081919080808, 0x080808191908082b, + 0x0808081919081919, 0x0808081919082b08, 0x0808081919190819, 0x0808081919191908, + 0x08080819192b0808, 0x08080819192b2b08, 0x080808192b080819, 0x080808192b081908, + 0x080808192b190808, 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b08081919, + 0x0808082b08082b08, 0x0808082b08190819, 0x0808082b08191908, 0x0808082b082b0808, + 0x0808082b19080819, 0x0808082b19081908, 0x0808082b19190808, 0x0808082b19191919, + 0x0808082b2b080808, 0x0808082b2b082b2b, 0x0808190808080819, 0x0808190808081908, + 0x080819080808192b, 0x0808190808082b19, 0x0808190808190808, 
0x080819080819082b, + 0x0808190808191919, 0x0808190808192b08, 0x08081908082b0819, 0x08081908082b1908, + 0x0808190819080808, 0x080819081908082b, 0x0808190819081919, 0x0808190819082b08, + 0x0808190819190819, 0x0808190819191908, 0x080819081919192b, 0x08081908192b0808, + 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, 0x0808191908080808, + 0x080819190808082b, 0x0808191908081919, 0x0808191908082b08, 0x0808191908190819, + 0x0808191908191908, 0x08081919082b0808, 0x0808191919080819, 0x0808191919081908, + 0x0808191919190808, 0x08081919192b0819, 0x080819192b080808, 0x0808192b08080819, + 0x0808192b08081908, 0x0808192b08190808, 0x0808192b082b192b, 0x0808192b19080808, + 0x0808192b1908082b, 0x0808192b2b081908, 0x08082b0808080808, 0x08082b080808082b, + 0x08082b0808081919, 0x08082b0808082b08, 0x08082b0808082b2b, 0x08082b0808190819, + 0x08082b0808191908, 0x08082b08082b0808, 0x08082b08082b1919, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b0819192b08, 0x08082b082b080808, + 0x08082b082b2b0808, 0x08082b082b2b2b2b, 0x08082b1908080819, 0x08082b1908081908, + 0x08082b1908190808, 0x08082b1919080808, 0x08082b192b080819, 0x08082b192b082b19, + 0x08082b2b08080808, 0x08082b2b082b0808, 0x08082b2b082b2b08, 0x08082b2b2b19192b, + 0x08082b2b2b2b0808, 0x0819080808080819, 0x0819080808081908, 0x081908080808192b, + 0x0819080808082b19, 0x0819080808190808, 0x081908080819082b, 0x0819080808191919, + 0x0819080808192b08, 0x08190808082b0819, 0x08190808082b1908, 0x0819080819080808, + 0x081908081908082b, 0x0819080819081919, 0x0819080819082b08, 0x0819080819190819, + 0x0819080819191908, 0x08190808192b0808, 0x08190808192b2b2b, 0x081908082b080819, + 0x081908082b081908, 0x081908082b190808, 0x0819081908080808, 0x081908190808082b, + 0x0819081908081919, 0x0819081908082b08, 0x0819081908190819, 0x0819081908191908, + 0x08190819082b0808, 0x0819081919080819, 0x0819081919081908, 0x0819081919190808, + 0x081908192b080808, 0x081908192b191908, 0x081908192b19192b, 0x0819082b08080819, + 
0x0819082b08081908, 0x0819082b0808192b, 0x0819082b08190808, 0x0819082b19080808, + 0x0819082b192b0808, 0x0819190808080808, 0x081919080808082b, 0x0819190808081919, + 0x0819190808082b08, 0x0819190808190819, 0x0819190808191908, 0x08191908082b0808, + 0x0819190819080819, 0x0819190819081908, 0x0819190819082b19, 0x0819190819190808, + 0x08191908192b1908, 0x081919082b080808, 0x0819191908080819, 0x0819191908081908, + 0x0819191908190808, 0x0819191919080808, 0x0819192b08080808, 0x0819192b08191908, + 0x0819192b19082b19, 0x08192b0808080819, 0x08192b0808081908, 0x08192b0808190808, + 0x08192b080819082b, 0x08192b0819080808, 0x08192b0819191908, 0x08192b082b08192b, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b19192b192b, 0x08192b2b19190819, + 0x08192b2b2b2b2b19, 0x082b080808080808, 0x082b08080808082b, 0x082b080808081919, + 0x082b080808082b08, 0x082b080808082b2b, 0x082b080808190819, 0x082b080808191908, + 0x082b0808082b0808, 0x082b080819080819, 0x082b080819081908, 0x082b080819190808, + 0x082b08082b080808, 0x082b08082b2b0808, 0x082b081908080819, 0x082b081908081908, + 0x082b081908190808, 0x082b081919080808, 0x082b081919082b08, 0x082b0819192b1919, + 0x082b082b08080808, 0x082b082b082b082b, 0x082b082b2b080808, 0x082b082b2b2b2b08, + 0x082b190808080819, 0x082b190808081908, 0x082b190808190808, 0x082b1908082b2b19, + 0x082b190819080808, 0x082b191908080808, 0x082b191919080819, 0x082b19191919082b, + 0x082b19192b192b19, 0x082b192b08080819, 0x082b192b08192b2b, 0x082b192b2b2b192b, + 0x082b2b0808080808, 0x082b2b0808082b08, 0x082b2b0808082b2b, 0x082b2b08082b0808, + 0x082b2b0819191919, 0x082b2b082b082b08, 0x082b2b082b2b082b, 0x082b2b19192b2b08, + 0x082b2b192b190808, 0x082b2b2b08082b08, 0x082b2b2b082b0808, 0x082b2b2b2b08082b, + 0x082b2b2b2b082b08, 0x082b2b2b2b082b2b, 0x1908080808080819, 0x1908080808081908, + 0x190808080808192b, 0x1908080808082b19, 0x1908080808190808, 0x190808080819082b, + 0x1908080808191919, 0x1908080808192b08, 0x19080808082b0819, 0x19080808082b1908, + 0x1908080819080808, 
0x190808081908082b, 0x1908080819081919, 0x1908080819082b08, + 0x1908080819082b2b, 0x1908080819190819, 0x1908080819191908, 0x19080808192b0808, + 0x19080808192b1919, 0x190808082b080819, 0x190808082b081908, 0x190808082b190808, + 0x1908081908080808, 0x190808190808082b, 0x1908081908081919, 0x1908081908082b08, + 0x1908081908190819, 0x1908081908191908, 0x19080819082b0808, 0x1908081919080819, + 0x1908081919081908, 0x1908081919190808, 0x190808192b080808, 0x190808192b081919, + 0x190808192b2b082b, 0x1908082b08080819, 0x1908082b08081908, 0x1908082b08190808, + 0x1908082b0819082b, 0x1908082b082b2b19, 0x1908082b19080808, 0x1908190808080808, + 0x190819080808082b, 0x1908190808081919, 0x1908190808082b08, 0x1908190808190819, + 0x1908190808191908, 0x1908190808192b19, 0x19081908082b0808, 0x1908190819080819, + 0x1908190819081908, 0x1908190819190808, 0x190819082b080808, 0x190819082b191908, + 0x1908191908080819, 0x1908191908081908, 0x1908191908190808, 0x19081919082b1908, + 0x1908191919080808, 0x190819192b192b2b, 0x1908192b08080808, 0x1908192b08082b2b, + 0x1908192b19081908, 0x1908192b19190808, 0x19082b0808080819, 0x19082b0808081908, + 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, 0x19082b0819191908, + 0x19082b08192b082b, 0x19082b1908080808, 0x19082b1908190819, 0x19082b1919081908, + 0x19082b1919190808, 0x19082b19192b2b19, 0x19082b2b08081908, 0x1919080808080808, + 0x191908080808082b, 0x1919080808081919, 0x1919080808082b08, 0x1919080808190819, + 0x1919080808191908, 0x19190808082b0808, 0x19190808082b2b08, 0x1919080819080819, + 0x1919080819081908, 0x1919080819190808, 0x191908082b080808, 0x1919081908080819, + 0x1919081908081908, 0x1919081908190808, 0x1919081908191919, 0x1919081919080808, + 0x191908191908082b, 0x1919082b08080808, 0x1919082b19081908, 0x1919082b2b2b2b2b, + 0x1919190808080819, 0x1919190808081908, 0x1919190808190808, 0x19191908082b0819, + 0x1919190819080808, 0x19191908192b0808, 0x191919082b080819, 0x191919082b2b0819, + 0x1919191908080808, 0x1919191908082b08, 
0x191919192b080808, 0x191919192b082b08, + 0x1919192b082b0819, 0x1919192b192b2b08, 0x1919192b2b2b0819, 0x19192b0808080808, + 0x19192b0808191908, 0x19192b0819080819, 0x19192b0819190808, 0x19192b082b192b19, + 0x19192b1908192b2b, 0x19192b1919080808, 0x19192b191908082b, 0x19192b2b2b081919, + 0x192b080808080819, 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, + 0x192b080819191908, 0x192b0808192b082b, 0x192b08082b08192b, 0x192b08082b2b2b19, + 0x192b081908080808, 0x192b082b082b1908, 0x192b082b19082b2b, 0x192b082b2b19082b, + 0x192b190808080808, 0x192b19080819192b, 0x192b191908190808, 0x192b191919080808, + 0x192b191919081919, 0x192b19192b2b1908, 0x192b2b0808080819, 0x192b2b08192b2b2b, + 0x192b2b19082b1919, 0x192b2b2b0808192b, 0x192b2b2b19191908, 0x192b2b2b192b082b, + 0x2b08080808080808, 0x2b0808080808082b, 0x2b08080808081919, 0x2b08080808082b08, + 0x2b08080808190819, 0x2b08080808191908, 0x2b080808082b0808, 0x2b080808082b2b2b, + 0x2b08080819080819, 0x2b08080819081908, 0x2b08080819190808, 0x2b0808082b080808, + 0x2b0808082b08082b, 0x2b0808082b2b2b08, 0x2b0808082b2b2b2b, 0x2b08081908080819, + 0x2b08081908081908, 0x2b0808190808192b, 0x2b08081908190808, 0x2b08081919080808, + 0x2b08081919190819, 0x2b08081919192b19, 0x2b08082b08080808, 0x2b08082b082b0808, + 0x2b08082b2b080808, 0x2b08082b2b08082b, 0x2b08082b2b2b0808, 0x2b08082b2b2b2b08, + 0x2b08190808080819, 0x2b08190808081908, 0x2b08190808190808, 0x2b0819080819082b, + 0x2b08190808191919, 0x2b08190819080808, 0x2b081908192b0808, 0x2b0819082b082b19, + 0x2b08191908080808, 0x2b08191919081908, 0x2b0819192b2b1919, 0x2b08192b08192b08, + 0x2b08192b192b2b2b, 0x2b082b0808080808, 0x2b082b0808082b08, 0x2b082b08082b1919, + 0x2b082b0819192b2b, 0x2b082b082b080808, 0x2b082b082b08082b, 0x2b082b082b2b2b08, + 0x2b082b190808192b, 0x2b082b2b082b082b, 0x2b082b2b2b080808, 0x2b082b2b2b082b08, + 0x2b082b2b2b19192b, 0x2b082b2b2b2b2b08, 0x2b19080808080819, 0x2b19080808081908, + 0x2b19080808190808, 0x2b19080819080808, 0x2b1908081919192b, 
0x2b1908082b081908, + 0x2b19081908080808, 0x2b190819082b082b, 0x2b190819192b1908, 0x2b19082b1919192b, + 0x2b19082b2b082b19, 0x2b19190808080808, 0x2b19190808081919, 0x2b19190819081908, + 0x2b19190819190808, 0x2b19190819192b08, 0x2b191919082b2b19, 0x2b1919192b190808, + 0x2b1919192b19082b, 0x2b19192b19080819, 0x2b192b0819190819, 0x2b192b082b2b192b, + 0x2b192b1919082b19, 0x2b192b2b08191919, 0x2b192b2b192b0808, 0x2b2b080808080808, + 0x2b2b08080808082b, 0x2b2b080808082b08, 0x2b2b080808082b2b, 0x2b2b0808082b0808, + 0x2b2b0808082b2b2b, 0x2b2b08082b2b0808, 0x2b2b081919190819, 0x2b2b081919192b19, + 0x2b2b08192b2b192b, 0x2b2b082b08080808, 0x2b2b082b0808082b, 0x2b2b082b08082b08, + 0x2b2b082b082b2b2b, 0x2b2b082b2b080808, 0x2b2b082b2b2b0808, 0x2b2b190819080808, + 0x2b2b19082b191919, 0x2b2b192b192b1919, 0x2b2b192b2b192b08, 0x2b2b2b0808082b2b, + 0x2b2b2b08082b0808, 0x2b2b2b08082b082b, 0x2b2b2b08082b2b08, 0x2b2b2b082b2b0808, + 0x2b2b2b082b2b2b08, 0x2b2b2b1908081908, 0x2b2b2b192b081908, 0x2b2b2b192b08192b, + 0x2b2b2b2b082b2b08, 0x2b2b2b2b082b2b2b, 0x2b2b2b2b2b190819, 0x2b2b2b2b2b2b2b2b, +GGML_TABLE_END() + +GGML_TABLE_BEGIN(uint64_t, iq2s_grid, 1024) + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x080808080819192b, + 0x0808080808192b19, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b1919, + 0x08080808082b2b08, 0x0808080819080819, 0x0808080819081908, 0x080808081908192b, + 0x0808080819082b19, 0x0808080819190808, 0x080808081919082b, 0x0808080819191919, + 0x0808080819192b08, 0x08080808192b0819, 0x08080808192b1908, 0x08080808192b192b, + 0x08080808192b2b19, 0x080808082b080808, 0x080808082b08082b, 0x080808082b081919, + 0x080808082b082b08, 0x080808082b190819, 0x080808082b191908, 0x080808082b2b0808, + 0x080808082b2b1919, 0x080808082b2b2b2b, 0x0808081908080819, 0x0808081908081908, + 0x080808190808192b, 0x0808081908082b19, 0x0808081908190808, 0x080808190819082b, + 0x0808081908191919, 
0x0808081908192b08, 0x08080819082b0819, 0x08080819082b1908, + 0x0808081919080808, 0x080808191908082b, 0x0808081919081919, 0x0808081919082b08, + 0x0808081919190819, 0x0808081919191908, 0x080808191919192b, 0x0808081919192b19, + 0x08080819192b0808, 0x08080819192b1919, 0x08080819192b2b08, 0x080808192b080819, + 0x080808192b081908, 0x080808192b190808, 0x080808192b19082b, 0x080808192b191919, + 0x080808192b2b0819, 0x080808192b2b1908, 0x0808082b08080808, 0x0808082b0808082b, + 0x0808082b08081919, 0x0808082b08082b08, 0x0808082b08190819, 0x0808082b08191908, + 0x0808082b082b0808, 0x0808082b082b2b2b, 0x0808082b19080819, 0x0808082b19081908, + 0x0808082b1908192b, 0x0808082b19082b19, 0x0808082b19190808, 0x0808082b19191919, + 0x0808082b2b080808, 0x0808082b2b081919, 0x0808082b2b082b2b, 0x0808082b2b191908, + 0x0808082b2b2b082b, 0x0808190808080819, 0x0808190808081908, 0x080819080808192b, + 0x0808190808082b19, 0x0808190808190808, 0x080819080819082b, 0x0808190808191919, + 0x0808190808192b08, 0x08081908082b0819, 0x08081908082b1908, 0x08081908082b192b, + 0x08081908082b2b19, 0x0808190819080808, 0x080819081908082b, 0x0808190819081919, + 0x0808190819082b08, 0x0808190819082b2b, 0x0808190819190819, 0x0808190819191908, + 0x080819081919192b, 0x0808190819192b19, 0x08081908192b0808, 0x08081908192b082b, + 0x08081908192b1919, 0x080819082b080819, 0x080819082b081908, 0x080819082b08192b, + 0x080819082b082b19, 0x080819082b190808, 0x080819082b191919, 0x080819082b192b08, + 0x080819082b2b0819, 0x080819082b2b1908, 0x0808191908080808, 0x080819190808082b, + 0x0808191908081919, 0x0808191908082b08, 0x0808191908082b2b, 0x0808191908190819, + 0x0808191908191908, 0x080819190819192b, 0x0808191908192b19, 0x08081919082b0808, + 0x08081919082b1919, 0x08081919082b2b08, 0x0808191919080819, 0x0808191919081908, + 0x080819191908192b, 0x0808191919082b19, 0x0808191919190808, 0x080819191919082b, + 0x0808191919191919, 0x0808191919192b08, 0x08081919192b0819, 0x08081919192b1908, + 0x080819192b080808, 0x080819192b08082b, 
0x080819192b081919, 0x080819192b082b08, + 0x080819192b190819, 0x080819192b191908, 0x080819192b2b0808, 0x0808192b08080819, + 0x0808192b08081908, 0x0808192b0808192b, 0x0808192b08082b19, 0x0808192b08190808, + 0x0808192b08191919, 0x0808192b19080808, 0x0808192b19081919, 0x0808192b19082b08, + 0x0808192b19190819, 0x0808192b19191908, 0x0808192b192b0808, 0x0808192b2b080819, + 0x0808192b2b081908, 0x0808192b2b190808, 0x08082b0808080808, 0x08082b080808082b, + 0x08082b0808081919, 0x08082b0808082b08, 0x08082b0808190819, 0x08082b0808191908, + 0x08082b080819192b, 0x08082b0808192b19, 0x08082b08082b0808, 0x08082b08082b1919, + 0x08082b08082b2b2b, 0x08082b0819080819, 0x08082b0819081908, 0x08082b081908192b, + 0x08082b0819082b19, 0x08082b0819190808, 0x08082b081919082b, 0x08082b0819191919, + 0x08082b0819192b08, 0x08082b08192b0819, 0x08082b08192b1908, 0x08082b082b080808, + 0x08082b082b081919, 0x08082b082b191908, 0x08082b082b2b2b2b, 0x08082b1908080819, + 0x08082b1908081908, 0x08082b1908190808, 0x08082b190819082b, 0x08082b1908191919, + 0x08082b1908192b08, 0x08082b19082b0819, 0x08082b1919080808, 0x08082b1919081919, + 0x08082b1919082b08, 0x08082b1919190819, 0x08082b1919191908, 0x08082b19192b0808, + 0x08082b192b080819, 0x08082b192b190808, 0x08082b2b08080808, 0x08082b2b08190819, + 0x08082b2b08191908, 0x08082b2b082b082b, 0x08082b2b082b2b08, 0x08082b2b082b2b2b, + 0x08082b2b19190808, 0x08082b2b2b192b19, 0x0819080808080819, 0x0819080808081908, + 0x081908080808192b, 0x0819080808082b19, 0x0819080808190808, 0x081908080819082b, + 0x0819080808191919, 0x0819080808192b08, 0x08190808082b0819, 0x08190808082b1908, + 0x08190808082b192b, 0x0819080819080808, 0x081908081908082b, 0x0819080819081919, + 0x0819080819082b08, 0x0819080819190819, 0x0819080819191908, 0x081908081919192b, + 0x0819080819192b19, 0x08190808192b0808, 0x08190808192b082b, 0x08190808192b1919, + 0x08190808192b2b08, 0x081908082b080819, 0x081908082b081908, 0x081908082b08192b, + 0x081908082b190808, 0x081908082b191919, 0x081908082b192b08, 
0x081908082b2b0819, + 0x081908082b2b1908, 0x0819081908080808, 0x081908190808082b, 0x0819081908081919, + 0x0819081908082b08, 0x0819081908082b2b, 0x0819081908190819, 0x0819081908191908, + 0x081908190819192b, 0x0819081908192b19, 0x08190819082b0808, 0x08190819082b082b, + 0x08190819082b1919, 0x08190819082b2b08, 0x0819081919080819, 0x0819081919081908, + 0x081908191908192b, 0x0819081919082b19, 0x0819081919190808, 0x081908191919082b, + 0x0819081919191919, 0x0819081919192b08, 0x08190819192b0819, 0x08190819192b1908, + 0x081908192b080808, 0x081908192b08082b, 0x081908192b081919, 0x081908192b082b08, + 0x081908192b190819, 0x081908192b191908, 0x0819082b08080819, 0x0819082b08081908, + 0x0819082b08082b19, 0x0819082b08190808, 0x0819082b08191919, 0x0819082b082b0819, + 0x0819082b082b1908, 0x0819082b19080808, 0x0819082b19081919, 0x0819082b19190819, + 0x0819082b19191908, 0x0819082b2b080819, 0x0819082b2b081908, 0x0819082b2b190808, + 0x0819190808080808, 0x081919080808082b, 0x0819190808081919, 0x0819190808082b08, + 0x0819190808190819, 0x0819190808191908, 0x081919080819192b, 0x0819190808192b19, + 0x08191908082b0808, 0x08191908082b1919, 0x08191908082b2b08, 0x0819190819080819, + 0x0819190819081908, 0x081919081908192b, 0x0819190819082b19, 0x0819190819190808, + 0x081919081919082b, 0x0819190819191919, 0x0819190819192b08, 0x08191908192b0819, + 0x08191908192b1908, 0x081919082b080808, 0x081919082b08082b, 0x081919082b081919, + 0x081919082b082b08, 0x081919082b190819, 0x081919082b191908, 0x081919082b2b0808, + 0x0819191908080819, 0x0819191908081908, 0x081919190808192b, 0x0819191908082b19, + 0x0819191908190808, 0x081919190819082b, 0x0819191908191919, 0x0819191908192b08, + 0x08191919082b0819, 0x08191919082b1908, 0x0819191919080808, 0x081919191908082b, + 0x0819191919081919, 0x0819191919082b08, 0x0819191919190819, 0x0819191919191908, + 0x08191919192b0808, 0x081919192b080819, 0x081919192b081908, 0x081919192b190808, + 0x0819192b08080808, 0x0819192b08081919, 0x0819192b08082b08, 0x0819192b08190819, + 
0x0819192b08191908, 0x0819192b082b0808, 0x0819192b19080819, 0x0819192b19081908, + 0x0819192b19190808, 0x0819192b2b080808, 0x0819192b2b2b2b2b, 0x08192b0808080819, + 0x08192b0808081908, 0x08192b080808192b, 0x08192b0808082b19, 0x08192b0808190808, + 0x08192b0808191919, 0x08192b0808192b08, 0x08192b08082b0819, 0x08192b0819080808, + 0x08192b081908082b, 0x08192b0819081919, 0x08192b0819082b08, 0x08192b0819190819, + 0x08192b0819191908, 0x08192b08192b0808, 0x08192b082b080819, 0x08192b082b081908, + 0x08192b1908080808, 0x08192b190808082b, 0x08192b1908081919, 0x08192b1908082b08, + 0x08192b1908190819, 0x08192b1908191908, 0x08192b19082b0808, 0x08192b1919080819, + 0x08192b1919081908, 0x08192b1919190808, 0x08192b19192b2b19, 0x08192b192b2b082b, + 0x08192b2b08081908, 0x08192b2b08190808, 0x08192b2b19080808, 0x08192b2b1919192b, + 0x082b080808080808, 0x082b08080808082b, 0x082b080808081919, 0x082b080808082b08, + 0x082b080808190819, 0x082b080808191908, 0x082b08080819192b, 0x082b080808192b19, + 0x082b0808082b0808, 0x082b0808082b1919, 0x082b0808082b2b2b, 0x082b080819080819, + 0x082b080819081908, 0x082b080819190808, 0x082b08081919082b, 0x082b080819191919, + 0x082b0808192b1908, 0x082b08082b080808, 0x082b08082b082b2b, 0x082b08082b191908, + 0x082b08082b2b2b2b, 0x082b081908080819, 0x082b081908081908, 0x082b081908190808, + 0x082b08190819082b, 0x082b081908191919, 0x082b0819082b0819, 0x082b081919080808, + 0x082b08191908082b, 0x082b081919081919, 0x082b081919190819, 0x082b081919191908, + 0x082b0819192b0808, 0x082b08192b080819, 0x082b08192b081908, 0x082b08192b190808, + 0x082b082b08080808, 0x082b082b08082b2b, 0x082b082b082b082b, 0x082b082b082b2b08, + 0x082b082b082b2b2b, 0x082b082b19081908, 0x082b082b19190808, 0x082b082b2b082b08, + 0x082b082b2b082b2b, 0x082b082b2b2b2b08, 0x082b190808080819, 0x082b190808081908, + 0x082b19080808192b, 0x082b190808082b19, 0x082b190808190808, 0x082b190808191919, + 0x082b190808192b08, 0x082b1908082b0819, 0x082b1908082b1908, 0x082b190819080808, + 0x082b19081908082b, 
0x082b190819081919, 0x082b190819082b08, 0x082b190819190819, + 0x082b190819191908, 0x082b1908192b0808, 0x082b19082b080819, 0x082b19082b081908, + 0x082b19082b190808, 0x082b191908080808, 0x082b191908081919, 0x082b191908082b08, + 0x082b191908190819, 0x082b191908191908, 0x082b1919082b0808, 0x082b191919080819, + 0x082b191919081908, 0x082b191919190808, 0x082b1919192b192b, 0x082b19192b080808, + 0x082b192b08080819, 0x082b192b08081908, 0x082b192b08190808, 0x082b192b19080808, + 0x082b192b19192b19, 0x082b2b0808080808, 0x082b2b0808081919, 0x082b2b0808190819, + 0x082b2b0808191908, 0x082b2b0819080819, 0x082b2b0819081908, 0x082b2b0819190808, + 0x082b2b082b082b2b, 0x082b2b082b2b2b2b, 0x082b2b1908080819, 0x082b2b1908081908, + 0x082b2b1908190808, 0x082b2b192b191919, 0x082b2b2b08082b2b, 0x082b2b2b082b082b, + 0x082b2b2b192b1908, 0x082b2b2b2b082b08, 0x082b2b2b2b082b2b, 0x1908080808080819, + 0x1908080808081908, 0x190808080808192b, 0x1908080808082b19, 0x1908080808190808, + 0x190808080819082b, 0x1908080808191919, 0x1908080808192b08, 0x1908080808192b2b, + 0x19080808082b0819, 0x19080808082b1908, 0x19080808082b192b, 0x1908080819080808, + 0x190808081908082b, 0x1908080819081919, 0x1908080819082b08, 0x1908080819082b2b, + 0x1908080819190819, 0x1908080819191908, 0x190808081919192b, 0x1908080819192b19, + 0x19080808192b0808, 0x19080808192b082b, 0x19080808192b1919, 0x190808082b080819, + 0x190808082b081908, 0x190808082b190808, 0x190808082b191919, 0x190808082b192b08, + 0x190808082b2b0819, 0x190808082b2b1908, 0x1908081908080808, 0x190808190808082b, + 0x1908081908081919, 0x1908081908082b08, 0x1908081908190819, 0x1908081908191908, + 0x190808190819192b, 0x1908081908192b19, 0x19080819082b0808, 0x19080819082b082b, + 0x19080819082b1919, 0x1908081919080819, 0x1908081919081908, 0x190808191908192b, + 0x1908081919082b19, 0x1908081919190808, 0x190808191919082b, 0x1908081919191919, + 0x1908081919192b08, 0x19080819192b0819, 0x19080819192b1908, 0x190808192b080808, + 0x190808192b08082b, 0x190808192b081919, 
0x190808192b082b08, 0x190808192b190819, + 0x190808192b191908, 0x190808192b2b0808, 0x1908082b08080819, 0x1908082b08081908, + 0x1908082b08190808, 0x1908082b0819082b, 0x1908082b08191919, 0x1908082b08192b08, + 0x1908082b082b1908, 0x1908082b19080808, 0x1908082b19081919, 0x1908082b19082b08, + 0x1908082b19190819, 0x1908082b19191908, 0x1908082b192b0808, 0x1908082b2b080819, + 0x1908082b2b081908, 0x1908190808080808, 0x190819080808082b, 0x1908190808081919, + 0x1908190808082b08, 0x1908190808082b2b, 0x1908190808190819, 0x1908190808191908, + 0x190819080819192b, 0x1908190808192b19, 0x19081908082b0808, 0x19081908082b082b, + 0x19081908082b1919, 0x19081908082b2b08, 0x1908190819080819, 0x1908190819081908, + 0x190819081908192b, 0x1908190819082b19, 0x1908190819190808, 0x190819081919082b, + 0x1908190819191919, 0x1908190819192b08, 0x19081908192b0819, 0x19081908192b1908, + 0x190819082b080808, 0x190819082b08082b, 0x190819082b081919, 0x190819082b082b08, + 0x190819082b190819, 0x190819082b191908, 0x190819082b2b0808, 0x1908191908080819, + 0x1908191908081908, 0x190819190808192b, 0x1908191908082b19, 0x1908191908190808, + 0x190819190819082b, 0x1908191908191919, 0x1908191908192b08, 0x19081919082b0819, + 0x19081919082b1908, 0x1908191919080808, 0x190819191908082b, 0x1908191919081919, + 0x1908191919082b08, 0x1908191919190819, 0x1908191919191908, 0x19081919192b0808, + 0x19081919192b2b2b, 0x190819192b080819, 0x190819192b081908, 0x190819192b190808, + 0x1908192b08080808, 0x1908192b0808082b, 0x1908192b08081919, 0x1908192b08082b08, + 0x1908192b08190819, 0x1908192b08191908, 0x1908192b082b0808, 0x1908192b19080819, + 0x1908192b19081908, 0x1908192b19190808, 0x1908192b2b080808, 0x1908192b2b2b1919, + 0x19082b0808080819, 0x19082b0808081908, 0x19082b0808082b19, 0x19082b0808190808, + 0x19082b080819082b, 0x19082b0808191919, 0x19082b0808192b08, 0x19082b08082b0819, + 0x19082b08082b1908, 0x19082b0819080808, 0x19082b081908082b, 0x19082b0819081919, + 0x19082b0819082b08, 0x19082b0819190819, 0x19082b0819191908, 
0x19082b08192b0808, + 0x19082b082b081908, 0x19082b082b190808, 0x19082b1908080808, 0x19082b190808082b, + 0x19082b1908081919, 0x19082b1908082b08, 0x19082b1908190819, 0x19082b1908191908, + 0x19082b19082b0808, 0x19082b1919080819, 0x19082b1919081908, 0x19082b1919190808, + 0x19082b192b080808, 0x19082b192b19192b, 0x19082b2b08080819, 0x19082b2b08081908, + 0x19082b2b08190808, 0x19082b2b19080808, 0x1919080808080808, 0x191908080808082b, + 0x1919080808081919, 0x1919080808082b08, 0x1919080808190819, 0x1919080808191908, + 0x191908080819192b, 0x1919080808192b19, 0x19190808082b0808, 0x19190808082b082b, + 0x19190808082b1919, 0x19190808082b2b08, 0x1919080819080819, 0x1919080819081908, + 0x191908081908192b, 0x1919080819082b19, 0x1919080819190808, 0x191908081919082b, + 0x1919080819191919, 0x1919080819192b08, 0x19190808192b0819, 0x19190808192b1908, + 0x191908082b080808, 0x191908082b08082b, 0x191908082b081919, 0x191908082b082b08, + 0x191908082b190819, 0x191908082b191908, 0x1919081908080819, 0x1919081908081908, + 0x191908190808192b, 0x1919081908082b19, 0x1919081908190808, 0x191908190819082b, + 0x1919081908191919, 0x1919081908192b08, 0x19190819082b0819, 0x19190819082b1908, + 0x1919081919080808, 0x191908191908082b, 0x1919081919081919, 0x1919081919082b08, + 0x1919081919190819, 0x1919081919191908, 0x19190819192b0808, 0x191908192b080819, + 0x191908192b081908, 0x191908192b190808, 0x1919082b08080808, 0x1919082b08081919, + 0x1919082b08082b08, 0x1919082b08190819, 0x1919082b08191908, 0x1919082b082b0808, + 0x1919082b19080819, 0x1919082b19081908, 0x1919082b19190808, 0x1919082b192b2b19, + 0x1919082b2b080808, 0x1919190808080819, 0x1919190808081908, 0x191919080808192b, + 0x1919190808082b19, 0x1919190808190808, 0x191919080819082b, 0x1919190808191919, + 0x1919190808192b08, 0x19191908082b0819, 0x19191908082b1908, 0x1919190819080808, + 0x191919081908082b, 0x1919190819081919, 0x1919190819082b08, 0x1919190819190819, + 0x1919190819191908, 0x19191908192b0808, 0x191919082b080819, 0x191919082b081908, + 
0x191919082b190808, 0x1919191908080808, 0x191919190808082b, 0x1919191908081919, + 0x1919191908082b08, 0x1919191908190819, 0x1919191908191908, 0x19191919082b0808, + 0x1919191919080819, 0x1919191919081908, 0x1919191919190808, 0x191919192b080808, + 0x1919192b08080819, 0x1919192b08081908, 0x1919192b08190808, 0x1919192b082b192b, + 0x1919192b19080808, 0x19192b0808080808, 0x19192b080808082b, 0x19192b0808081919, + 0x19192b0808082b08, 0x19192b0808190819, 0x19192b0808191908, 0x19192b08082b0808, + 0x19192b0819080819, 0x19192b0819081908, 0x19192b0819190808, 0x19192b0819192b2b, + 0x19192b082b080808, 0x19192b1908080819, 0x19192b1908081908, 0x19192b1908190808, + 0x19192b1919080808, 0x19192b2b08080808, 0x19192b2b08192b19, 0x19192b2b2b081919, + 0x19192b2b2b2b2b08, 0x192b080808080819, 0x192b080808081908, 0x192b08080808192b, + 0x192b080808190808, 0x192b08080819082b, 0x192b080808191919, 0x192b080808192b08, + 0x192b0808082b0819, 0x192b0808082b1908, 0x192b080819080808, 0x192b080819081919, + 0x192b080819082b08, 0x192b080819190819, 0x192b080819191908, 0x192b0808192b0808, + 0x192b08082b081908, 0x192b08082b190808, 0x192b081908080808, 0x192b08190808082b, + 0x192b081908081919, 0x192b081908082b08, 0x192b081908190819, 0x192b081908191908, + 0x192b0819082b0808, 0x192b081919080819, 0x192b081919081908, 0x192b081919190808, + 0x192b08192b080808, 0x192b08192b192b19, 0x192b082b08081908, 0x192b082b08190808, + 0x192b082b19080808, 0x192b082b1919192b, 0x192b082b2b2b0819, 0x192b190808080808, + 0x192b190808081919, 0x192b190808082b08, 0x192b190808190819, 0x192b190808191908, + 0x192b1908082b0808, 0x192b190819080819, 0x192b190819081908, 0x192b190819190808, + 0x192b19082b080808, 0x192b191908080819, 0x192b191908081908, 0x192b191908190808, + 0x192b191919080808, 0x192b191919082b2b, 0x192b1919192b2b08, 0x192b19192b19082b, + 0x192b192b08080808, 0x192b192b2b191908, 0x192b2b0808080819, 0x192b2b0808081908, + 0x192b2b0808190808, 0x192b2b08192b1919, 0x192b2b082b192b08, 0x192b2b1908080808, + 0x192b2b19082b2b2b, 
0x192b2b2b1908082b, 0x192b2b2b2b2b0819, 0x2b08080808080808, + 0x2b0808080808082b, 0x2b08080808081919, 0x2b08080808082b08, 0x2b08080808190819, + 0x2b08080808191908, 0x2b08080808192b19, 0x2b080808082b0808, 0x2b080808082b1919, + 0x2b08080819080819, 0x2b08080819081908, 0x2b08080819190808, 0x2b0808081919082b, + 0x2b08080819191919, 0x2b08080819192b08, 0x2b080808192b0819, 0x2b0808082b080808, + 0x2b0808082b081919, 0x2b0808082b190819, 0x2b0808082b191908, 0x2b08081908080819, + 0x2b08081908081908, 0x2b08081908082b19, 0x2b08081908190808, 0x2b0808190819082b, + 0x2b08081908191919, 0x2b08081908192b08, 0x2b080819082b0819, 0x2b080819082b1908, + 0x2b08081919080808, 0x2b0808191908082b, 0x2b08081919081919, 0x2b08081919082b08, + 0x2b08081919190819, 0x2b08081919191908, 0x2b0808192b080819, 0x2b0808192b081908, + 0x2b0808192b190808, 0x2b0808192b2b2b19, 0x2b08082b08080808, 0x2b08082b08081919, + 0x2b08082b08082b2b, 0x2b08082b08190819, 0x2b08082b08191908, 0x2b08082b19080819, + 0x2b08082b19081908, 0x2b08082b19190808, 0x2b08190808080819, 0x2b08190808081908, + 0x2b0819080808192b, 0x2b08190808082b19, 0x2b08190808190808, 0x2b0819080819082b, + 0x2b08190808191919, 0x2b08190808192b08, 0x2b081908082b0819, 0x2b08190819080808, + 0x2b0819081908082b, 0x2b08190819081919, 0x2b08190819082b08, 0x2b08190819190819, + 0x2b08190819191908, 0x2b081908192b0808, 0x2b0819082b080819, 0x2b0819082b081908, + 0x2b0819082b190808, 0x2b08191908080808, 0x2b0819190808082b, 0x2b08191908081919, + 0x2b08191908082b08, 0x2b08191908190819, 0x2b08191908191908, 0x2b081919082b0808, + 0x2b08191919080819, 0x2b08191919081908, 0x2b08191919190808, 0x2b0819192b080808, + 0x2b0819192b082b2b, 0x2b08192b08080819, 0x2b08192b08081908, 0x2b08192b08190808, + 0x2b08192b082b2b19, 0x2b08192b19080808, 0x2b082b0808080808, 0x2b082b0808081919, + 0x2b082b0808190819, 0x2b082b0808191908, 0x2b082b0819080819, 0x2b082b0819081908, + 0x2b082b0819190808, 0x2b082b082b2b082b, 0x2b082b1908080819, 0x2b082b1908081908, + 0x2b082b1919080808, 0x2b082b19192b1919, 
0x2b082b2b082b082b, 0x2b082b2b19192b08, + 0x2b082b2b19192b2b, 0x2b082b2b2b08082b, 0x2b082b2b2b2b082b, 0x2b19080808080819, + 0x2b19080808081908, 0x2b19080808082b19, 0x2b19080808190808, 0x2b1908080819082b, + 0x2b19080808191919, 0x2b19080808192b08, 0x2b190808082b1908, 0x2b19080819080808, + 0x2b1908081908082b, 0x2b19080819081919, 0x2b19080819082b08, 0x2b19080819190819, + 0x2b19080819191908, 0x2b190808192b0808, 0x2b1908082b080819, 0x2b1908082b081908, + 0x2b1908082b190808, 0x2b19081908080808, 0x2b19081908081919, 0x2b19081908190819, + 0x2b19081908191908, 0x2b19081919080819, 0x2b19081919081908, 0x2b19081919190808, + 0x2b19081919192b2b, 0x2b19082b08080819, 0x2b19082b08081908, 0x2b19082b08190808, + 0x2b19082b19080808, 0x2b19082b2b2b192b, 0x2b19190808080808, 0x2b1919080808082b, + 0x2b19190808081919, 0x2b19190808082b08, 0x2b19190808190819, 0x2b19190808191908, + 0x2b191908082b0808, 0x2b19190819080819, 0x2b19190819081908, 0x2b19190819190808, + 0x2b1919082b080808, 0x2b1919082b19192b, 0x2b19191908080819, 0x2b19191908081908, + 0x2b19191908190808, 0x2b19191919080808, 0x2b1919192b192b08, 0x2b1919192b2b0819, + 0x2b19192b08080808, 0x2b19192b1908192b, 0x2b19192b192b1908, 0x2b192b0808080819, + 0x2b192b0808081908, 0x2b192b0808190808, 0x2b192b08082b192b, 0x2b192b0819080808, + 0x2b192b082b2b2b19, 0x2b192b1908080808, 0x2b192b1919082b19, 0x2b192b191919082b, + 0x2b192b2b2b190808, 0x2b2b080808080808, 0x2b2b080808081919, 0x2b2b080808082b2b, + 0x2b2b080808191908, 0x2b2b0808082b082b, 0x2b2b0808082b2b2b, 0x2b2b080819080819, + 0x2b2b080819081908, 0x2b2b080819190808, 0x2b2b08082b2b082b, 0x2b2b08082b2b2b2b, + 0x2b2b081919080808, 0x2b2b0819192b1919, 0x2b2b082b0808082b, 0x2b2b082b08082b2b, + 0x2b2b082b082b082b, 0x2b2b082b082b2b08, 0x2b2b082b082b2b2b, 0x2b2b082b2b08082b, + 0x2b2b082b2b082b08, 0x2b2b082b2b082b2b, 0x2b2b082b2b2b2b08, 0x2b2b190808080819, + 0x2b2b190808081908, 0x2b2b190808190808, 0x2b2b190819080808, 0x2b2b19082b082b19, + 0x2b2b19082b2b1908, 0x2b2b191908080808, 0x2b2b191908192b19, 
0x2b2b192b19190819, + 0x2b2b2b0808082b2b, 0x2b2b2b08082b2b08, 0x2b2b2b082b2b082b, 0x2b2b2b1919191908, + 0x2b2b2b192b08192b, 0x2b2b2b2b08082b08, 0x2b2b2b2b08082b2b, 0x2b2b2b2b082b0808, + 0x2b2b2b2b082b082b, 0x2b2b2b2b082b2b08, 0x2b2b2b2b2b082b08, 0x2b2b2b2b2b2b2b2b, +GGML_TABLE_END() + +GGML_TABLE_BEGIN(uint32_t, iq3xxs_grid, 256) + 0x04040404, 0x04040414, 0x04040424, 0x04040c0c, 0x04040c1c, 0x04040c3e, 0x04041404, 0x04041414, + 0x04041c0c, 0x04042414, 0x04043e1c, 0x04043e2c, 0x040c040c, 0x040c041c, 0x040c0c04, 0x040c0c14, + 0x040c140c, 0x040c142c, 0x040c1c04, 0x040c1c14, 0x040c240c, 0x040c2c24, 0x040c3e04, 0x04140404, + 0x04140414, 0x04140424, 0x04140c0c, 0x04141404, 0x04141414, 0x04141c0c, 0x04141c1c, 0x04141c3e, + 0x04142c0c, 0x04142c3e, 0x04143e2c, 0x041c040c, 0x041c043e, 0x041c0c04, 0x041c0c14, 0x041c142c, + 0x041c3e04, 0x04240c1c, 0x04241c3e, 0x04242424, 0x04242c3e, 0x04243e1c, 0x04243e2c, 0x042c040c, + 0x042c043e, 0x042c1c14, 0x042c2c14, 0x04341c2c, 0x04343424, 0x043e0c04, 0x043e0c24, 0x043e0c34, + 0x043e241c, 0x043e340c, 0x0c04040c, 0x0c04041c, 0x0c040c04, 0x0c040c14, 0x0c04140c, 0x0c04141c, + 0x0c041c04, 0x0c041c14, 0x0c041c24, 0x0c04243e, 0x0c042c04, 0x0c0c0404, 0x0c0c0414, 0x0c0c0c0c, + 0x0c0c1404, 0x0c0c1414, 0x0c14040c, 0x0c14041c, 0x0c140c04, 0x0c140c14, 0x0c14140c, 0x0c141c04, + 0x0c143e14, 0x0c1c0404, 0x0c1c0414, 0x0c1c1404, 0x0c1c1c0c, 0x0c1c2434, 0x0c1c3434, 0x0c24040c, + 0x0c24042c, 0x0c242c04, 0x0c2c1404, 0x0c2c1424, 0x0c2c2434, 0x0c2c3e0c, 0x0c34042c, 0x0c3e1414, + 0x0c3e2404, 0x14040404, 0x14040414, 0x14040c0c, 0x14040c1c, 0x14041404, 0x14041414, 0x14041434, + 0x14041c0c, 0x14042414, 0x140c040c, 0x140c041c, 0x140c042c, 0x140c0c04, 0x140c0c14, 0x140c140c, + 0x140c1c04, 0x140c341c, 0x140c343e, 0x140c3e04, 0x14140404, 0x14140414, 0x14140c0c, 0x14140c3e, + 0x14141404, 0x14141414, 0x14141c3e, 0x14142404, 0x14142c2c, 0x141c040c, 0x141c0c04, 0x141c0c24, + 0x141c3e04, 0x141c3e24, 0x14241c2c, 0x14242c1c, 0x142c041c, 0x142c143e, 0x142c240c, 0x142c3e24, + 
0x143e040c, 0x143e041c, 0x143e0c34, 0x143e242c, 0x1c04040c, 0x1c040c04, 0x1c040c14, 0x1c04140c, + 0x1c04141c, 0x1c042c04, 0x1c04342c, 0x1c043e14, 0x1c0c0404, 0x1c0c0414, 0x1c0c1404, 0x1c0c1c0c, + 0x1c0c2424, 0x1c0c2434, 0x1c14040c, 0x1c14041c, 0x1c140c04, 0x1c14142c, 0x1c142c14, 0x1c143e14, + 0x1c1c0c0c, 0x1c1c1c1c, 0x1c241c04, 0x1c24243e, 0x1c243e14, 0x1c2c0404, 0x1c2c0434, 0x1c2c1414, + 0x1c2c2c2c, 0x1c340c24, 0x1c341c34, 0x1c34341c, 0x1c3e1c1c, 0x1c3e3404, 0x24040424, 0x24040c3e, + 0x24041c2c, 0x24041c3e, 0x24042c1c, 0x24042c3e, 0x240c3e24, 0x24141404, 0x24141c3e, 0x24142404, + 0x24143404, 0x24143434, 0x241c043e, 0x241c242c, 0x24240424, 0x24242c0c, 0x24243424, 0x242c142c, + 0x242c241c, 0x242c3e04, 0x243e042c, 0x243e0c04, 0x243e0c14, 0x243e1c04, 0x2c040c14, 0x2c04240c, + 0x2c043e04, 0x2c0c0404, 0x2c0c0434, 0x2c0c1434, 0x2c0c2c2c, 0x2c140c24, 0x2c141c14, 0x2c143e14, + 0x2c1c0414, 0x2c1c2c1c, 0x2c240c04, 0x2c24141c, 0x2c24143e, 0x2c243e14, 0x2c2c0414, 0x2c2c1c0c, + 0x2c342c04, 0x2c3e1424, 0x2c3e2414, 0x34041424, 0x34042424, 0x34042434, 0x34043424, 0x340c140c, + 0x340c340c, 0x34140c3e, 0x34143424, 0x341c1c04, 0x341c1c34, 0x34242424, 0x342c042c, 0x342c2c14, + 0x34341c1c, 0x343e041c, 0x343e140c, 0x3e04041c, 0x3e04042c, 0x3e04043e, 0x3e040c04, 0x3e041c14, + 0x3e042c14, 0x3e0c1434, 0x3e0c2404, 0x3e140c14, 0x3e14242c, 0x3e142c14, 0x3e1c0404, 0x3e1c0c2c, + 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, +GGML_TABLE_END() + +GGML_TABLE_BEGIN(uint32_t, iq3s_grid, 512) + 0x01010101, 0x01010103, 0x01010105, 0x0101010b, 0x0101010f, 0x01010301, 0x01010303, 0x01010305, + 0x01010309, 0x0101030d, 0x01010501, 0x01010503, 0x0101050b, 0x01010707, 0x01010901, 0x01010905, + 0x0101090b, 0x0101090f, 0x01010b03, 0x01010b07, 0x01010d01, 0x01010d05, 0x01010f03, 0x01010f09, + 0x01010f0f, 0x01030101, 0x01030103, 0x01030105, 0x01030109, 0x01030301, 0x01030303, 0x0103030b, + 0x01030501, 0x01030507, 0x0103050f, 0x01030703, 0x0103070b, 0x01030909, 
0x01030d03, 0x01030d0b, + 0x01030f05, 0x01050101, 0x01050103, 0x0105010b, 0x0105010f, 0x01050301, 0x01050307, 0x0105030d, + 0x01050503, 0x0105050b, 0x01050701, 0x01050709, 0x01050905, 0x0105090b, 0x0105090f, 0x01050b03, + 0x01050b07, 0x01050f01, 0x01050f07, 0x01070107, 0x01070303, 0x0107030b, 0x01070501, 0x01070505, + 0x01070703, 0x01070707, 0x0107070d, 0x01070909, 0x01070b01, 0x01070b05, 0x01070d0f, 0x01070f03, + 0x01070f0b, 0x01090101, 0x01090307, 0x0109030f, 0x01090503, 0x01090509, 0x01090705, 0x01090901, + 0x01090907, 0x01090b03, 0x01090f01, 0x010b0105, 0x010b0109, 0x010b0501, 0x010b0505, 0x010b050d, + 0x010b0707, 0x010b0903, 0x010b090b, 0x010b090f, 0x010b0d0d, 0x010b0f07, 0x010d010d, 0x010d0303, + 0x010d0307, 0x010d0703, 0x010d0b05, 0x010d0f03, 0x010f0101, 0x010f0105, 0x010f0109, 0x010f0501, + 0x010f0505, 0x010f050d, 0x010f0707, 0x010f0b01, 0x010f0b09, 0x03010101, 0x03010103, 0x03010105, + 0x03010109, 0x03010301, 0x03010303, 0x03010307, 0x0301030b, 0x0301030f, 0x03010501, 0x03010505, + 0x03010703, 0x03010709, 0x0301070d, 0x03010b09, 0x03010b0d, 0x03010d03, 0x03010f05, 0x03030101, + 0x03030103, 0x03030107, 0x0303010d, 0x03030301, 0x03030309, 0x03030503, 0x03030701, 0x03030707, + 0x03030903, 0x03030b01, 0x03030b05, 0x03030f01, 0x03030f0d, 0x03050101, 0x03050305, 0x0305030b, + 0x0305030f, 0x03050501, 0x03050509, 0x03050705, 0x03050901, 0x03050907, 0x03050b0b, 0x03050d01, + 0x03050f05, 0x03070103, 0x03070109, 0x0307010f, 0x03070301, 0x03070307, 0x03070503, 0x0307050f, + 0x03070701, 0x03070709, 0x03070903, 0x03070d05, 0x03070f01, 0x03090107, 0x0309010b, 0x03090305, + 0x03090309, 0x03090703, 0x03090707, 0x03090905, 0x0309090d, 0x03090b01, 0x03090b09, 0x030b0103, + 0x030b0301, 0x030b0307, 0x030b0503, 0x030b0701, 0x030b0705, 0x030b0b03, 0x030d0501, 0x030d0509, + 0x030d050f, 0x030d0909, 0x030d090d, 0x030f0103, 0x030f0107, 0x030f0301, 0x030f0305, 0x030f0503, + 0x030f070b, 0x030f0903, 0x030f0d05, 0x030f0f01, 0x05010101, 0x05010103, 0x05010107, 0x0501010b, + 0x0501010f, 
0x05010301, 0x05010305, 0x05010309, 0x0501030d, 0x05010503, 0x05010507, 0x0501050f, + 0x05010701, 0x05010705, 0x05010903, 0x05010907, 0x0501090b, 0x05010b01, 0x05010b05, 0x05010d0f, + 0x05010f01, 0x05010f07, 0x05010f0b, 0x05030101, 0x05030105, 0x05030301, 0x05030307, 0x0503030f, + 0x05030505, 0x0503050b, 0x05030703, 0x05030709, 0x05030905, 0x05030b03, 0x05050103, 0x05050109, + 0x0505010f, 0x05050503, 0x05050507, 0x05050701, 0x0505070f, 0x05050903, 0x05050b07, 0x05050b0f, + 0x05050f03, 0x05050f09, 0x05070101, 0x05070105, 0x0507010b, 0x05070303, 0x05070505, 0x05070509, + 0x05070703, 0x05070707, 0x05070905, 0x05070b01, 0x05070d0d, 0x05090103, 0x0509010f, 0x05090501, + 0x05090507, 0x05090705, 0x0509070b, 0x05090903, 0x05090f05, 0x05090f0b, 0x050b0109, 0x050b0303, + 0x050b0505, 0x050b070f, 0x050b0901, 0x050b0b07, 0x050b0f01, 0x050d0101, 0x050d0105, 0x050d010f, + 0x050d0503, 0x050d0b0b, 0x050d0d03, 0x050f010b, 0x050f0303, 0x050f050d, 0x050f0701, 0x050f0907, + 0x050f0b01, 0x07010105, 0x07010303, 0x07010307, 0x0701030b, 0x0701030f, 0x07010505, 0x07010703, + 0x07010707, 0x0701070b, 0x07010905, 0x07010909, 0x0701090f, 0x07010b03, 0x07010d07, 0x07010f03, + 0x07030103, 0x07030107, 0x0703010b, 0x07030309, 0x07030503, 0x07030507, 0x07030901, 0x07030d01, + 0x07030f05, 0x07030f0d, 0x07050101, 0x07050305, 0x07050501, 0x07050705, 0x07050709, 0x07050b01, + 0x07070103, 0x07070301, 0x07070309, 0x07070503, 0x07070507, 0x0707050f, 0x07070701, 0x07070903, + 0x07070907, 0x0707090f, 0x07070b0b, 0x07070f07, 0x07090107, 0x07090303, 0x0709030d, 0x07090505, + 0x07090703, 0x07090b05, 0x07090d01, 0x07090d09, 0x070b0103, 0x070b0301, 0x070b0305, 0x070b050b, + 0x070b0705, 0x070b0909, 0x070b0b0d, 0x070b0f07, 0x070d030d, 0x070d0903, 0x070f0103, 0x070f0107, + 0x070f0501, 0x070f0505, 0x070f070b, 0x09010101, 0x09010109, 0x09010305, 0x09010501, 0x09010509, + 0x0901050f, 0x09010705, 0x09010903, 0x09010b01, 0x09010f01, 0x09030105, 0x0903010f, 0x09030303, + 0x09030307, 0x09030505, 0x09030701, 0x0903070b, 
0x09030907, 0x09030b03, 0x09030b0b, 0x09050103, + 0x09050107, 0x09050301, 0x0905030b, 0x09050503, 0x09050707, 0x09050901, 0x09050b0f, 0x09050d05, + 0x09050f01, 0x09070109, 0x09070303, 0x09070307, 0x09070501, 0x09070505, 0x09070703, 0x0907070b, + 0x09090101, 0x09090105, 0x09090509, 0x0909070f, 0x09090901, 0x09090f03, 0x090b010b, 0x090b010f, + 0x090b0503, 0x090b0d05, 0x090d0307, 0x090d0709, 0x090d0d01, 0x090f0301, 0x090f030b, 0x090f0701, + 0x090f0907, 0x090f0b03, 0x0b010105, 0x0b010301, 0x0b010309, 0x0b010505, 0x0b010901, 0x0b010909, + 0x0b01090f, 0x0b010b05, 0x0b010d0d, 0x0b010f09, 0x0b030103, 0x0b030107, 0x0b03010b, 0x0b030305, + 0x0b030503, 0x0b030705, 0x0b030f05, 0x0b050101, 0x0b050303, 0x0b050507, 0x0b050701, 0x0b05070d, + 0x0b050b07, 0x0b070105, 0x0b07010f, 0x0b070301, 0x0b07050f, 0x0b070909, 0x0b070b03, 0x0b070d0b, + 0x0b070f07, 0x0b090103, 0x0b090109, 0x0b090501, 0x0b090705, 0x0b09090d, 0x0b0b0305, 0x0b0b050d, + 0x0b0b0b03, 0x0b0b0b07, 0x0b0d0905, 0x0b0f0105, 0x0b0f0109, 0x0b0f0505, 0x0d010303, 0x0d010307, + 0x0d01030b, 0x0d010703, 0x0d010707, 0x0d010d01, 0x0d030101, 0x0d030501, 0x0d03050f, 0x0d030d09, + 0x0d050305, 0x0d050709, 0x0d050905, 0x0d050b0b, 0x0d050d05, 0x0d050f01, 0x0d070101, 0x0d070309, + 0x0d070503, 0x0d070901, 0x0d09050b, 0x0d090907, 0x0d090d05, 0x0d0b0101, 0x0d0b0107, 0x0d0b0709, + 0x0d0b0d01, 0x0d0d010b, 0x0d0d0901, 0x0d0f0303, 0x0d0f0307, 0x0f010101, 0x0f010109, 0x0f01010f, + 0x0f010501, 0x0f010505, 0x0f01070d, 0x0f010901, 0x0f010b09, 0x0f010d05, 0x0f030105, 0x0f030303, + 0x0f030509, 0x0f030907, 0x0f03090b, 0x0f050103, 0x0f050109, 0x0f050301, 0x0f05030d, 0x0f050503, + 0x0f050701, 0x0f050b03, 0x0f070105, 0x0f070705, 0x0f07070b, 0x0f070b07, 0x0f090103, 0x0f09010b, + 0x0f090307, 0x0f090501, 0x0f090b01, 0x0f0b0505, 0x0f0b0905, 0x0f0d0105, 0x0f0d0703, 0x0f0f0101, +GGML_TABLE_END() + +#define NGRID_IQ1S 2048 +#define IQ1S_DELTA 0.125f +#define IQ1M_DELTA 0.125f +#if defined(GGML_COMMON_IMPL_C) +GGML_TABLE_BEGIN(uint64_t, iq1s_grid, NGRID_IQ1S) + 
0xffffffffffffffff, 0xffffffffffffff01, 0xffffffffffff0000, 0xffffffffffff01ff, + 0xffffffffffff0101, 0xffffffffff00ff00, 0xffffffffff000000, 0xffffffffff01ffff, + 0xffffffffff01ff01, 0xffffffffff0101ff, 0xffffffffff010101, 0xffffffff00ff0000, + 0xffffffff0000ff00, 0xffffffff000000ff, 0xffffffff00000001, 0xffffffff00010000, + 0xffffffff01ffffff, 0xffffffff01ffff01, 0xffffffff01ff01ff, 0xffffffff01ff0101, + 0xffffffff01000000, 0xffffffff0101ffff, 0xffffffff0101ff01, 0xffffffff010101ff, + 0xffffffff01010101, 0xffffff00ffff00ff, 0xffffff00ffff0000, 0xffffff00ff00ff00, + 0xffffff00ff0000ff, 0xffffff00ff000001, 0xffffff00ff000100, 0xffffff00ff000101, + 0xffffff00ff010000, 0xffffff0000ffff00, 0xffffff0000ff0001, 0xffffff0000ff0100, + 0xffffff000000ff01, 0xffffff0000000000, 0xffffff0000000101, 0xffffff000001ff00, + 0xffffff00000100ff, 0xffffff0000010001, 0xffffff00000101ff, 0xffffff0001ff0000, + 0xffffff000100ff00, 0xffffff00010000ff, 0xffffff0001000001, 0xffffff0001010000, + 0xffffff01ffffffff, 0xffffff01ffffff01, 0xffffff01ffff01ff, 0xffffff01ffff0101, + 0xffffff01ff000000, 0xffffff01ff01ffff, 0xffffff01ff01ff01, 0xffffff01ff0101ff, + 0xffffff01ff010101, 0xffffff0100ff0000, 0xffffff010000ff00, 0xffffff0100000100, + 0xffffff01000100ff, 0xffffff0100010100, 0xffffff0101ffffff, 0xffffff0101ffff01, + 0xffffff0101ff01ff, 0xffffff0101ff0101, 0xffffff010100ff00, 0xffffff0101000000, + 0xffffff0101000100, 0xffffff010101ffff, 0xffffff010101ff01, 0xffffff01010101ff, + 0xffffff0101010101, 0xffff00ffff00ff00, 0xffff00ffff0000ff, 0xffff00ffff000001, + 0xffff00ffff010000, 0xffff00ff00ffff00, 0xffff00ff00ff0100, 0xffff00ff00000000, + 0xffff00ff00000101, 0xffff00ff000100ff, 0xffff00ff00010000, 0xffff00ff0100ff00, + 0xffff00ff01000100, 0xffff00ff01010000, 0xffff0000ffffff00, 0xffff0000ffff00ff, + 0xffff0000ffff0000, 0xffff0000ffff0001, 0xffff0000ff000000, 0xffff0000ff0001ff, + 0xffff0000ff000101, 0xffff0000ff010100, 0xffff000000ffffff, 0xffff000000ff0000, + 0xffff000000ff0101, 
0xffff00000000ffff, 0xffff00000000ff00, 0xffff0000000000ff, + 0xffff000000000000, 0xffff000000000001, 0xffff000000000100, 0xffff00000001ffff, + 0xffff00000001ff01, 0xffff000000010000, 0xffff0000000101ff, 0xffff000000010101, + 0xffff000001ffff00, 0xffff00000100ff00, 0xffff000001000000, 0xffff0000010001ff, + 0xffff000001000101, 0xffff00000101ff00, 0xffff0000010100ff, 0xffff000001010000, + 0xffff000001010001, 0xffff000001010100, 0xffff0001ff0000ff, 0xffff0001ff000100, + 0xffff000100ffff00, 0xffff000100ff00ff, 0xffff00010000ffff, 0xffff00010000ff01, + 0xffff000100000000, 0xffff0001000001ff, 0xffff00010001ffff, 0xffff00010001ff00, + 0xffff000100010001, 0xffff000100010100, 0xffff000101ff0000, 0xffff00010100ff00, + 0xffff0001010000ff, 0xffff000101000100, 0xffff01ffffffffff, 0xffff01ffffffff01, + 0xffff01ffffff01ff, 0xffff01ffffff0101, 0xffff01ffff000000, 0xffff01ffff01ffff, + 0xffff01ffff01ff01, 0xffff01ffff0101ff, 0xffff01ffff010101, 0xffff01ff00ff0000, + 0xffff01ff0000ff00, 0xffff01ff00000001, 0xffff01ff00010000, 0xffff01ff01ffffff, + 0xffff01ff01ffff01, 0xffff01ff01ff01ff, 0xffff01ff01ff0101, 0xffff01ff01000000, + 0xffff01ff0101ffff, 0xffff01ff0101ff01, 0xffff01ff010101ff, 0xffff01ff01010101, + 0xffff0100ffff0000, 0xffff0100ff00ff00, 0xffff0100ff0000ff, 0xffff0100ff000100, + 0xffff0100ff0100ff, 0xffff0100ff010000, 0xffff010000ffff00, 0xffff01000000ffff, + 0xffff01000000ff00, 0xffff010000000000, 0xffff01000001ff00, 0xffff0100000100ff, + 0xffff010000010100, 0xffff01000100ff00, 0xffff0100010000ff, 0xffff010001000001, + 0xffff010001000100, 0xffff010001010000, 0xffff0101ffffffff, 0xffff0101ffffff01, + 0xffff0101ffff01ff, 0xffff0101ffff0101, 0xffff0101ff000000, 0xffff0101ff01ffff, + 0xffff0101ff01ff01, 0xffff0101ff0101ff, 0xffff0101ff010101, 0xffff010100ff0000, + 0xffff01010000ff00, 0xffff010100000100, 0xffff01010001ff00, 0xffff010100010000, + 0xffff010101ffffff, 0xffff010101ffff01, 0xffff010101ff0000, 0xffff010101ff01ff, + 0xffff010101ff0101, 0xffff010101000000, 
0xffff01010101ffff, 0xffff01010101ff01, + 0xffff0101010101ff, 0xffff010101010101, 0xff00ffffff00ffff, 0xff00ffffff00ff00, + 0xff00ffffff0000ff, 0xff00ffffff000100, 0xff00ffffff0100ff, 0xff00ffffff010000, + 0xff00ffff00ffff00, 0xff00ffff00ff00ff, 0xff00ffff0000ffff, 0xff00ffff00000000, + 0xff00ffff000001ff, 0xff00ffff0001ff00, 0xff00ffff000100ff, 0xff00ffff00010000, + 0xff00ffff00010100, 0xff00ffff0100ff00, 0xff00ffff010000ff, 0xff00ffff01000001, + 0xff00ffff0101ff00, 0xff00ffff01010000, 0xff00ff00ffffff00, 0xff00ff00ffff00ff, + 0xff00ff00ffff0001, 0xff00ff00ffff0100, 0xff00ff00ff00ffff, 0xff00ff00ff00ff01, + 0xff00ff00ff000000, 0xff00ff00ff0001ff, 0xff00ff00ff01ff00, 0xff00ff00ff0100ff, + 0xff00ff00ff010100, 0xff00ff0000ff0000, 0xff00ff0000ff0101, 0xff00ff000000ffff, + 0xff00ff000000ff00, 0xff00ff000000ff01, 0xff00ff00000000ff, 0xff00ff0000000000, + 0xff00ff0000000001, 0xff00ff0000000100, 0xff00ff000001ffff, 0xff00ff0000010000, + 0xff00ff0001ff00ff, 0xff00ff000100ff01, 0xff00ff0001000000, 0xff00ff000101ff00, + 0xff00ff00010100ff, 0xff00ff01ff00ff00, 0xff00ff01ff0000ff, 0xff00ff01ff000001, + 0xff00ff01ff010000, 0xff00ff0100ffffff, 0xff00ff0100ff0001, 0xff00ff0100ff0100, + 0xff00ff010000ff01, 0xff00ff0100000000, 0xff00ff01000001ff, 0xff00ff0100000101, + 0xff00ff01000100ff, 0xff00ff0100010001, 0xff00ff0101ff0000, 0xff00ff010100ff00, + 0xff00ff01010000ff, 0xff00ff0101000001, 0xff00ff0101010000, 0xff0000ffffffff00, + 0xff0000ffffff0001, 0xff0000ffffff0100, 0xff0000ffff0000ff, 0xff0000ffff000000, + 0xff0000ffff0001ff, 0xff0000ffff000100, 0xff0000ffff01ff00, 0xff0000ffff010001, + 0xff0000ff00ffff00, 0xff0000ff00ff0000, 0xff0000ff00ff0001, 0xff0000ff00ff01ff, + 0xff0000ff00ff0101, 0xff0000ff0000ff00, 0xff0000ff000000ff, 0xff0000ff00000000, + 0xff0000ff00000001, 0xff0000ff00000100, 0xff0000ff0001ff01, 0xff0000ff00010000, + 0xff0000ff000101ff, 0xff0000ff01ff00ff, 0xff0000ff01ff0100, 0xff0000ff0100ffff, + 0xff0000ff010000ff, 0xff0000ff01000000, 0xff0000ff010001ff, 
0xff0000ff01000100, + 0xff0000ff01000101, 0xff0000ff0101ff00, 0xff0000ff010100ff, 0xff0000ff01010000, + 0xff0000ff01010100, 0xff000000ffffff01, 0xff000000ffff0000, 0xff000000ffff0101, + 0xff000000ff00ff00, 0xff000000ff0000ff, 0xff000000ff000000, 0xff000000ff000001, + 0xff000000ff000100, 0xff000000ff01ffff, 0xff000000ff01ff01, 0xff000000ff010000, + 0xff000000ff0101ff, 0xff000000ff010101, 0xff00000000ffff00, 0xff00000000ff00ff, + 0xff00000000ff0000, 0xff00000000ff0001, 0xff0000000000ff00, 0xff0000000000ff01, + 0xff000000000000ff, 0xff00000000000000, 0xff00000000000001, 0xff00000000000100, + 0xff00000000000101, 0xff0000000001ff00, 0xff000000000100ff, 0xff00000000010000, + 0xff00000000010001, 0xff00000000010100, 0xff00000001ffffff, 0xff00000001ffff01, + 0xff00000001ff00ff, 0xff00000001ff0000, 0xff00000001ff01ff, 0xff00000001ff0101, + 0xff0000000100ffff, 0xff0000000100ff00, 0xff000000010000ff, 0xff00000001000000, + 0xff00000001000001, 0xff00000001000100, 0xff00000001000101, 0xff0000000101ffff, + 0xff0000000101ff01, 0xff00000001010000, 0xff000001ffffff00, 0xff000001ffff00ff, + 0xff000001ffff0000, 0xff000001ffff0001, 0xff000001ff000000, 0xff000001ff000001, + 0xff000001ff0001ff, 0xff000001ff000101, 0xff000001ff01ff00, 0xff000001ff010001, + 0xff00000100ffffff, 0xff00000100ffff01, 0xff00000100ff00ff, 0xff00000100ff0000, + 0xff00000100ff01ff, 0xff00000100ff0101, 0xff0000010000ff00, 0xff00000100000000, + 0xff00000100000001, 0xff000001000001ff, 0xff00000100000100, 0xff0000010001ff00, + 0xff000001000100ff, 0xff00000100010000, 0xff000001000101ff, 0xff00000100010100, + 0xff00000100010101, 0xff00000101ff0001, 0xff00000101ff0101, 0xff0000010100ff01, + 0xff00000101000000, 0xff000001010100ff, 0xff00000101010100, 0xff0001ffff00ff00, + 0xff0001ffff000001, 0xff0001ffff010000, 0xff0001ff00ffff00, 0xff0001ff00ff00ff, + 0xff0001ff00ff0001, 0xff0001ff00ff0100, 0xff0001ff0000ffff, 0xff0001ff00000000, + 0xff0001ff000001ff, 0xff0001ff00000101, 0xff0001ff0001ffff, 0xff0001ff0001ff00, + 
0xff0001ff000100ff, 0xff0001ff00010001, 0xff0001ff00010100, 0xff0001ff01ff0000, + 0xff0001ff0100ff00, 0xff0001ff010000ff, 0xff0001ff01010000, 0xff000100ff00ffff, + 0xff000100ff00ff01, 0xff000100ff000000, 0xff000100ff000101, 0xff000100ff01ff00, + 0xff000100ff010000, 0xff00010000ffff01, 0xff00010000ff00ff, 0xff00010000ff0000, + 0xff00010000ff01ff, 0xff0001000000ff00, 0xff000100000000ff, 0xff00010000000000, + 0xff00010000000001, 0xff00010000000100, 0xff00010000000101, 0xff0001000001ffff, + 0xff00010000010000, 0xff00010000010101, 0xff00010001ff0100, 0xff0001000100ff00, + 0xff0001000100ff01, 0xff00010001000000, 0xff000100010001ff, 0xff0001000101ff00, + 0xff00010001010001, 0xff00010001010100, 0xff000101ffff0100, 0xff000101ff000001, + 0xff000101ff0100ff, 0xff000101ff010001, 0xff00010100ff00ff, 0xff00010100ff0001, + 0xff00010100ff0100, 0xff0001010000ffff, 0xff0001010000ff01, 0xff00010100000000, + 0xff000101000001ff, 0xff0001010001ff00, 0xff00010100010001, 0xff00010100010100, + 0xff00010101ff0000, 0xff0001010100ff00, 0xff00010101000001, 0xff00010101000101, + 0xff01ffffffffffff, 0xff01ffffffffff01, 0xff01ffffffff01ff, 0xff01ffffffff0101, + 0xff01ffffff000000, 0xff01ffffff01ffff, 0xff01ffffff01ff01, 0xff01ffffff010000, + 0xff01ffffff0101ff, 0xff01ffffff010101, 0xff01ffff00ff0000, 0xff01ffff0000ff00, + 0xff01ffff00000100, 0xff01ffff0001ff00, 0xff01ffff00010000, 0xff01ffff01ffffff, + 0xff01ffff01ffff01, 0xff01ffff01ff01ff, 0xff01ffff01ff0101, 0xff01ffff01000000, + 0xff01ffff0101ffff, 0xff01ffff0101ff01, 0xff01ffff01010000, 0xff01ffff010101ff, + 0xff01ffff01010101, 0xff01ff00ffff0000, 0xff01ff00ff00ff00, 0xff01ff00ff0000ff, + 0xff01ff00ff000100, 0xff01ff00ff010000, 0xff01ff0000ffff01, 0xff01ff0000ff00ff, + 0xff01ff0000ff0100, 0xff01ff0000000000, 0xff01ff00000001ff, 0xff01ff0000000101, + 0xff01ff000001ff00, 0xff01ff00000100ff, 0xff01ff0000010000, 0xff01ff0000010001, + 0xff01ff0001ff0000, 0xff01ff000100ffff, 0xff01ff0001000001, 0xff01ff0001000100, + 0xff01ff0001010000, 
0xff01ff01ffffff00, 0xff01ff01ffff01ff, 0xff01ff01ffff0101, + 0xff01ff01ff00ff00, 0xff01ff01ff000000, 0xff01ff01ff01ffff, 0xff01ff01ff01ff01, + 0xff01ff01ff0101ff, 0xff01ff01ff010101, 0xff01ff0100ff0000, 0xff01ff010000ff00, + 0xff01ff0100000001, 0xff01ff0100000100, 0xff01ff0100010000, 0xff01ff0101ffff00, + 0xff01ff0101ff01ff, 0xff01ff0101ff0101, 0xff01ff010100ff00, 0xff01ff0101000000, + 0xff01ff010101ffff, 0xff01ff010101ff01, 0xff01ff01010101ff, 0xff01ff0101010101, + 0xff0100ffffff0000, 0xff0100ffff0000ff, 0xff0100ffff000001, 0xff0100ffff000100, + 0xff0100ffff010000, 0xff0100ff00ff00ff, 0xff0100ff00ff0000, 0xff0100ff00ff0001, + 0xff0100ff00ff0100, 0xff0100ff0000ff01, 0xff0100ff00000000, 0xff0100ff000001ff, + 0xff0100ff00000101, 0xff0100ff00010001, 0xff0100ff01ff0000, 0xff0100ff0100ff00, + 0xff0100ff010000ff, 0xff0100ff01000100, 0xff0100ff0101ff00, 0xff0100ff01010000, + 0xff010000ffff0100, 0xff010000ff000000, 0xff010000ff01ff00, 0xff010000ff010100, + 0xff01000000ffffff, 0xff01000000ff0000, 0xff01000000ff01ff, 0xff0100000000ff00, + 0xff010000000000ff, 0xff01000000000000, 0xff01000000000100, 0xff0100000001ff01, + 0xff01000000010000, 0xff010000000101ff, 0xff01000001ff0100, 0xff0100000100ffff, + 0xff010000010000ff, 0xff01000001000000, 0xff010000010001ff, 0xff01000001000101, + 0xff0100000101ff00, 0xff010000010100ff, 0xff01000001010001, 0xff01000001010100, + 0xff010001ffff0000, 0xff010001ff00ffff, 0xff010001ff00ff01, 0xff010001ff000100, + 0xff010001ff010000, 0xff01000100ffff00, 0xff01000100ff0100, 0xff01000100000000, + 0xff0100010001ffff, 0xff0100010001ff00, 0xff01000100010100, 0xff01000101ff00ff, + 0xff01000101ff0001, 0xff0100010100ffff, 0xff01000101000101, 0xff0101ffffffffff, + 0xff0101ffffffff01, 0xff0101ffffff01ff, 0xff0101ffffff0101, 0xff0101ffff000000, + 0xff0101ffff01ffff, 0xff0101ffff01ff01, 0xff0101ffff0101ff, 0xff0101ffff010101, + 0xff0101ff00ff0000, 0xff0101ff0000ff00, 0xff0101ff000000ff, 0xff0101ff00010000, + 0xff0101ff01ffffff, 0xff0101ff01ffff01, 
0xff0101ff01ff01ff, 0xff0101ff01ff0101, + 0xff0101ff0101ffff, 0xff0101ff0101ff01, 0xff0101ff010101ff, 0xff0101ff01010101, + 0xff010100ffff0100, 0xff010100ff00ff00, 0xff010100ff0000ff, 0xff010100ff000100, + 0xff010100ff010000, 0xff01010000ff0001, 0xff01010000ff0100, 0xff0101000000ff01, + 0xff01010000000000, 0xff0101000001ff00, 0xff010100000100ff, 0xff01010000010001, + 0xff01010000010100, 0xff01010001ff0000, 0xff0101000100ffff, 0xff01010001000001, + 0xff01010001000100, 0xff010100010100ff, 0xff01010001010000, 0xff010101ffffffff, + 0xff010101ffffff01, 0xff010101ffff01ff, 0xff010101ffff0101, 0xff010101ff01ffff, + 0xff010101ff01ff01, 0xff010101ff0101ff, 0xff010101ff010101, 0xff01010100ff0000, + 0xff0101010000ff00, 0xff01010100000001, 0xff01010100000100, 0xff01010100010000, + 0xff01010101ffffff, 0xff01010101ffff01, 0xff01010101ff01ff, 0xff01010101ff0101, + 0xff01010101000000, 0xff0101010101ffff, 0xff0101010101ff01, 0xff010101010101ff, + 0xff01010101010101, 0x00ffffffffff0000, 0x00ffffffff00ff00, 0x00ffffffff000001, + 0x00ffffffff010000, 0x00ffffff00ff0100, 0x00ffffff0000ff01, 0x00ffffff00000000, + 0x00ffffff000001ff, 0x00ffffff00000101, 0x00ffffff0001ff00, 0x00ffffff000100ff, + 0x00ffffff00010001, 0x00ffffff010000ff, 0x00ffffff01000100, 0x00ffffff0101ff00, + 0x00ffffff01010001, 0x00ffff00ffffffff, 0x00ffff00ffffff00, 0x00ffff00ffff00ff, + 0x00ffff00ffff0001, 0x00ffff00ffff0100, 0x00ffff00ff00ff01, 0x00ffff00ff000000, + 0x00ffff00ff000001, 0x00ffff00ff0001ff, 0x00ffff00ff000101, 0x00ffff00ff01ff00, + 0x00ffff00ff010001, 0x00ffff00ff010100, 0x00ffff0000ff0000, 0x00ffff0000ff01ff, + 0x00ffff0000ff0101, 0x00ffff000000ff00, 0x00ffff00000000ff, 0x00ffff0000000000, + 0x00ffff0000000001, 0x00ffff0000000100, 0x00ffff0000000101, 0x00ffff0000010000, + 0x00ffff00000101ff, 0x00ffff0000010101, 0x00ffff0001ffff00, 0x00ffff0001ff00ff, + 0x00ffff0001ff0001, 0x00ffff000100ffff, 0x00ffff000100ff01, 0x00ffff0001000000, + 0x00ffff000101ffff, 0x00ffff000101ff00, 0x00ffff000101ff01, 
0x00ffff01ffff0000, + 0x00ffff01ff00ff00, 0x00ffff01ff0000ff, 0x00ffff01ff000001, 0x00ffff01ff010000, + 0x00ffff0100ffff00, 0x00ffff010000ff01, 0x00ffff0100000000, 0x00ffff0100000101, + 0x00ffff01000100ff, 0x00ffff0100010100, 0x00ffff0101ff0100, 0x00ffff01010000ff, + 0x00ffff0101010000, 0x00ff00ffffffff00, 0x00ff00ffff000000, 0x00ff00ffff000100, + 0x00ff00ffff010100, 0x00ff00ff00ff0000, 0x00ff00ff00ff01ff, 0x00ff00ff00ff0101, + 0x00ff00ff0000ff00, 0x00ff00ff000000ff, 0x00ff00ff00000000, 0x00ff00ff00000001, + 0x00ff00ff0001ff00, 0x00ff00ff0001ff01, 0x00ff00ff00010000, 0x00ff00ff000101ff, + 0x00ff00ff00010101, 0x00ff00ff01ffff00, 0x00ff00ff01ff0001, 0x00ff00ff01ff0100, + 0x00ff00ff0100ffff, 0x00ff00ff0100ff01, 0x00ff00ff01000000, 0x00ff00ff0101ffff, + 0x00ff00ff0101ff00, 0x00ff00ff01010100, 0x00ff0000ffffff00, 0x00ff0000ffffff01, + 0x00ff0000ffff0000, 0x00ff0000ffff0101, 0x00ff0000ff00ff00, 0x00ff0000ff0000ff, + 0x00ff0000ff000000, 0x00ff0000ff000001, 0x00ff0000ff000100, 0x00ff0000ff01ffff, + 0x00ff0000ff010000, 0x00ff0000ff010101, 0x00ff000000ffff00, 0x00ff000000ff00ff, + 0x00ff000000ff0000, 0x00ff000000ff0001, 0x00ff000000ff0100, 0x00ff00000000ffff, + 0x00ff00000000ff00, 0x00ff0000000000ff, 0x00ff000000000000, 0x00ff000000000001, + 0x00ff0000000001ff, 0x00ff000000000100, 0x00ff00000001ff00, 0x00ff0000000100ff, + 0x00ff000000010000, 0x00ff000000010001, 0x00ff000000010100, 0x00ff000001ffff01, + 0x00ff000001ff00ff, 0x00ff000001ff0000, 0x00ff000001ff01ff, 0x00ff00000100ff00, + 0x00ff0000010000ff, 0x00ff000001000000, 0x00ff000001000001, 0x00ff000001000100, + 0x00ff000001000101, 0x00ff000001010000, 0x00ff0000010101ff, 0x00ff000001010101, + 0x00ff0001ffffff00, 0x00ff0001ffff0000, 0x00ff0001ffff0100, 0x00ff0001ff0000ff, + 0x00ff0001ff000000, 0x00ff0001ff0001ff, 0x00ff0001ff000101, 0x00ff0001ff01ff00, + 0x00ff0001ff0100ff, 0x00ff0001ff010100, 0x00ff000100ffffff, 0x00ff000100ffff01, + 0x00ff000100ff0000, 0x00ff000100ff01ff, 0x00ff00010000ffff, 0x00ff00010000ff00, + 
0x00ff00010000ff01, 0x00ff000100000000, 0x00ff000100000001, 0x00ff000100000100, + 0x00ff00010001ff01, 0x00ff000100010000, 0x00ff0001000101ff, 0x00ff000101ffff00, + 0x00ff000101ff0000, 0x00ff000101ff0101, 0x00ff0001010000ff, 0x00ff000101000000, + 0x00ff00010101ff00, 0x00ff0001010100ff, 0x00ff000101010001, 0x00ff01ffffff0000, + 0x00ff01ffff00ff00, 0x00ff01ffff000000, 0x00ff01ffff000101, 0x00ff01ffff010000, + 0x00ff01ff00ffff01, 0x00ff01ff00ff0100, 0x00ff01ff0000ffff, 0x00ff01ff00000000, + 0x00ff01ff000001ff, 0x00ff01ff0001ff00, 0x00ff01ff000100ff, 0x00ff01ff00010001, + 0x00ff01ff00010100, 0x00ff01ff01ff0000, 0x00ff01ff0100ff00, 0x00ff01ff010000ff, + 0x00ff01ff01000001, 0x00ff01ff01000100, 0x00ff01ff01010000, 0x00ff0100ffffff00, + 0x00ff0100ffff0000, 0x00ff0100ffff0001, 0x00ff0100ffff0101, 0x00ff0100ff00ffff, + 0x00ff0100ff0000ff, 0x00ff0100ff000000, 0x00ff0100ff0001ff, 0x00ff0100ff01ff00, + 0x00ff0100ff0100ff, 0x00ff0100ff010001, 0x00ff010000ffffff, 0x00ff010000ff0000, + 0x00ff010000ff0101, 0x00ff01000000ff00, 0x00ff01000000ff01, 0x00ff0100000000ff, + 0x00ff010000000000, 0x00ff010000000001, 0x00ff010000000100, 0x00ff01000001ffff, + 0x00ff01000001ff01, 0x00ff010000010000, 0x00ff010000010001, 0x00ff010000010101, + 0x00ff010001ff0001, 0x00ff010001ff0100, 0x00ff01000100ff01, 0x00ff010001000000, + 0x00ff010001000001, 0x00ff0100010001ff, 0x00ff01000101ff00, 0x00ff0100010100ff, + 0x00ff010001010001, 0x00ff010001010100, 0x00ff0101ff000001, 0x00ff010100ff00ff, + 0x00ff010100ff0001, 0x00ff010100ff0100, 0x00ff010100000000, 0x00ff0101000001ff, + 0x00ff010100000101, 0x00ff0101000100ff, 0x00ff010100010100, 0x00ff0101010000ff, + 0x00ff010101010000, 0x0000ffffffffff00, 0x0000ffffffff00ff, 0x0000ffffffff0000, + 0x0000ffffffff0001, 0x0000ffffffff0100, 0x0000ffffff00ff01, 0x0000ffffff000000, + 0x0000ffffff000101, 0x0000ffffff01ff00, 0x0000ffffff0100ff, 0x0000ffffff010100, + 0x0000ffff00ffffff, 0x0000ffff00ff0000, 0x0000ffff00ff01ff, 0x0000ffff0000ff00, + 0x0000ffff000000ff, 
0x0000ffff00000000, 0x0000ffff00000001, 0x0000ffff00000100, + 0x0000ffff00010000, 0x0000ffff000101ff, 0x0000ffff01ff0001, 0x0000ffff01ff0100, + 0x0000ffff01000000, 0x0000ffff010001ff, 0x0000ffff0101ffff, 0x0000ffff0101ff00, + 0x0000ffff01010001, 0x0000ffff01010100, 0x0000ff00ffff0000, 0x0000ff00ffff01ff, + 0x0000ff00ffff0100, 0x0000ff00ffff0101, 0x0000ff00ff00ff00, 0x0000ff00ff0000ff, + 0x0000ff00ff000000, 0x0000ff00ff000001, 0x0000ff00ff0001ff, 0x0000ff00ff000100, + 0x0000ff00ff01ffff, 0x0000ff00ff010000, 0x0000ff00ff010001, 0x0000ff00ff0101ff, + 0x0000ff00ff010101, 0x0000ff0000ffff00, 0x0000ff0000ff00ff, 0x0000ff0000ff0000, + 0x0000ff0000ff0001, 0x0000ff0000ff0100, 0x0000ff000000ffff, 0x0000ff000000ff00, + 0x0000ff000000ff01, 0x0000ff00000000ff, 0x0000ff0000000000, 0x0000ff0000000001, + 0x0000ff00000001ff, 0x0000ff0000000100, 0x0000ff0000000101, 0x0000ff000001ff00, + 0x0000ff00000100ff, 0x0000ff0000010000, 0x0000ff0000010001, 0x0000ff0000010100, + 0x0000ff0001ffff01, 0x0000ff0001ff0000, 0x0000ff000100ff00, 0x0000ff00010000ff, + 0x0000ff0001000000, 0x0000ff0001000001, 0x0000ff0001000100, 0x0000ff000101ffff, + 0x0000ff0001010000, 0x0000ff0001010101, 0x0000ff01ffffff00, 0x0000ff01ffff0001, + 0x0000ff01ff00ff01, 0x0000ff01ff000000, 0x0000ff01ff000101, 0x0000ff01ff01ff00, + 0x0000ff01ff0100ff, 0x0000ff0100ffff01, 0x0000ff0100ff0000, 0x0000ff0100ff0101, + 0x0000ff010000ff00, 0x0000ff01000000ff, 0x0000ff0100000000, 0x0000ff0100000001, + 0x0000ff0100000100, 0x0000ff010001ff01, 0x0000ff0100010000, 0x0000ff0101ff0000, + 0x0000ff010100ffff, 0x0000ff010100ff01, 0x0000ff0101000000, 0x0000ff0101000100, + 0x0000ff0101000101, 0x0000ff01010100ff, 0x000000ffffff00ff, 0x000000ffffff0000, + 0x000000ffff00ff00, 0x000000ffff0000ff, 0x000000ffff000000, 0x000000ffff000001, + 0x000000ffff0001ff, 0x000000ffff000100, 0x000000ffff01ff00, 0x000000ffff010000, + 0x000000ffff0101ff, 0x000000ffff010101, 0x000000ff00ffff00, 0x000000ff00ff00ff, + 0x000000ff00ff0000, 0x000000ff00ff0001, 
0x000000ff00ff0100, 0x000000ff00ff0101, + 0x000000ff0000ffff, 0x000000ff0000ff00, 0x000000ff000000ff, 0x000000ff00000000, + 0x000000ff00000001, 0x000000ff000001ff, 0x000000ff00000100, 0x000000ff00000101, + 0x000000ff0001ff00, 0x000000ff0001ff01, 0x000000ff000100ff, 0x000000ff00010000, + 0x000000ff00010001, 0x000000ff00010100, 0x000000ff01ffffff, 0x000000ff01ff01ff, + 0x000000ff01ff0101, 0x000000ff0100ff00, 0x000000ff010000ff, 0x000000ff01000000, + 0x000000ff01000001, 0x000000ff01000100, 0x000000ff0101ff00, 0x000000ff010100ff, + 0x000000ff01010000, 0x000000ff01010101, 0x00000000ffffff00, 0x00000000ffffff01, + 0x00000000ffff00ff, 0x00000000ffff0000, 0x00000000ffff0001, 0x00000000ffff0100, + 0x00000000ff00ffff, 0x00000000ff00ff00, 0x00000000ff00ff01, 0x00000000ff0000ff, + 0x00000000ff000000, 0x00000000ff000001, 0x00000000ff000100, 0x00000000ff000101, + 0x00000000ff01ff00, 0x00000000ff0100ff, 0x00000000ff010000, 0x00000000ff010001, + 0x00000000ff010100, 0x0000000000ffffff, 0x0000000000ffff00, 0x0000000000ffff01, + 0x0000000000ff00ff, 0x0000000000ff0000, 0x0000000000ff0001, 0x0000000000ff01ff, + 0x0000000000ff0100, 0x000000000000ffff, 0x000000000000ff00, 0x000000000000ff01, + 0x00000000000000ff, 0x0000000000000000, 0x0000000000000001, 0x00000000000001ff, + 0x0000000000000100, 0x0000000000000101, 0x000000000001ffff, 0x000000000001ff00, + 0x00000000000100ff, 0x0000000000010000, 0x0000000000010001, 0x00000000000101ff, + 0x0000000000010100, 0x0000000000010101, 0x0000000001ffff00, 0x0000000001ff00ff, + 0x0000000001ff0000, 0x0000000001ff0100, 0x0000000001ff0101, 0x000000000100ffff, + 0x000000000100ff00, 0x00000000010000ff, 0x0000000001000000, 0x0000000001000001, + 0x00000000010001ff, 0x0000000001000100, 0x000000000101ff00, 0x00000000010100ff, + 0x0000000001010000, 0x0000000001010001, 0x0000000001010100, 0x00000001ffffffff, + 0x00000001ffffff00, 0x00000001ffffff01, 0x00000001ffff00ff, 0x00000001ffff0001, + 0x00000001ffff01ff, 0x00000001ffff0100, 0x00000001ff00ff00, 
0x00000001ff0000ff, + 0x00000001ff000000, 0x00000001ff0001ff, 0x00000001ff000100, 0x00000001ff01ffff, + 0x00000001ff01ff00, 0x00000001ff01ff01, 0x00000001ff0100ff, 0x00000001ff010000, + 0x00000001ff010001, 0x00000001ff0101ff, 0x00000001ff010100, 0x0000000100ffff00, + 0x0000000100ff0000, 0x0000000100ff0001, 0x0000000100ff01ff, 0x0000000100ff0100, + 0x0000000100ff0101, 0x000000010000ffff, 0x000000010000ff00, 0x000000010000ff01, + 0x00000001000000ff, 0x0000000100000000, 0x0000000100000001, 0x00000001000001ff, + 0x0000000100000100, 0x0000000100000101, 0x000000010001ff00, 0x00000001000100ff, + 0x0000000100010000, 0x0000000100010100, 0x0000000101ffff01, 0x0000000101ff0000, + 0x0000000101ff0001, 0x0000000101ff01ff, 0x0000000101ff0100, 0x0000000101ff0101, + 0x000000010100ff00, 0x0000000101000000, 0x0000000101000101, 0x000000010101ff01, + 0x0000000101010000, 0x0000000101010001, 0x00000001010101ff, 0x0000000101010100, + 0x000001ffffff00ff, 0x000001ffffff0000, 0x000001ffffff0001, 0x000001ffffff0100, + 0x000001ffff00ffff, 0x000001ffff000000, 0x000001ffff0001ff, 0x000001ffff01ff00, + 0x000001ffff010101, 0x000001ff00ff0000, 0x000001ff00ff01ff, 0x000001ff00ff0101, + 0x000001ff0000ff00, 0x000001ff000000ff, 0x000001ff00000000, 0x000001ff00000001, + 0x000001ff000001ff, 0x000001ff00000100, 0x000001ff0001ffff, 0x000001ff0001ff01, + 0x000001ff000100ff, 0x000001ff00010000, 0x000001ff01ffff01, 0x000001ff01ff0100, + 0x000001ff0100ffff, 0x000001ff0100ff01, 0x000001ff01000000, 0x000001ff010001ff, + 0x000001ff0101ff00, 0x000001ff01010100, 0x00000100ffffff00, 0x00000100ffffff01, + 0x00000100ffff0000, 0x00000100ffff0101, 0x00000100ff00ff00, 0x00000100ff0000ff, + 0x00000100ff000000, 0x00000100ff000001, 0x00000100ff000100, 0x00000100ff010000, + 0x0000010000ffff00, 0x0000010000ff00ff, 0x0000010000ff0000, 0x0000010000ff0001, + 0x0000010000ff0100, 0x000001000000ffff, 0x000001000000ff00, 0x000001000000ff01, + 0x00000100000000ff, 0x0000010000000000, 0x0000010000000001, 0x00000100000001ff, + 
0x0000010000000100, 0x0000010000000101, 0x000001000001ff00, 0x00000100000100ff, + 0x0000010000010000, 0x0000010000010001, 0x0000010000010100, 0x0000010001ffff00, + 0x0000010001ff0000, 0x0000010001ff0100, 0x000001000100ff00, 0x00000100010000ff, + 0x0000010001000000, 0x0000010001000001, 0x00000100010001ff, 0x0000010001000100, + 0x0000010001010000, 0x00000101ffff00ff, 0x00000101ffff01ff, 0x00000101ff000000, + 0x00000101ff000101, 0x00000101ff01ffff, 0x00000101ff010000, 0x00000101ff010001, + 0x00000101ff010100, 0x0000010100ff0000, 0x0000010100ff01ff, 0x0000010100ff0100, + 0x000001010000ff00, 0x0000010100000000, 0x0000010100000001, 0x00000101000001ff, + 0x0000010100000100, 0x000001010001ff01, 0x0000010100010000, 0x00000101000101ff, + 0x0000010100010101, 0x0000010101ffff00, 0x0000010101ff0101, 0x000001010100ff01, + 0x0000010101000000, 0x0000010101000001, 0x00000101010001ff, 0x0000010101000101, + 0x000001010101ff00, 0x0001ffffffff0000, 0x0001ffffff0000ff, 0x0001ffffff000001, + 0x0001ffffff000100, 0x0001ffffff010000, 0x0001ffff00ff00ff, 0x0001ffff0000ffff, + 0x0001ffff00000000, 0x0001ffff00000001, 0x0001ffff000001ff, 0x0001ffff00000101, + 0x0001ffff0001ff00, 0x0001ffff000100ff, 0x0001ffff00010001, 0x0001ffff00010100, + 0x0001ffff01ffff00, 0x0001ffff01000001, 0x0001ffff01010000, 0x0001ff00ffffff00, + 0x0001ff00ffff00ff, 0x0001ff00ffff0001, 0x0001ff00ffff0100, 0x0001ff00ff00ff01, + 0x0001ff00ff000000, 0x0001ff00ff01ff00, 0x0001ff00ff01ff01, 0x0001ff00ff010001, + 0x0001ff00ff010100, 0x0001ff0000ff0000, 0x0001ff0000ff0100, 0x0001ff000000ff00, + 0x0001ff0000000000, 0x0001ff0000000001, 0x0001ff0000000100, 0x0001ff0000010000, + 0x0001ff0000010001, 0x0001ff0000010101, 0x0001ff0001ff00ff, 0x0001ff0001ff0101, + 0x0001ff000100ff01, 0x0001ff0001000000, 0x0001ff000101ff00, 0x0001ff0001010001, + 0x0001ff0001010100, 0x0001ff01ff00ff00, 0x0001ff01ff000001, 0x0001ff01ff000100, + 0x0001ff0100ffffff, 0x0001ff0100ffff00, 0x0001ff0100ff0001, 0x0001ff0100000000, + 0x0001ff0100000001, 
0x0001ff01000001ff, 0x0001ff010001ffff, 0x0001ff0101ff0000, + 0x0001ff010100ff00, 0x0001ff0101000001, 0x0001ff0101010000, 0x000100ffff00ff00, + 0x000100ffff00ff01, 0x000100ffff000000, 0x000100ffff000001, 0x000100ffff000101, + 0x000100ffff01ff00, 0x000100ffff010001, 0x000100ffff010100, 0x000100ff00ffffff, + 0x000100ff00ffff01, 0x000100ff00ff0000, 0x000100ff00ff01ff, 0x000100ff00ff0101, + 0x000100ff0000ff00, 0x000100ff000000ff, 0x000100ff00000000, 0x000100ff00000001, + 0x000100ff00000100, 0x000100ff00000101, 0x000100ff0001ffff, 0x000100ff0001ff01, + 0x000100ff00010000, 0x000100ff01ff00ff, 0x000100ff01ff0000, 0x000100ff01ff0100, + 0x000100ff0100ffff, 0x000100ff0100ff01, 0x000100ff010000ff, 0x000100ff01000000, + 0x000100ff01000001, 0x000100ff010001ff, 0x000100ff01000101, 0x000100ff0101ff00, + 0x000100ff010100ff, 0x000100ff01010100, 0x00010000ffff0000, 0x00010000ffff01ff, + 0x00010000ffff0101, 0x00010000ff00ff00, 0x00010000ff000000, 0x00010000ff000001, + 0x00010000ff000100, 0x0001000000ff00ff, 0x0001000000ff0000, 0x0001000000ff0001, + 0x0001000000ff0100, 0x000100000000ffff, 0x000100000000ff00, 0x00010000000000ff, + 0x0001000000000000, 0x0001000000000001, 0x0001000000000100, 0x000100000001ff00, + 0x00010000000100ff, 0x0001000000010000, 0x0001000000010001, 0x0001000000010100, + 0x0001000001ff0001, 0x0001000001ff0100, 0x0001000001ff0101, 0x000100000100ff00, + 0x0001000001000000, 0x0001000001000001, 0x0001000001000100, 0x0001000001000101, + 0x000100000101ff01, 0x0001000001010000, 0x0001000001010001, 0x00010000010101ff, + 0x00010001ffffff01, 0x00010001ffff0100, 0x00010001ff000000, 0x00010001ff01ffff, + 0x00010001ff010001, 0x00010001ff0101ff, 0x00010001ff010100, 0x0001000100ffffff, + 0x0001000100ff0000, 0x0001000100ff01ff, 0x0001000100ff0101, 0x000100010000ff00, + 0x00010001000000ff, 0x0001000100000000, 0x0001000100000001, 0x00010001000001ff, + 0x0001000100000101, 0x000100010001ffff, 0x0001000100010000, 0x00010001000101ff, + 0x0001000101ffffff, 0x0001000101ffff01, 
0x0001000101ff0000, 0x0001000101ff0101, + 0x00010001010000ff, 0x0001000101000001, 0x00010001010001ff, 0x0001000101000100, + 0x000100010101ffff, 0x00010001010100ff, 0x0001000101010001, 0x0001000101010101, + 0x000101ffff000001, 0x000101ffff000100, 0x000101ffff010000, 0x000101ff00ffff00, + 0x000101ff0000ff01, 0x000101ff00000000, 0x000101ff00000101, 0x000101ff0001ff00, + 0x000101ff00010100, 0x000101ff01ff0000, 0x000101ff0100ff00, 0x000101ff010001ff, + 0x000101ff01010001, 0x00010100ffffff00, 0x00010100ffff00ff, 0x00010100ff00ffff, + 0x00010100ff000000, 0x00010100ff01ff00, 0x00010100ff0100ff, 0x00010100ff010001, + 0x00010100ff010100, 0x0001010000ffffff, 0x0001010000ffff00, 0x0001010000ff0000, + 0x0001010000ff0001, 0x0001010000ff01ff, 0x000101000000ff00, 0x00010100000000ff, + 0x0001010000000000, 0x0001010000000001, 0x0001010000000100, 0x000101000001ffff, + 0x0001010000010000, 0x0001010000010101, 0x0001010001ffff01, 0x0001010001ff00ff, + 0x0001010001ff0101, 0x0001010001000000, 0x000101000101ff00, 0x00010100010100ff, + 0x0001010001010000, 0x0001010001010100, 0x00010101ff00ff00, 0x00010101ff000001, + 0x00010101ff0001ff, 0x0001010100ffff00, 0x0001010100ff00ff, 0x0001010100ff0100, + 0x000101010000ffff, 0x0001010100000000, 0x00010101000001ff, 0x0001010100000101, + 0x00010101000100ff, 0x0001010100010000, 0x0001010100010100, 0x0001010101ff0001, + 0x00010101010000ff, 0x00010101010001ff, 0x0001010101000101, 0x0001010101010001, + 0x01ffffffffffffff, 0x01ffffffffffff01, 0x01ffffffffff01ff, 0x01ffffffffff0101, + 0x01ffffffff01ffff, 0x01ffffffff01ff01, 0x01ffffffff0101ff, 0x01ffffffff010101, + 0x01ffffff00ff0000, 0x01ffffff0000ffff, 0x01ffffff0000ff00, 0x01ffffff000000ff, + 0x01ffffff00000001, 0x01ffffff00000100, 0x01ffffff00010000, 0x01ffffff01ffffff, + 0x01ffffff01ffff01, 0x01ffffff01ff01ff, 0x01ffffff01ff0101, 0x01ffffff01000000, + 0x01ffffff0101ffff, 0x01ffffff0101ff01, 0x01ffffff010101ff, 0x01ffffff01010101, + 0x01ffff00ffff0000, 0x01ffff00ff00ff00, 0x01ffff00ff0000ff, 
0x01ffff00ff000001, + 0x01ffff00ff000100, 0x01ffff00ff010000, 0x01ffff0000ffff00, 0x01ffff0000ff00ff, + 0x01ffff0000ff0100, 0x01ffff000000ffff, 0x01ffff000000ff01, 0x01ffff0000000000, + 0x01ffff0000000001, 0x01ffff00000001ff, 0x01ffff0000000100, 0x01ffff00000100ff, + 0x01ffff0000010001, 0x01ffff0000010100, 0x01ffff0001ff0000, 0x01ffff0001ff0100, + 0x01ffff00010000ff, 0x01ffff0001000001, 0x01ffff0001000100, 0x01ffff0001010000, + 0x01ffff01ffffffff, 0x01ffff01ffffff01, 0x01ffff01ffff01ff, 0x01ffff01ffff0101, + 0x01ffff01ff000000, 0x01ffff01ff01ffff, 0x01ffff01ff01ff01, 0x01ffff01ff0101ff, + 0x01ffff01ff010101, 0x01ffff010000ff00, 0x01ffff01000000ff, 0x01ffff0100000100, + 0x01ffff0100010000, 0x01ffff0101ffffff, 0x01ffff0101ffff01, 0x01ffff0101ff01ff, + 0x01ffff0101ff0101, 0x01ffff0101000000, 0x01ffff010101ffff, 0x01ffff010101ff01, + 0x01ffff01010101ff, 0x01ffff0101010101, 0x01ff00ffff0000ff, 0x01ff00ffff000100, + 0x01ff00ff00ffff00, 0x01ff00ff00ff00ff, 0x01ff00ff0000ff00, 0x01ff00ff00000000, + 0x01ff00ff00000101, 0x01ff00ff0001ff00, 0x01ff00ff000100ff, 0x01ff00ff00010100, + 0x01ff00ff010000ff, 0x01ff00ff01000100, 0x01ff0000ffffff00, 0x01ff0000ffff0100, + 0x01ff0000ff00ff01, 0x01ff0000ff000000, 0x01ff0000ff000101, 0x01ff0000ff010001, + 0x01ff0000ff010100, 0x01ff000000ffffff, 0x01ff000000ffff00, 0x01ff000000ff0000, + 0x01ff000000ff01ff, 0x01ff00000000ff00, 0x01ff0000000000ff, 0x01ff000000000000, + 0x01ff000000000001, 0x01ff000000000100, 0x01ff000000000101, 0x01ff000000010000, + 0x01ff000000010001, 0x01ff0000000101ff, 0x01ff000000010101, 0x01ff000001ffff00, + 0x01ff000001ff00ff, 0x01ff000001ff0001, 0x01ff000001ff0100, 0x01ff00000100ffff, + 0x01ff00000100ff01, 0x01ff000001000000, 0x01ff0000010001ff, 0x01ff000001010001, + 0x01ff0001ff00ff00, 0x01ff0001ff000001, 0x01ff0001ff000100, 0x01ff0001ff010000, + 0x01ff000100ffff00, 0x01ff000100ff00ff, 0x01ff000100ff0100, 0x01ff000100ff0101, + 0x01ff00010000ffff, 0x01ff000100000000, 0x01ff000100000100, 0x01ff000100000101, + 
0x01ff00010001ff00, 0x01ff000100010001, 0x01ff000100010101, 0x01ff000101ff0000, + 0x01ff00010100ff00, 0x01ff000101000101, 0x01ff0001010100ff, 0x01ff01ffffffffff, + 0x01ff01ffffffff01, 0x01ff01ffffff01ff, 0x01ff01ffffff0101, 0x01ff01ffff000000, + 0x01ff01ffff01ffff, 0x01ff01ffff01ff01, 0x01ff01ffff0101ff, 0x01ff01ffff010101, + 0x01ff01ff00ffff00, 0x01ff01ff00ff0000, 0x01ff01ff0000ff00, 0x01ff01ff000000ff, + 0x01ff01ff00000100, 0x01ff01ff00010000, 0x01ff01ff00010100, 0x01ff01ff01ffffff, + 0x01ff01ff01ffff01, 0x01ff01ff01ff01ff, 0x01ff01ff01ff0101, 0x01ff01ff01000000, + 0x01ff01ff0101ffff, 0x01ff01ff0101ff01, 0x01ff01ff010101ff, 0x01ff01ff01010101, + 0x01ff0100ffff0000, 0x01ff0100ffff0001, 0x01ff0100ff00ff00, 0x01ff0100ff0000ff, + 0x01ff0100ff000001, 0x01ff0100ff010000, 0x01ff010000ffff00, 0x01ff010000ff00ff, + 0x01ff010000ff0001, 0x01ff010000ff0100, 0x01ff01000000ffff, 0x01ff01000000ff01, + 0x01ff010000000000, 0x01ff010000000101, 0x01ff01000001ff00, 0x01ff0100000100ff, + 0x01ff010001ff0000, 0x01ff010001000001, 0x01ff010001000100, 0x01ff010001010000, + 0x01ff0101ffffffff, 0x01ff0101ffffff01, 0x01ff0101ffff01ff, 0x01ff0101ffff0101, + 0x01ff0101ff000000, 0x01ff0101ff01ffff, 0x01ff0101ff01ff01, 0x01ff0101ff0101ff, + 0x01ff0101ff010101, 0x01ff010100ff0000, 0x01ff01010000ff00, 0x01ff0101000000ff, + 0x01ff010100000001, 0x01ff010101ffffff, 0x01ff010101ffff01, 0x01ff010101ff01ff, + 0x01ff010101ff0101, 0x01ff010101000000, 0x01ff01010101ffff, 0x01ff01010101ff01, + 0x01ff0101010101ff, 0x01ff010101010101, 0x0100ffffffff0000, 0x0100ffffff00ff00, + 0x0100ffffff000001, 0x0100ffffff0001ff, 0x0100ffffff000100, 0x0100ffffff010000, + 0x0100ffff00ffff00, 0x0100ffff00ff0001, 0x0100ffff00ff0100, 0x0100ffff00000000, + 0x0100ffff000001ff, 0x0100ffff00000101, 0x0100ffff00010100, 0x0100ffff00010101, + 0x0100ffff01ff0000, 0x0100ffff0100ff00, 0x0100ffff010000ff, 0x0100ffff01000001, + 0x0100ffff01000100, 0x0100ffff01010000, 0x0100ff00ffffff00, 0x0100ff00ffff00ff, + 0x0100ff00ffff0001, 
0x0100ff00ffff0100, 0x0100ff00ff00ffff, 0x0100ff00ff000000, + 0x0100ff00ff0001ff, 0x0100ff00ff000101, 0x0100ff00ff01ff00, 0x0100ff00ff0100ff, + 0x0100ff00ff010001, 0x0100ff00ff010100, 0x0100ff0000ffffff, 0x0100ff0000ff0000, + 0x0100ff000000ffff, 0x0100ff000000ff00, 0x0100ff00000000ff, 0x0100ff0000000000, + 0x0100ff0000000001, 0x0100ff0000000100, 0x0100ff000001ff01, 0x0100ff0000010000, + 0x0100ff0001ff00ff, 0x0100ff0001ff0001, 0x0100ff000100ff01, 0x0100ff0001000000, + 0x0100ff00010001ff, 0x0100ff000101ff00, 0x0100ff00010100ff, 0x0100ff0001010001, + 0x0100ff0001010100, 0x0100ff01ffff0000, 0x0100ff01ff00ff00, 0x0100ff01ff0000ff, + 0x0100ff01ff000100, 0x0100ff01ff010000, 0x0100ff0100ff00ff, 0x0100ff0100ff0001, + 0x0100ff0100ff0100, 0x0100ff010000ffff, 0x0100ff010000ff01, 0x0100ff0100000000, + 0x0100ff01000001ff, 0x0100ff0100010001, 0x0100ff0100010100, 0x0100ff0101ff0000, + 0x0100ff01010000ff, 0x0100ff0101000001, 0x0100ff0101010100, 0x010000ffffffff00, + 0x010000ffffff00ff, 0x010000ffffff0001, 0x010000ffff00ffff, 0x010000ffff000000, + 0x010000ffff0001ff, 0x010000ffff010001, 0x010000ff00ffffff, 0x010000ff00ff0101, + 0x010000ff0000ff00, 0x010000ff000000ff, 0x010000ff00000000, 0x010000ff00000001, + 0x010000ff000001ff, 0x010000ff00000100, 0x010000ff0001ffff, 0x010000ff0001ff00, + 0x010000ff0001ff01, 0x010000ff00010000, 0x010000ff01ff00ff, 0x010000ff01ff0001, + 0x010000ff0100ff01, 0x010000ff010000ff, 0x010000ff01000000, 0x010000ff010001ff, + 0x010000ff0101ff00, 0x010000ff01010100, 0x01000000ffffffff, 0x01000000ffff0000, + 0x01000000ffff01ff, 0x01000000ffff0101, 0x01000000ff00ffff, 0x01000000ff00ff00, + 0x01000000ff0000ff, 0x01000000ff000000, 0x01000000ff000001, 0x01000000ff000100, + 0x01000000ff01ff00, 0x01000000ff010000, 0x01000000ff010100, 0x01000000ff010101, + 0x0100000000ffff00, 0x0100000000ff00ff, 0x0100000000ff0000, 0x0100000000ff0001, + 0x0100000000ff0100, 0x010000000000ffff, 0x010000000000ff00, 0x010000000000ff01, + 0x01000000000000ff, 0x0100000000000000, 
0x0100000000000001, 0x01000000000001ff, + 0x0100000000000100, 0x0100000000000101, 0x010000000001ff00, 0x01000000000100ff, + 0x0100000000010000, 0x0100000000010001, 0x0100000000010100, 0x0100000001ffff00, + 0x0100000001ff0000, 0x0100000001ff01ff, 0x010000000100ff00, 0x010000000100ff01, + 0x01000000010000ff, 0x0100000001000000, 0x0100000001000001, 0x0100000001000100, + 0x0100000001000101, 0x010000000101ffff, 0x010000000101ff01, 0x0100000001010000, + 0x01000000010101ff, 0x0100000001010101, 0x01000001ffffff00, 0x01000001ffff00ff, + 0x01000001ff00ffff, 0x01000001ff000000, 0x01000001ff000100, 0x01000001ff01ffff, + 0x01000001ff010001, 0x01000001ff010100, 0x0100000100ff0000, 0x0100000100ff01ff, + 0x0100000100ff0100, 0x010000010000ff00, 0x010000010000ff01, 0x0100000100000000, + 0x0100000100000001, 0x0100000100000100, 0x0100000100010000, 0x01000001000101ff, + 0x0100000101ffff01, 0x0100000101ff00ff, 0x0100000101ff0100, 0x0100000101ff0101, + 0x010000010100ff01, 0x01000001010000ff, 0x0100000101000000, 0x01000001010100ff, + 0x0100000101010001, 0x0100000101010100, 0x010001ffffff0000, 0x010001ffff000001, + 0x010001ffff000100, 0x010001ffff010000, 0x010001ff00ffff00, 0x010001ff00ff0001, + 0x010001ff0000ffff, 0x010001ff0000ff01, 0x010001ff00000000, 0x010001ff00000001, + 0x010001ff00000101, 0x010001ff000100ff, 0x010001ff00010000, 0x010001ff01ff0000, + 0x010001ff0100ff00, 0x010001ff01000001, 0x010001ff01000100, 0x010001ff01010000, + 0x01000100ffff00ff, 0x01000100ffff0001, 0x01000100ffff0100, 0x01000100ff00ffff, + 0x01000100ff00ff01, 0x01000100ff000000, 0x01000100ff0001ff, 0x01000100ff000101, + 0x01000100ff01ffff, 0x01000100ff01ff00, 0x01000100ff0100ff, 0x01000100ff010001, + 0x0100010000ffffff, 0x0100010000ffff01, 0x0100010000ff0000, 0x0100010000ff01ff, + 0x0100010000ff0101, 0x010001000000ff00, 0x01000100000000ff, 0x0100010000000000, + 0x0100010000000001, 0x0100010000000100, 0x010001000001ff01, 0x0100010000010000, + 0x0100010000010001, 0x0100010000010101, 0x0100010001ffff00, 
0x0100010001ff00ff, + 0x010001000100ffff, 0x010001000100ff01, 0x0100010001000000, 0x0100010001000101, + 0x010001000101ff00, 0x0100010001010001, 0x01000101ffff0000, 0x01000101ff000000, + 0x01000101ff010000, 0x0100010100ff00ff, 0x0100010100ff0001, 0x0100010100ff0100, + 0x010001010000ffff, 0x0100010100000000, 0x01000101000001ff, 0x010001010001ff00, + 0x0100010101ff0000, 0x010001010100ff00, 0x01000101010000ff, 0x0100010101000000, + 0x0100010101000001, 0x0101ffffffffffff, 0x0101ffffffffff01, 0x0101ffffffff01ff, + 0x0101ffffffff0101, 0x0101ffffff000000, 0x0101ffffff01ffff, 0x0101ffffff01ff01, + 0x0101ffffff0101ff, 0x0101ffffff010101, 0x0101ffff00ff0000, 0x0101ffff0000ff00, + 0x0101ffff000000ff, 0x0101ffff00000001, 0x0101ffff00000100, 0x0101ffff01ffffff, + 0x0101ffff01ffff01, 0x0101ffff01ff01ff, 0x0101ffff01ff0101, 0x0101ffff01000000, + 0x0101ffff0101ffff, 0x0101ffff0101ff01, 0x0101ffff010101ff, 0x0101ffff01010101, + 0x0101ff00ffff0000, 0x0101ff00ffff0100, 0x0101ff00ff00ff00, 0x0101ff00ff0000ff, + 0x0101ff00ff000001, 0x0101ff00ff000100, 0x0101ff00ff000101, 0x0101ff0000ff0001, + 0x0101ff0000ff0100, 0x0101ff000000ff00, 0x0101ff0000000000, 0x0101ff00000001ff, + 0x0101ff0000000101, 0x0101ff000001ff00, 0x0101ff00000100ff, 0x0101ff0001ff0000, + 0x0101ff000100ffff, 0x0101ff000100ff01, 0x0101ff0001000001, 0x0101ff0001000100, + 0x0101ff01ffffff01, 0x0101ff01ffff01ff, 0x0101ff01ffff0101, 0x0101ff01ff00ffff, + 0x0101ff01ff000100, 0x0101ff01ff01ff01, 0x0101ff01ff0101ff, 0x0101ff01ff010101, + 0x0101ff0100ff0000, 0x0101ff010000ff00, 0x0101ff0100000001, 0x0101ff0100000100, + 0x0101ff0100010000, 0x0101ff0101ffffff, 0x0101ff0101ffff01, 0x0101ff0101ff01ff, + 0x0101ff0101ff0101, 0x0101ff0101000000, 0x0101ff010101ffff, 0x0101ff010101ff01, + 0x0101ff01010101ff, 0x0101ff0101010101, 0x010100ffff000100, 0x010100ffff010000, + 0x010100ff00ffff00, 0x010100ff00ff00ff, 0x010100ff0000ffff, 0x010100ff000000ff, + 0x010100ff00000000, 0x010100ff000001ff, 0x010100ff00000101, 0x010100ff0001ff00, + 
0x010100ff00010000, 0x010100ff00010001, 0x010100ff000101ff, 0x010100ff00010100, + 0x010100ff01ff0000, 0x01010000ffff0001, 0x01010000ffff0100, 0x01010000ff00ffff, + 0x01010000ff00ff01, 0x01010000ff000000, 0x01010000ff0001ff, 0x01010000ff010001, + 0x01010000ff010100, 0x0101000000ffff01, 0x0101000000ff0000, 0x010100000000ff00, + 0x01010000000000ff, 0x0101000000000000, 0x0101000000000001, 0x0101000000000100, + 0x0101000000010000, 0x0101000000010101, 0x0101000001ffff00, 0x0101000001ff00ff, + 0x0101000001ff0000, 0x0101000001ff0001, 0x0101000001ff0100, 0x010100000100ff01, + 0x0101000001000000, 0x01010000010001ff, 0x01010001ffff0000, 0x01010001ff00ff00, + 0x01010001ff000001, 0x01010001ff000101, 0x01010001ff01ff00, 0x01010001ff010000, + 0x0101000100ff00ff, 0x0101000100ff0001, 0x0101000100ff0101, 0x010100010000ff01, + 0x0101000100000000, 0x0101000100000001, 0x01010001000001ff, 0x010100010001ffff, + 0x010100010001ff01, 0x0101000101ff0001, 0x010100010100ffff, 0x0101000101000000, + 0x0101000101000001, 0x0101000101000100, 0x010100010101ff00, 0x01010001010100ff, + 0x0101000101010001, 0x010101ffffffffff, 0x010101ffffffff01, 0x010101ffffff01ff, + 0x010101ffffff0101, 0x010101ffff01ffff, 0x010101ffff01ff01, 0x010101ffff0101ff, + 0x010101ffff010101, 0x010101ff0000ff00, 0x010101ff000000ff, 0x010101ff00000001, + 0x010101ff00000100, 0x010101ff01ffffff, 0x010101ff01ffff01, 0x010101ff01ff01ff, + 0x010101ff01ff0101, 0x010101ff01000000, 0x010101ff0101ffff, 0x010101ff0101ff01, + 0x010101ff010101ff, 0x010101ff01010101, 0x01010100ffff0000, 0x01010100ff0000ff, + 0x01010100ff000100, 0x01010100ff01ff00, 0x01010100ff010000, 0x0101010000ffff00, + 0x010101000000ffff, 0x0101010000000000, 0x0101010000000101, 0x010101000001ff00, + 0x0101010000010001, 0x0101010000010100, 0x010101000100ffff, 0x0101010001000001, + 0x01010101ffffffff, 0x01010101ffffff01, 0x01010101ffff01ff, 0x01010101ffff0101, + 0x01010101ff01ffff, 0x01010101ff01ff01, 0x01010101ff0101ff, 0x01010101ff010101, + 0x010101010000ff00, 
0x01010101000000ff, 0x0101010100000001, 0x0101010101ffffff, + 0x0101010101ffff01, 0x0101010101ff01ff, 0x0101010101ff0101, 0x0101010101000000, + 0x010101010101ffff, 0x010101010101ff01, 0x01010101010101ff, 0x0101010101010101, +GGML_TABLE_END() +#else +GGML_TABLE_BEGIN(uint32_t, iq1s_grid_gpu, NGRID_IQ1S) + 0x00000000, 0x00000002, 0x00000101, 0x00000200, 0x00000202, 0x00010001, 0x00010101, 0x00020000, + 0x00020002, 0x00020200, 0x00020202, 0x01000101, 0x01010001, 0x01010100, 0x01010102, 0x01020101, + 0x02000000, 0x02000002, 0x02000200, 0x02000202, 0x02010101, 0x02020000, 0x02020002, 0x02020200, + 0x02020202, 0x00000110, 0x00000111, 0x00010011, 0x00010110, 0x00010112, 0x00010211, 0x00010212, + 0x00020111, 0x01000011, 0x01000112, 0x01000211, 0x01010012, 0x01010111, 0x01010212, 0x01020011, + 0x01020110, 0x01020112, 0x01020210, 0x02000111, 0x02010011, 0x02010110, 0x02010112, 0x02020111, + 0x00000020, 0x00000022, 0x00000220, 0x00000222, 0x00010121, 0x00020020, 0x00020022, 0x00020220, + 0x00020222, 0x01000121, 0x01010021, 0x01010221, 0x01020120, 0x01020221, 0x02000020, 0x02000022, + 0x02000220, 0x02000222, 0x02010021, 0x02010121, 0x02010221, 0x02020020, 0x02020022, 0x02020220, + 0x02020222, 0x00011001, 0x00011100, 0x00011102, 0x00021101, 0x01001001, 0x01001201, 0x01011101, + 0x01011202, 0x01021100, 0x01021101, 0x02011001, 0x02011201, 0x02021101, 0x00001011, 0x00001110, + 0x00001111, 0x00001112, 0x00011111, 0x00011210, 0x00011212, 0x00021211, 0x01001010, 0x01001111, + 0x01001212, 0x01011010, 0x01011011, 0x01011110, 0x01011111, 0x01011112, 0x01011211, 0x01021010, + 0x01021012, 0x01021111, 0x01021210, 0x01021212, 0x02001011, 0x02011011, 0x02011111, 0x02011210, + 0x02011212, 0x02021011, 0x02021110, 0x02021111, 0x02021112, 0x02021211, 0x00011120, 0x00011221, + 0x01001021, 0x01001120, 0x01011020, 0x01011022, 0x01011121, 0x01011220, 0x01021020, 0x01021021, + 0x01021122, 0x01021221, 0x02001121, 0x02011021, 0x02011120, 0x02011221, 0x00002000, 0x00002002, + 0x00002200, 0x00002202, 
0x00012101, 0x00022000, 0x00022002, 0x00022200, 0x00022202, 0x01002101, + 0x01012001, 0x01012102, 0x01022101, 0x02002000, 0x02002002, 0x02002200, 0x02002202, 0x02012101, + 0x02022000, 0x02022002, 0x02022200, 0x02022202, 0x00002111, 0x00012011, 0x00012110, 0x00012211, + 0x00022110, 0x00022111, 0x01002011, 0x01012010, 0x01012011, 0x01012111, 0x01022011, 0x01022110, + 0x01022211, 0x02012011, 0x02012110, 0x02012112, 0x02012211, 0x02022111, 0x00002020, 0x00002022, + 0x00002220, 0x00002222, 0x00012121, 0x00022020, 0x00022022, 0x00022220, 0x00022222, 0x01002121, + 0x01012021, 0x01012221, 0x01022021, 0x01022121, 0x02002020, 0x02002022, 0x02002121, 0x02002220, + 0x02002222, 0x02012121, 0x02022020, 0x02022022, 0x02022220, 0x02022222, 0x00110000, 0x00110001, + 0x00110100, 0x00110201, 0x00120100, 0x00120101, 0x01100001, 0x01100100, 0x01110000, 0x01110101, + 0x01110200, 0x01120001, 0x01120100, 0x01120101, 0x01120201, 0x02110001, 0x02110100, 0x02110102, + 0x02120001, 0x02120101, 0x00100011, 0x00100110, 0x00100112, 0x00100211, 0x00110010, 0x00110012, + 0x00110111, 0x00110210, 0x00120011, 0x00120110, 0x00120211, 0x01100111, 0x01100212, 0x01110010, + 0x01110011, 0x01110012, 0x01110110, 0x01110111, 0x01110112, 0x01110211, 0x01120010, 0x01120111, + 0x02100110, 0x02110012, 0x02110111, 0x02120011, 0x02120110, 0x00110021, 0x00110120, 0x00110122, + 0x00120121, 0x01100020, 0x01100122, 0x01100221, 0x01110022, 0x01110121, 0x01110220, 0x01110222, + 0x01120120, 0x01120122, 0x02100121, 0x02110021, 0x02110120, 0x02110122, 0x02120121, 0x00101001, + 0x00101102, 0x00101201, 0x00111100, 0x00111101, 0x00111200, 0x00111201, 0x00121001, 0x00121102, + 0x01101001, 0x01101101, 0x01101102, 0x01101200, 0x01101202, 0x01111001, 0x01111100, 0x01111101, + 0x01111102, 0x01111201, 0x01121002, 0x01121101, 0x01121200, 0x02101100, 0x02101201, 0x02111000, + 0x02111100, 0x02111101, 0x02111200, 0x02111201, 0x02111202, 0x02121001, 0x02121100, 0x02121101, + 0x02121201, 0x00101012, 0x00101111, 0x00101212, 0x00111011, 
0x00111110, 0x00111111, 0x00111112, + 0x00111211, 0x00121010, 0x00121012, 0x00121111, 0x00121210, 0x00121212, 0x01101011, 0x01101110, + 0x01101111, 0x01101112, 0x01111011, 0x01111012, 0x01111110, 0x01111111, 0x01111112, 0x01111211, + 0x01111212, 0x01121011, 0x01121110, 0x01121111, 0x01121112, 0x01121211, 0x02101010, 0x02101012, + 0x02101110, 0x02101111, 0x02101210, 0x02101212, 0x02111010, 0x02111011, 0x02111110, 0x02111111, + 0x02111112, 0x02111211, 0x02111212, 0x02121010, 0x02121012, 0x02121111, 0x00101021, 0x00101120, + 0x00101121, 0x00101122, 0x00111121, 0x00111122, 0x00111220, 0x00111222, 0x00121021, 0x00121122, + 0x01101020, 0x01101022, 0x01101120, 0x01101121, 0x01101220, 0x01101222, 0x01111021, 0x01111121, + 0x01111122, 0x01111220, 0x01111221, 0x01121021, 0x01121120, 0x01121121, 0x01121220, 0x01121221, + 0x01121222, 0x02101122, 0x02101222, 0x02111022, 0x02111121, 0x02121120, 0x02121221, 0x00112001, + 0x00112102, 0x00122101, 0x01102001, 0x01102100, 0x01102102, 0x01102201, 0x01112000, 0x01112101, + 0x01112200, 0x01112202, 0x01122000, 0x01122001, 0x01122100, 0x01122102, 0x01122201, 0x02102101, + 0x02112001, 0x02112100, 0x02122101, 0x00112010, 0x00112012, 0x00112111, 0x00112212, 0x00122011, + 0x00122111, 0x01102012, 0x01102110, 0x01102111, 0x01102210, 0x01112011, 0x01112110, 0x01112111, + 0x01112112, 0x01112211, 0x01112212, 0x01122010, 0x01122111, 0x01122212, 0x02102211, 0x02112011, + 0x02112012, 0x02112111, 0x02112210, 0x02122011, 0x02122112, 0x02122211, 0x00102221, 0x00112122, + 0x00122120, 0x00122122, 0x01102120, 0x01102122, 0x01102221, 0x01112020, 0x01112022, 0x01112121, + 0x01112220, 0x01122021, 0x01122122, 0x01122221, 0x02102121, 0x02112021, 0x02112122, 0x02112222, + 0x00200000, 0x00200002, 0x00200200, 0x00200202, 0x00210101, 0x00220000, 0x00220002, 0x00220101, + 0x00220200, 0x00220202, 0x01200101, 0x01210001, 0x01210201, 0x01220001, 0x01220101, 0x02200000, + 0x02200002, 0x02200200, 0x02200202, 0x02210101, 0x02220000, 0x02220002, 0x02220101, 0x02220200, + 
0x02220202, 0x00200111, 0x00210011, 0x00210110, 0x00210211, 0x00220111, 0x01200012, 0x01200110, + 0x01200211, 0x01210111, 0x01210210, 0x01210212, 0x01220011, 0x01220110, 0x01220111, 0x01220112, + 0x02200111, 0x02210010, 0x02210112, 0x02210211, 0x02220111, 0x00200021, 0x00200220, 0x00200222, + 0x00210021, 0x00210121, 0x00220020, 0x00220022, 0x00220220, 0x00220222, 0x01200121, 0x01210021, + 0x01210122, 0x01210221, 0x01220121, 0x02200021, 0x02200220, 0x02200222, 0x02210021, 0x02210121, + 0x02220020, 0x02220022, 0x02220220, 0x02220222, 0x00201101, 0x00211100, 0x00211102, 0x00211201, + 0x00221101, 0x01201100, 0x01201101, 0x01201102, 0x01201201, 0x01211002, 0x01211101, 0x01211200, + 0x01211202, 0x01221102, 0x02201101, 0x02211001, 0x02211100, 0x02211201, 0x02221001, 0x02221101, + 0x00201211, 0x00211111, 0x00221011, 0x00221211, 0x01201010, 0x01201111, 0x01201210, 0x01211011, + 0x01211110, 0x01211111, 0x01211211, 0x01221012, 0x01221111, 0x01221210, 0x02201211, 0x02211010, + 0x02211110, 0x02211111, 0x02211210, 0x02211212, 0x02221011, 0x02221110, 0x02221112, 0x02221211, + 0x00201121, 0x00211020, 0x00211022, 0x00211221, 0x00221121, 0x01201021, 0x01201221, 0x01211121, + 0x01221020, 0x01221021, 0x01221221, 0x02201120, 0x02201122, 0x02211020, 0x02211222, 0x00202000, + 0x00202002, 0x00202200, 0x00202202, 0x00212101, 0x00222000, 0x00222002, 0x00222200, 0x00222202, + 0x01202101, 0x01212001, 0x01212100, 0x01222101, 0x02202000, 0x02202002, 0x02202200, 0x02202202, + 0x02222000, 0x02222002, 0x02222200, 0x02222202, 0x00202211, 0x00212011, 0x00212110, 0x00212211, + 0x00222111, 0x01202112, 0x01202211, 0x01212012, 0x01212111, 0x01222011, 0x01222110, 0x01222112, + 0x01222211, 0x02202111, 0x02212010, 0x02212112, 0x02212211, 0x02222110, 0x02222111, 0x00202020, + 0x00202022, 0x00202220, 0x00202222, 0x00222020, 0x00222022, 0x00222220, 0x00222222, 0x01202121, + 0x01212021, 0x01212122, 0x01212221, 0x01222121, 0x02202020, 0x02202022, 0x02202220, 0x02202222, + 0x02212121, 0x02222020, 0x02222022, 
0x02222220, 0x02222222, 0x10000101, 0x10010001, 0x10010102, + 0x10020101, 0x11000201, 0x11010002, 0x11010101, 0x11010200, 0x11010202, 0x11020001, 0x11020100, + 0x11020102, 0x12010100, 0x12010201, 0x12020001, 0x12020102, 0x10000010, 0x10000011, 0x10000110, + 0x10000112, 0x10000211, 0x10010012, 0x10010111, 0x10010112, 0x10010210, 0x10010212, 0x10020011, + 0x10020112, 0x10020211, 0x11000111, 0x11000210, 0x11000212, 0x11010011, 0x11010110, 0x11010111, + 0x11010112, 0x11010211, 0x11010212, 0x11020111, 0x11020210, 0x11020212, 0x12000011, 0x12000110, + 0x12000112, 0x12010010, 0x12010012, 0x12010111, 0x12020010, 0x12020011, 0x12020012, 0x10000121, + 0x10010021, 0x10010120, 0x10010122, 0x10020121, 0x11000021, 0x11010022, 0x11010121, 0x11010222, + 0x11020120, 0x11020221, 0x12000221, 0x12010120, 0x12020121, 0x10001001, 0x10011101, 0x10011201, + 0x10021201, 0x11001101, 0x11001200, 0x11001202, 0x11011001, 0x11011100, 0x11011101, 0x11011102, + 0x11021001, 0x11021002, 0x11021101, 0x11021200, 0x11021202, 0x12001001, 0x12001102, 0x12001201, + 0x12011000, 0x12011002, 0x12011101, 0x12021000, 0x12021001, 0x12021201, 0x10001011, 0x10001012, + 0x10001111, 0x10001212, 0x10011011, 0x10011110, 0x10011111, 0x10011112, 0x10011211, 0x10021010, + 0x10021111, 0x10021212, 0x11001011, 0x11001110, 0x11001111, 0x11001112, 0x11001211, 0x11011010, + 0x11011011, 0x11011110, 0x11011111, 0x11011112, 0x11011210, 0x11011211, 0x11021011, 0x11021110, + 0x11021111, 0x11021112, 0x11021211, 0x12001012, 0x12001110, 0x12001111, 0x12001210, 0x12011011, + 0x12011110, 0x12011111, 0x12011112, 0x12011211, 0x12011212, 0x12021111, 0x12021210, 0x12021212, + 0x10001021, 0x10001121, 0x10001221, 0x10011120, 0x10011121, 0x10011220, 0x10011222, 0x10021021, + 0x10021120, 0x10021221, 0x11001020, 0x11001022, 0x11001121, 0x11001220, 0x11011020, 0x11011021, + 0x11011022, 0x11011121, 0x11011122, 0x11011221, 0x11021022, 0x11021121, 0x11021220, 0x12001021, + 0x12001121, 0x12001222, 0x12011120, 0x12011121, 0x12021021, 0x12021120, 
0x12021122, 0x10002101, + 0x10012001, 0x10012101, 0x10012202, 0x10022101, 0x11002002, 0x11002201, 0x11012000, 0x11012101, + 0x11012200, 0x11022001, 0x11022100, 0x11022102, 0x11022201, 0x12002101, 0x12012001, 0x12012100, + 0x12012102, 0x12012201, 0x12022101, 0x10002011, 0x10002111, 0x10002112, 0x10002212, 0x10012010, + 0x10012110, 0x10012111, 0x10012210, 0x10022011, 0x10022110, 0x10022112, 0x11002010, 0x11002111, + 0x11002212, 0x11012011, 0x11012012, 0x11012110, 0x11012111, 0x11012112, 0x11012211, 0x11022010, + 0x11022012, 0x11022111, 0x11022112, 0x11022212, 0x12002112, 0x12002211, 0x12012012, 0x12012111, + 0x12012112, 0x12012210, 0x12022011, 0x12022110, 0x12022112, 0x12022211, 0x10012122, 0x11002120, + 0x11002122, 0x11002221, 0x11012121, 0x11012220, 0x11012222, 0x11022120, 0x11022221, 0x12012120, + 0x12022121, 0x10100001, 0x10100100, 0x10100101, 0x10100102, 0x10100201, 0x10110002, 0x10110101, + 0x10110202, 0x10120001, 0x10120100, 0x10120201, 0x11100000, 0x11100101, 0x11100200, 0x11110001, + 0x11110100, 0x11110101, 0x11110102, 0x11110201, 0x11120101, 0x11120200, 0x12100102, 0x12100201, + 0x12110101, 0x12110200, 0x12120000, 0x12120001, 0x12120102, 0x12120201, 0x10100111, 0x10100210, + 0x10100211, 0x10100212, 0x10110011, 0x10110110, 0x10110111, 0x10110112, 0x10110210, 0x10110211, + 0x10120010, 0x10120111, 0x10120112, 0x10120210, 0x10120212, 0x11100011, 0x11100110, 0x11100111, + 0x11100112, 0x11100211, 0x11110010, 0x11110011, 0x11110012, 0x11110110, 0x11110111, 0x11110112, + 0x11110210, 0x11110211, 0x11110212, 0x11120011, 0x11120110, 0x11120111, 0x11120112, 0x11120211, + 0x12100012, 0x12100111, 0x12110011, 0x12110110, 0x12110111, 0x12110112, 0x12110211, 0x12120010, + 0x12120111, 0x12120212, 0x10100021, 0x10100122, 0x10110022, 0x10110121, 0x10110222, 0x10120021, + 0x10120120, 0x11100022, 0x11100121, 0x11100222, 0x11110021, 0x11110120, 0x11110121, 0x11110122, + 0x11110221, 0x11120022, 0x11120121, 0x12100121, 0x12110020, 0x12110022, 0x12110121, 0x12110221, + 0x12110222, 
0x12120120, 0x10101100, 0x10101101, 0x10111001, 0x10111100, 0x10111101, 0x10111102, + 0x10111200, 0x10111201, 0x10121001, 0x10121101, 0x10121200, 0x10121202, 0x11101001, 0x11101100, + 0x11101101, 0x11101102, 0x11101201, 0x11101202, 0x11111000, 0x11111001, 0x11111100, 0x11111101, + 0x11111102, 0x11111200, 0x11111201, 0x11111202, 0x11121001, 0x11121002, 0x11121100, 0x11121101, + 0x11121102, 0x11121201, 0x12101000, 0x12101200, 0x12101202, 0x12111001, 0x12111100, 0x12111101, + 0x12111102, 0x12111201, 0x12121001, 0x12121100, 0x12121101, 0x12121202, 0x10101011, 0x10101012, + 0x10101110, 0x10101111, 0x10101112, 0x10101211, 0x10111010, 0x10111011, 0x10111012, 0x10111110, + 0x10111111, 0x10111112, 0x10111211, 0x10111212, 0x10121011, 0x10121110, 0x10121111, 0x10121112, + 0x10121211, 0x11101010, 0x11101011, 0x11101012, 0x11101110, 0x11101111, 0x11101112, 0x11101210, + 0x11101211, 0x11111010, 0x11111011, 0x11111012, 0x11111110, 0x11111111, 0x11111112, 0x11111210, + 0x11111211, 0x11111212, 0x11121010, 0x11121011, 0x11121110, 0x11121111, 0x11121112, 0x11121210, + 0x11121211, 0x11121212, 0x12101011, 0x12101110, 0x12101111, 0x12101211, 0x12101212, 0x12111010, + 0x12111011, 0x12111110, 0x12111111, 0x12111112, 0x12111210, 0x12111211, 0x12121011, 0x12121110, + 0x12121111, 0x12121112, 0x12121211, 0x10101020, 0x10101021, 0x10101022, 0x10101120, 0x10101122, + 0x10101220, 0x10101221, 0x10111021, 0x10111120, 0x10111121, 0x10111220, 0x10111221, 0x10121020, + 0x10121021, 0x10121022, 0x10121120, 0x10121121, 0x10121122, 0x10121220, 0x10121221, 0x11101021, + 0x11101121, 0x11101122, 0x11101220, 0x11101221, 0x11101222, 0x11111020, 0x11111021, 0x11111022, + 0x11111120, 0x11111121, 0x11111122, 0x11111220, 0x11111221, 0x11111222, 0x11121021, 0x11121120, + 0x11121121, 0x11121221, 0x12101022, 0x12101121, 0x12101122, 0x12101220, 0x12101221, 0x12101222, + 0x12111021, 0x12111121, 0x12111222, 0x12121022, 0x12121121, 0x12121122, 0x12121220, 0x12121221, + 0x10102100, 0x10102101, 0x10102102, 0x10102201, 
0x10112000, 0x10112101, 0x10112200, 0x10122001, + 0x10122202, 0x11102101, 0x11102200, 0x11102202, 0x11112001, 0x11112100, 0x11112101, 0x11112102, + 0x11112200, 0x11112201, 0x11122000, 0x11122002, 0x11122100, 0x11122101, 0x12102002, 0x12102201, + 0x12112000, 0x12112002, 0x12112101, 0x12112200, 0x12122001, 0x12122201, 0x10102011, 0x10102012, + 0x10102111, 0x10102212, 0x10112011, 0x10112110, 0x10112111, 0x10112112, 0x10112211, 0x10122111, + 0x11102011, 0x11102110, 0x11102111, 0x11102112, 0x11102211, 0x11112010, 0x11112011, 0x11112012, + 0x11112110, 0x11112111, 0x11112112, 0x11112210, 0x11112211, 0x11112212, 0x11122011, 0x11122110, + 0x11122111, 0x11122112, 0x11122211, 0x12102011, 0x12102111, 0x12102211, 0x12112011, 0x12112110, + 0x12112111, 0x12112112, 0x12112210, 0x12112211, 0x12122111, 0x10102120, 0x10102220, 0x10112121, + 0x10112222, 0x10122020, 0x10122121, 0x10122122, 0x10122221, 0x11102121, 0x11102220, 0x11102221, + 0x11112021, 0x11112121, 0x11112122, 0x11112220, 0x11112221, 0x11122022, 0x11122121, 0x11122220, + 0x11122222, 0x12102021, 0x12102222, 0x12112022, 0x12112121, 0x12112122, 0x12112220, 0x12112222, + 0x12122021, 0x10200101, 0x10210100, 0x10210102, 0x10210201, 0x10220101, 0x11200100, 0x11210000, + 0x11210101, 0x11210102, 0x11210200, 0x11210202, 0x11220001, 0x11220100, 0x11220102, 0x11220201, + 0x12200001, 0x12210102, 0x12220101, 0x10200011, 0x10200110, 0x10200112, 0x10200211, 0x10210012, + 0x10210111, 0x10220011, 0x10220012, 0x10220112, 0x10220211, 0x11200111, 0x11200211, 0x11210011, + 0x11210111, 0x11210112, 0x11210211, 0x11220111, 0x11220112, 0x11220212, 0x12200110, 0x12200212, + 0x12210012, 0x12210111, 0x12220011, 0x12220112, 0x12220211, 0x10210021, 0x10210122, 0x10210221, + 0x11200020, 0x11200021, 0x11200122, 0x11210121, 0x11210122, 0x11210220, 0x11220020, 0x12200121, + 0x12210021, 0x12210122, 0x12220121, 0x10211001, 0x10211002, 0x10211101, 0x10211102, 0x10211202, + 0x10221001, 0x10221102, 0x10221201, 0x11201000, 0x11201002, 0x11201101, 0x11201200, 
0x11201202, + 0x11211001, 0x11211100, 0x11211101, 0x11211102, 0x11211201, 0x11211202, 0x11221000, 0x11221002, + 0x11221101, 0x12201100, 0x12201101, 0x12201201, 0x12211000, 0x12211002, 0x12211100, 0x12211101, + 0x12211102, 0x12211200, 0x12211202, 0x12221001, 0x12221100, 0x12221201, 0x10201111, 0x10201210, + 0x10201212, 0x10211011, 0x10211111, 0x10211112, 0x10211211, 0x11201110, 0x11201111, 0x11201112, + 0x11201211, 0x11211010, 0x11211011, 0x11211110, 0x11211111, 0x11211112, 0x11211211, 0x11221011, + 0x11221110, 0x11221111, 0x11221112, 0x11221211, 0x12201112, 0x12201211, 0x12201212, 0x12211011, + 0x12211111, 0x12211112, 0x12211211, 0x12211212, 0x12221012, 0x12221111, 0x12221112, 0x12221210, + 0x10201022, 0x10201221, 0x10211121, 0x10221020, 0x10221122, 0x10221220, 0x10221221, 0x11201020, + 0x11201121, 0x11201220, 0x11201222, 0x11211021, 0x11211120, 0x11211121, 0x11211122, 0x11211220, + 0x11211222, 0x11221020, 0x11221121, 0x11221220, 0x12201020, 0x12201022, 0x12201121, 0x12201222, + 0x12211120, 0x12211122, 0x12211220, 0x12211221, 0x12221020, 0x12221120, 0x12221122, 0x12221222, + 0x10212102, 0x10212201, 0x10222101, 0x11202001, 0x11212002, 0x11212101, 0x11212202, 0x11222001, + 0x11222201, 0x12202101, 0x12212001, 0x12212200, 0x12222102, 0x10202011, 0x10202110, 0x10212010, + 0x10212111, 0x10222011, 0x10222110, 0x10222112, 0x10222211, 0x11202010, 0x11202011, 0x11202111, + 0x11202112, 0x11202210, 0x11212011, 0x11212110, 0x11212111, 0x11212112, 0x11212211, 0x11222010, + 0x11222111, 0x11222212, 0x12202012, 0x12202110, 0x12202212, 0x12212111, 0x12222011, 0x12222110, + 0x12222111, 0x12222211, 0x10212021, 0x10212122, 0x10212220, 0x11202021, 0x11202120, 0x11202221, + 0x11212020, 0x11212121, 0x11212220, 0x11212222, 0x11222120, 0x11222121, 0x11222221, 0x12202122, + 0x12212120, 0x12212220, 0x12212222, 0x12222122, 0x20000000, 0x20000002, 0x20000200, 0x20000202, + 0x20020000, 0x20020002, 0x20020200, 0x20020202, 0x21000101, 0x21010000, 0x21010001, 0x21010100, + 0x21010102, 0x21010201, 
0x21020101, 0x22000000, 0x22000002, 0x22000200, 0x22000202, 0x22010101, + 0x22020000, 0x22020002, 0x22020200, 0x22020202, 0x20000111, 0x20010011, 0x20010110, 0x20010112, + 0x20010211, 0x20020111, 0x21000011, 0x21000110, 0x21000211, 0x21010010, 0x21010012, 0x21010111, + 0x21010112, 0x21010210, 0x21010211, 0x21020110, 0x21020112, 0x21020211, 0x22000111, 0x22000211, + 0x22010110, 0x22010112, 0x22010211, 0x22020111, 0x20000020, 0x20000022, 0x20000220, 0x20000222, + 0x20010121, 0x20020020, 0x20020022, 0x20020220, 0x20020222, 0x21010021, 0x21010120, 0x21010221, + 0x21020121, 0x22000020, 0x22000022, 0x22000220, 0x22000222, 0x22010121, 0x22020020, 0x22020022, + 0x22020220, 0x22020222, 0x20011100, 0x20011201, 0x21001001, 0x21001100, 0x21011001, 0x21011101, + 0x21011202, 0x21021001, 0x21021100, 0x21021201, 0x22011100, 0x22011201, 0x20001011, 0x20001211, + 0x20011012, 0x20011111, 0x20011212, 0x20021112, 0x20021211, 0x21001010, 0x21001011, 0x21001111, + 0x21001210, 0x21011011, 0x21011110, 0x21011111, 0x21011112, 0x21011211, 0x21011212, 0x21021111, + 0x21021112, 0x21021210, 0x21021212, 0x22001011, 0x22001110, 0x22001112, 0x22001211, 0x22011010, + 0x22011012, 0x22011111, 0x22011210, 0x22021112, 0x20011021, 0x20011122, 0x20011221, 0x20021121, + 0x21001021, 0x21001120, 0x21001221, 0x21001222, 0x21011020, 0x21011121, 0x21011221, 0x21011222, + 0x21021021, 0x21021122, 0x21021222, 0x22001121, 0x22011021, 0x22011222, 0x22021120, 0x20002000, + 0x20002002, 0x20002200, 0x20002202, 0x20012101, 0x20022000, 0x20022002, 0x20022200, 0x20022202, + 0x21002001, 0x21002101, 0x21012001, 0x21012100, 0x21012201, 0x21022101, 0x21022201, 0x22002000, + 0x22002002, 0x22002200, 0x22002202, 0x22012101, 0x22022000, 0x22022002, 0x22022200, 0x22022202, + 0x20002111, 0x20002112, 0x20012011, 0x20012110, 0x20012112, 0x20022111, 0x21002011, 0x21002110, + 0x21002112, 0x21002211, 0x21012010, 0x21012012, 0x21012111, 0x21012212, 0x21022011, 0x21022110, + 0x22002111, 0x22012112, 0x22012211, 0x22022111, 0x20002020, 
0x20002022, 0x20002220, 0x20002222, + 0x20012121, 0x20022020, 0x20022022, 0x20022220, 0x20022222, 0x21002121, 0x21012021, 0x21012120, + 0x21012122, 0x22002020, 0x22002022, 0x22002220, 0x22002222, 0x22012121, 0x22022020, 0x22022022, + 0x22022220, 0x22022222, 0x20100101, 0x20110001, 0x20110102, 0x20110200, 0x20110201, 0x20120101, + 0x21100001, 0x21100102, 0x21100201, 0x21110101, 0x21110200, 0x21110202, 0x21120201, 0x21120202, + 0x22100101, 0x22110001, 0x22110100, 0x22110102, 0x22110201, 0x22120101, 0x20100011, 0x20100110, + 0x20100112, 0x20100211, 0x20110010, 0x20110111, 0x20110210, 0x20110212, 0x20120011, 0x20120110, + 0x20120112, 0x20120211, 0x21100010, 0x21100111, 0x21110010, 0x21110011, 0x21110110, 0x21110111, + 0x21110112, 0x21110211, 0x21120012, 0x21120111, 0x22100110, 0x22100112, 0x22110012, 0x22110111, + 0x22110210, 0x22120011, 0x22120110, 0x22120112, 0x22120211, 0x20100121, 0x20110021, 0x20110120, + 0x20110221, 0x20120121, 0x21100120, 0x21100122, 0x21100221, 0x21110020, 0x21110022, 0x21110121, + 0x21110220, 0x21120122, 0x21120221, 0x22100121, 0x22110120, 0x22110122, 0x22120221, 0x20101001, + 0x20101100, 0x20101102, 0x20111000, 0x20111101, 0x20111200, 0x20121102, 0x21101000, 0x21101202, + 0x21111001, 0x21111100, 0x21111101, 0x21111102, 0x21111200, 0x21111201, 0x21121000, 0x21121001, + 0x21121002, 0x21121101, 0x22101100, 0x22101102, 0x22111002, 0x22111100, 0x22111101, 0x22111200, + 0x22121001, 0x22121201, 0x20101010, 0x20101111, 0x20101210, 0x20101212, 0x20111010, 0x20111011, + 0x20111110, 0x20111111, 0x20111112, 0x20111211, 0x20121011, 0x20121111, 0x20121211, 0x20121212, + 0x21101011, 0x21101110, 0x21101111, 0x21101112, 0x21101211, 0x21111010, 0x21111011, 0x21111012, + 0x21111110, 0x21111111, 0x21111112, 0x21111210, 0x21111211, 0x21111212, 0x21121011, 0x21121110, + 0x21121111, 0x21121112, 0x21121211, 0x22101011, 0x22101111, 0x22101210, 0x22111011, 0x22111012, + 0x22111110, 0x22111111, 0x22111112, 0x22111211, 0x22111212, 0x22121010, 0x22121012, 0x22121111, + 
0x22121210, 0x22121212, 0x20101021, 0x20101120, 0x20111020, 0x20111121, 0x20111221, 0x20121020, + 0x20121122, 0x20121221, 0x21101121, 0x21101220, 0x21101221, 0x21111021, 0x21111022, 0x21111121, + 0x21111122, 0x21111221, 0x21121121, 0x21121220, 0x22101022, 0x22101120, 0x22101221, 0x22101222, + 0x22111022, 0x22111120, 0x22111121, 0x22121120, 0x22121122, 0x22121221, 0x20102101, 0x20112102, + 0x20112201, 0x20122101, 0x21102001, 0x21102102, 0x21112000, 0x21112002, 0x21112101, 0x21112102, + 0x21112202, 0x21122100, 0x21122101, 0x22102101, 0x22112001, 0x22112102, 0x22112201, 0x22122101, + 0x20102110, 0x20102112, 0x20102211, 0x20112010, 0x20112012, 0x20112111, 0x20112210, 0x20112212, + 0x20122010, 0x20122011, 0x20122110, 0x20122112, 0x21102010, 0x21102012, 0x21102111, 0x21102210, + 0x21102212, 0x21112011, 0x21112110, 0x21112111, 0x21112112, 0x21112211, 0x21122012, 0x21122111, + 0x21122112, 0x21122212, 0x22102011, 0x22102110, 0x22112010, 0x22112012, 0x22112111, 0x22112212, + 0x22122011, 0x22122112, 0x20102121, 0x20112121, 0x20122121, 0x21102120, 0x21102122, 0x21102221, + 0x21112020, 0x21112121, 0x21112220, 0x21122021, 0x22102121, 0x22112021, 0x22112120, 0x22112121, + 0x22112122, 0x20200000, 0x20200002, 0x20200200, 0x20200202, 0x20210101, 0x20220000, 0x20220002, + 0x20220200, 0x20220202, 0x21200101, 0x21210001, 0x21210100, 0x21210102, 0x21210201, 0x22200000, + 0x22200002, 0x22200200, 0x22200202, 0x22210101, 0x22220000, 0x22220002, 0x22220200, 0x22220202, + 0x20200111, 0x20200211, 0x20210011, 0x20210110, 0x20210112, 0x20210211, 0x20210212, 0x21200112, + 0x21200211, 0x21210011, 0x21210111, 0x21210210, 0x21210212, 0x21220011, 0x21220110, 0x22200111, + 0x22210010, 0x22210012, 0x22210112, 0x22210211, 0x20200022, 0x20200220, 0x20200222, 0x20210020, + 0x20210221, 0x20220022, 0x20220220, 0x20220222, 0x21200121, 0x21210021, 0x21210122, 0x21210221, + 0x21220121, 0x22200020, 0x22200022, 0x22200220, 0x22200222, 0x22210121, 0x22220020, 0x22220022, + 0x22220220, 0x22220222, 0x20211201, 
0x20221101, 0x21201001, 0x21201100, 0x21211000, 0x21211100, + 0x21211101, 0x21211200, 0x21211202, 0x21221001, 0x21221101, 0x21221102, 0x21221200, 0x21221201, + 0x22201101, 0x20201112, 0x20201211, 0x20211010, 0x20211012, 0x20211111, 0x20211210, 0x20221112, + 0x20221211, 0x21201012, 0x21201111, 0x21211011, 0x21211110, 0x21211111, 0x21211112, 0x21211211, + 0x21221111, 0x21221212, 0x22201011, 0x22201110, 0x22201111, 0x22201112, 0x22201211, 0x22211012, + 0x22211111, 0x22211210, 0x20201121, 0x20211021, 0x20211122, 0x20211222, 0x20221021, 0x20221121, + 0x21201120, 0x21201122, 0x21201222, 0x21211022, 0x21211121, 0x21211122, 0x21211220, 0x21221020, + 0x21221022, 0x22201122, 0x22211020, 0x22211121, 0x22211122, 0x22211221, 0x22221021, 0x22221120, + 0x22221122, 0x20202000, 0x20202002, 0x20202200, 0x20202202, 0x20222000, 0x20222002, 0x20222200, + 0x20222202, 0x21212001, 0x21212100, 0x21212102, 0x21212201, 0x22202000, 0x22202002, 0x22202200, + 0x22202202, 0x22212101, 0x22222000, 0x22222002, 0x22222200, 0x22222202, 0x20202111, 0x20212110, + 0x20212211, 0x20222011, 0x20222111, 0x21202011, 0x21212010, 0x21212111, 0x21212212, 0x21222011, + 0x21222112, 0x21222211, 0x22212010, 0x22212112, 0x20202020, 0x20202022, 0x20202220, 0x20202222, + 0x20222020, 0x20222022, 0x20222220, 0x20222222, 0x21212021, 0x21212120, 0x21212122, 0x22202020, + 0x22202022, 0x22202220, 0x22202222, 0x22212121, 0x22222020, 0x22222022, 0x22222220, 0x22222222, +GGML_TABLE_END() +#endif + +#endif // GGML_COMMON_IMPL +#endif // GGML_COMMON_IMPL + +#include + +using namespace metal; + +#define MAX(x, y) ((x) > (y) ? (x) : (y)) +#define MIN(x, y) ((x) < (y) ? 
(x) : (y)) +#define SWAP(x, y) { auto tmp = (x); (x) = (y); (y) = tmp; } + +#define N_SIMDWIDTH 32 // assuming SIMD group size is 32 + +enum ggml_sort_order { + GGML_SORT_ORDER_ASC, + GGML_SORT_ORDER_DESC, +}; + +// general-purpose kernel for addition, subtraction, multiplication and division of two tensors +// pros: works for non-contiguous tensors, supports broadcast across all dims +// cons: not very efficient +kernel void kernel_add( + device const char * src0, + device const char * src1, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + constant int64_t & offs, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig.z; + const int64_t i02 = tgpig.y; + const int64_t i01 = tgpig.x; + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + + device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01 + offs; + device const char * src1_ptr = src1 + i13*nb13 + i12*nb12 + i11*nb11; + device char * dst_ptr = dst + i03*nb3 + i02*nb2 + i01*nb1 + offs; + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + const int i10 = i0 % ne10; + *((device float *)(dst_ptr + i0*nb0)) = *((device float *)(src0_ptr + i0*nb00)) + *((device float *)(src1_ptr + i10*nb10)); + } +} + +kernel void kernel_sub( + 
device const char * src0, + device const char * src1, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + constant int64_t & offs, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig.z; + const int64_t i02 = tgpig.y; + const int64_t i01 = tgpig.x; + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + + device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01 + offs; + device const char * src1_ptr = src1 + i13*nb13 + i12*nb12 + i11*nb11; + device char * dst_ptr = dst + i03*nb3 + i02*nb2 + i01*nb1 + offs; + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + const int i10 = i0 % ne10; + *((device float *)(dst_ptr + i0*nb0)) = *((device float *)(src0_ptr + i0*nb00)) - *((device float *)(src1_ptr + i10*nb10)); + } +} + +kernel void kernel_mul( + device const char * src0, + device const char * src1, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant 
uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig.z; + const int64_t i02 = tgpig.y; + const int64_t i01 = tgpig.x; + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + + device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01; + device const char * src1_ptr = src1 + i13*nb13 + i12*nb12 + i11*nb11; + device char * dst_ptr = dst + i03*nb3 + i02*nb2 + i01*nb1; + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + const int i10 = i0 % ne10; + *((device float *)(dst_ptr + i0*nb0)) = *((device float *)(src0_ptr + i0*nb00)) * *((device float *)(src1_ptr + i10*nb10)); + } +} + +kernel void kernel_div( + device const char * src0, + device const char * src1, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig.z; + const int64_t i02 = tgpig.y; + 
const int64_t i01 = tgpig.x; + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + + device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01; + device const char * src1_ptr = src1 + i13*nb13 + i12*nb12 + i11*nb11; + device char * dst_ptr = dst + i03*nb3 + i02*nb2 + i01*nb1; + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + const int i10 = i0 % ne10; + *((device float *)(dst_ptr + i0*nb0)) = *((device float *)(src0_ptr + i0*nb00)) / *((device float *)(src1_ptr + i10*nb10)); + } +} + +template +kernel void kernel_repeat( + device const char * src0, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i3 = tgpig.z; + const int64_t i2 = tgpig.y; + const int64_t i1 = tgpig.x; + + const int64_t i03 = i3 % ne03; + const int64_t i02 = i2 % ne02; + const int64_t i01 = i1 % ne01; + + device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01; + device char * dst_ptr = dst + i3*nb3 + i2*nb2 + i1*nb1 ; + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + const int i00 = i0 % ne00; + *((device T *)(dst_ptr + i0*nb0)) = *((device T *)(src0_ptr + i00*nb00)); + } +} + +typedef decltype(kernel_repeat) kernel_repeat_t; + +template [[host_name("kernel_repeat_f32")]] kernel kernel_repeat_t kernel_repeat; +template [[host_name("kernel_repeat_f16")]] kernel kernel_repeat_t kernel_repeat; +template [[host_name("kernel_repeat_i32")]] kernel kernel_repeat_t 
kernel_repeat; +template [[host_name("kernel_repeat_i16")]] kernel kernel_repeat_t kernel_repeat; + +// assumption: src1 is a row +// broadcast src1 into src0 +kernel void kernel_add_row( + device const float4 * src0, + device const float4 * src1, + device float4 * dst, + constant uint64_t & nb [[buffer(28)]], + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] + src1[tpig % nb]; +} + +kernel void kernel_sub_row( + device const float4 * src0, + device const float4 * src1, + device float4 * dst, + constant uint64_t & nb [[buffer(28)]], + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] - src1[tpig % nb]; +} + +kernel void kernel_mul_row( + device const float4 * src0, + device const float4 * src1, + device float4 * dst, + constant uint64_t & nb [[buffer(28)]], + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] * src1[tpig % nb]; +} + +kernel void kernel_div_row( + device const float4 * src0, + device const float4 * src1, + device float4 * dst, + constant uint64_t & nb [[buffer(28)]], + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] / src1[tpig % nb]; +} + +kernel void kernel_scale( + device const float * src0, + device float * dst, + constant float & scale, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] * scale; +} + +kernel void kernel_scale_4( + device const float4 * src0, + device float4 * dst, + constant float & scale, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] * scale; +} + +kernel void kernel_clamp( + device const float * src0, + device float * dst, + constant float & min, + constant float & max, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] < min ? min : (src0[tpig] > max ? 
max : src0[tpig]); +} + +kernel void kernel_relu( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = max(0.0f, src0[tpig]); +} + +kernel void kernel_sigmoid( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = 1.0f / (1.0f + exp(-src0[tpig])); +} + +kernel void kernel_tanh( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + device const float & x = src0[tpig]; + dst[tpig] = precise::tanh(x); +} + +constant float GELU_COEF_A = 0.044715f; +constant float GELU_QUICK_COEF = -1.702f; +constant float SQRT_2_OVER_PI = 0.79788456080286535587989211986876f; + +kernel void kernel_gelu( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + device const float & x = src0[tpig]; + + dst[tpig] = 0.5f*x*(1.0f + precise::tanh(SQRT_2_OVER_PI*x*(1.0f + GELU_COEF_A*x*x))); +} + +kernel void kernel_gelu_4( + device const float4 * src0, + device float4 * dst, + uint tpig[[thread_position_in_grid]]) { + device const float4 & x = src0[tpig]; + + // BEWARE !!! + // Simply using "tanh" instead of "precise::tanh" will sometimes results in NaNs! 
+ // This was observed with Falcon 7B and 40B models + // + dst[tpig] = 0.5f*x*(1.0f + precise::tanh(SQRT_2_OVER_PI*x*(1.0f + GELU_COEF_A*x*x))); +} + +kernel void kernel_gelu_quick( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + device const float & x = src0[tpig]; + + dst[tpig] = x*(1.0f/(1.0f+exp(GELU_QUICK_COEF*x))); +} + +kernel void kernel_gelu_quick_4( + device const float4 * src0, + device float4 * dst, + uint tpig[[thread_position_in_grid]]) { + device const float4 & x = src0[tpig]; + + dst[tpig] = x*(1.0f/(1.0f+exp(GELU_QUICK_COEF*x))); +} + +kernel void kernel_silu( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + device const float & x = src0[tpig]; + dst[tpig] = x / (1.0f + exp(-x)); +} + +kernel void kernel_silu_4( + device const float4 * src0, + device float4 * dst, + uint tpig[[thread_position_in_grid]]) { + device const float4 & x = src0[tpig]; + dst[tpig] = x / (1.0f + exp(-x)); +} + +kernel void kernel_sqr( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] * src0[tpig]; +} + +kernel void kernel_sqrt( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = sqrt(src0[tpig]); +} + +kernel void kernel_sin( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = sin(src0[tpig]); +} + +kernel void kernel_cos( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = cos(src0[tpig]); +} + +kernel void kernel_sum_rows( + device const float * src0, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne10, + constant int64_t & ne11, + 
constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tpig[[thread_position_in_grid]]) { + int64_t i3 = tpig.z; + int64_t i2 = tpig.y; + int64_t i1 = tpig.x; + + if (i3 >= ne03 || i2 >= ne02 || i1 >= ne01) { + return; + } + + device const float * src_row = (device const float *) ((device const char *) src0 + i1*nb01 + i2*nb02 + i3*nb03); + device float * dst_row = (device float *) ((device char *) dst + i1*nb1 + i2*nb2 + i3*nb3); + + float row_sum = 0; + + for (int64_t i0 = 0; i0 < ne00; i0++) { + row_sum += src_row[i0]; + } + + dst_row[0] = row_sum; +} + +template +kernel void kernel_soft_max( + device const char * src0, + device const char * src1, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant float & scale, + constant float & max_bias, + constant float & m0, + constant float & m1, + constant uint32_t & n_head_log2, + threadgroup float * buf [[threadgroup(0)]], + uint tgpig[[threadgroup_position_in_grid]], + uint tpitg[[thread_position_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint ntg[[threads_per_threadgroup]]) { + const int64_t i03 = (tgpig) / (ne02*ne01); + const int64_t i02 = (tgpig - i03*ne02*ne01) / ne01; + const int64_t i01 = (tgpig - i03*ne02*ne01 - i02*ne01); + + device const float * psrc0 = (device const float *) src0 + (i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); + device const T * pmask = src1 != src0 ? 
(device const T *) src1 + i01*ne00 : nullptr; + device float * pdst = (device float *) dst + (i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); + + float slope = 1.0f; + + // ALiBi + if (max_bias > 0.0f) { + const int64_t h = i02; + + const float base = h < n_head_log2 ? m0 : m1; + const int exp = h < n_head_log2 ? h + 1 : 2*(h - n_head_log2) + 1; + + slope = pow(base, exp); + } + + // parallel max + float lmax = -INFINITY; + + for (int i00 = tpitg; i00 < ne00; i00 += ntg) { + lmax = MAX(lmax, psrc0[i00]*scale + (pmask ? slope*pmask[i00] : 0.0f)); + } + + // find the max value in the block + float max_val = simd_max(lmax); + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = -INFINITY; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = max_val; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + max_val = buf[tiisg]; + max_val = simd_max(max_val); + } + + // parallel sum + float lsum = 0.0f; + for (int i00 = tpitg; i00 < ne00; i00 += ntg) { + const float exp_psrc0 = exp((psrc0[i00]*scale + (pmask ? 
slope*pmask[i00] : 0.0f)) - max_val); + lsum += exp_psrc0; + pdst[i00] = exp_psrc0; + } + + // This barrier fixes a failing test + // ref: https://github.com/ggerganov/ggml/pull/621#discussion_r1425156335 + threadgroup_barrier(mem_flags::mem_none); + + float sum = simd_sum(lsum); + + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = 0.0f; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = sum; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + sum = buf[tiisg]; + sum = simd_sum(sum); + } + + const float inv_sum = 1.0f/sum; + + for (int i00 = tpitg; i00 < ne00; i00 += ntg) { + pdst[i00] *= inv_sum; + } +} + +template +kernel void kernel_soft_max_4( + device const char * src0, + device const char * src1, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant float & scale, + constant float & max_bias, + constant float & m0, + constant float & m1, + constant uint32_t & n_head_log2, + threadgroup float * buf [[threadgroup(0)]], + uint tgpig[[threadgroup_position_in_grid]], + uint tpitg[[thread_position_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint ntg[[threads_per_threadgroup]]) { + const int64_t i03 = (tgpig) / (ne02*ne01); + const int64_t i02 = (tgpig - i03*ne02*ne01) / ne01; + const int64_t i01 = (tgpig - i03*ne02*ne01 - i02*ne01); + + device const float4 * psrc4 = (device const float4 *) src0 + (i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00)/4; + device const T * pmask = src1 != src0 ? (device const T *) src1 + i01*ne00/4 : nullptr; + device float4 * pdst4 = (device float4 *) dst + (i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00)/4; + + float slope = 1.0f; + + if (max_bias > 0.0f) { + const int64_t h = i02; + + const float base = h < n_head_log2 ? m0 : m1; + const int exp = h < n_head_log2 ? 
h + 1 : 2*(h - n_head_log2) + 1; + + slope = pow(base, exp); + } + + // parallel max + float4 lmax4 = -INFINITY; + + for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { + lmax4 = fmax(lmax4, psrc4[i00]*scale + (float4)((pmask ? slope*pmask[i00] : 0.0f))); + } + + const float lmax = MAX(MAX(lmax4[0], lmax4[1]), MAX(lmax4[2], lmax4[3])); + + float max_val = simd_max(lmax); + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = -INFINITY; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = max_val; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + max_val = buf[tiisg]; + max_val = simd_max(max_val); + } + + // parallel sum + float4 lsum4 = 0.0f; + for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { + const float4 exp_psrc4 = exp((psrc4[i00]*scale + (float4)((pmask ? slope*pmask[i00] : 0.0f))) - max_val); + lsum4 += exp_psrc4; + pdst4[i00] = exp_psrc4; + } + + const float lsum = lsum4[0] + lsum4[1] + lsum4[2] + lsum4[3]; + + // This barrier fixes a failing test + // ref: https://github.com/ggerganov/ggml/pull/621#discussion_r1425156335 + threadgroup_barrier(mem_flags::mem_none); + + float sum = simd_sum(lsum); + + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = 0.0f; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = sum; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + sum = buf[tiisg]; + sum = simd_sum(sum); + } + + const float inv_sum = 1.0f/sum; + + for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { + pdst4[i00] *= inv_sum; + } +} + +typedef decltype(kernel_soft_max) kernel_soft_max_t; +typedef decltype(kernel_soft_max_4) kernel_soft_max_4_t; + +template [[host_name("kernel_soft_max_f16")]] kernel kernel_soft_max_t kernel_soft_max; +template [[host_name("kernel_soft_max_f32")]] kernel kernel_soft_max_t kernel_soft_max; +template [[host_name("kernel_soft_max_f16_4")]] kernel kernel_soft_max_4_t kernel_soft_max_4; +template 
[[host_name("kernel_soft_max_f32_4")]] kernel kernel_soft_max_4_t kernel_soft_max_4; + +kernel void kernel_diag_mask_inf( + device const float * src0, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int & n_past, + uint3 tpig[[thread_position_in_grid]]) { + const int64_t i02 = tpig[2]; + const int64_t i01 = tpig[1]; + const int64_t i00 = tpig[0]; + + if (i00 > n_past + i01) { + dst[i02*ne01*ne00 + i01*ne00 + i00] = -INFINITY; + } else { + dst[i02*ne01*ne00 + i01*ne00 + i00] = src0[i02*ne01*ne00 + i01*ne00 + i00]; + } +} + +kernel void kernel_diag_mask_inf_8( + device const float4 * src0, + device float4 * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int & n_past, + uint3 tpig[[thread_position_in_grid]]) { + + const int64_t i = 2*tpig[0]; + + dst[i+0] = src0[i+0]; + dst[i+1] = src0[i+1]; + int64_t i4 = 4*i; + const int64_t i02 = i4/(ne00*ne01); i4 -= i02*ne00*ne01; + const int64_t i01 = i4/(ne00); i4 -= i01*ne00; + const int64_t i00 = i4; + for (int k = 3; k >= 0; --k) { + if (i00 + 4 + k <= n_past + i01) { + break; + } + dst[i+1][k] = -INFINITY; + if (i00 + k > n_past + i01) { + dst[i][k] = -INFINITY; + } + } +} + +// ref: ggml.c:ggml_compute_forward_ssm_conv_f32 +// TODO: optimize +kernel void kernel_ssm_conv_f32( + device const void * src0, + device const void * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t ir = tgpig.x; + const int64_t i2 = 
tgpig.y; + const int64_t i3 = tgpig.z; + + const int64_t nc = ne10; + const int64_t ncs = ne00; + const int64_t nr = ne01; + const int64_t n_t = ne1; + const int64_t n_s = ne2; + + device const float * s = (device const float *) ((device const char *) src0 + ir*nb01 + i2*nb00 + i3*nb02); + device const float * c = (device const float *) ((device const char *) src1 + ir*nb11); + device float * x = (device float *) ((device char *) dst + ir*nb0 + i2*nb1 + i3*nb2); + + float sumf = 0.0f; + + for (int64_t i0 = 0; i0 < nc; ++i0) { + sumf += s[i0] * c[i0]; + } + + x[0] = sumf; +} + +// ref: ggml.c:ggml_compute_forward_ssm_scan_f32 +// TODO: optimize +kernel void kernel_ssm_scan_f32( + device const void * src0, + device const void * src1, + device const void * src2, + device const void * src3, + device const void * src4, + device const void * src5, + device float * dst, + constant int64_t & d_state, + constant int64_t & d_inner, + constant int64_t & n_seq_tokens, + constant int64_t & n_seqs, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant uint64_t & nb20, + constant uint64_t & nb21, + constant uint64_t & nb22, + constant uint64_t & nb30, + constant uint64_t & nb31, + constant uint64_t & nb40, + constant uint64_t & nb41, + constant uint64_t & nb42, + constant uint64_t & nb50, + constant uint64_t & nb51, + constant uint64_t & nb52, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t ir = tgpig.x; + const int64_t i3 = tgpig.y; + + const int64_t nc = d_state; + const int64_t nr = d_inner; + const int64_t n_t = n_seq_tokens; + const int64_t n_s = n_seqs; + + for (int64_t i2 = 0; i2 < n_t; ++i2) { + device const float * s0 = (device const float *) ((device const char *) src0 + ir*nb01 + i3*nb02); + device const float * x 
= (device const float *) ((device const char *) src1 + ir*nb10 + i2*nb11 + i3*nb12); + device const float * dt = (device const float *) ((device const char *) src2 + ir*nb20 + i2*nb21 + i3*nb22); + device const float * A = (device const float *) ((device const char *) src3 + ir*nb31); + device const float * B = (device const float *) ((device const char *) src4 + i2*nb41 + i3*nb42); + device const float * C = (device const float *) ((device const char *) src5 + i2*nb51 + i3*nb52); + device float * y = (device float *) ((device char *) dst + ir*nb10 + i2*nb11 + i3*nb12); // TODO: do not use src1 strides + device float * s = (device float *) ((device char *) dst + ir*nb01 + i3*nb02 + nb13); + + if (i2 > 0) { + s0 = s; + } + + // i1 == 0 + float dt_soft_plus = dt[0] <= 20.0f ? log(1.0f + exp(dt[0])) : dt[0]; + float x_dt = x[0] * dt_soft_plus; + float sumf = 0.0f; + + for (int64_t i0 = 0; i0 < nc; ++i0) { + int64_t i = i0; + float state = (s0[i] * exp(dt_soft_plus * A[i])) + (B[i0] * x_dt); + sumf += state * C[i0]; + s[i] = state; + } + + y[0] = sumf; + } +} + +kernel void kernel_norm( + device const void * src0, + device float * dst, + constant int64_t & ne00, + constant uint64_t & nb01, + constant float & eps, + threadgroup float * sum [[threadgroup(0)]], + uint tgpig[[threadgroup_position_in_grid]], + uint tpitg[[thread_position_in_threadgroup]], + uint ntg[[threads_per_threadgroup]]) { + device const float * x = (device const float *) ((device const char *) src0 + tgpig*nb01); + // MEAN + // parallel sum + sum[tpitg] = 0.0f; + for (int i00 = tpitg; i00 < ne00; i00 += ntg) { + sum[tpitg] += x[i00]; + } + // reduce + threadgroup_barrier(mem_flags::mem_threadgroup); + for (uint i = ntg/2; i > 0; i /= 2) { + if (tpitg < i) { + sum[tpitg] += sum[tpitg + i]; + } + threadgroup_barrier(mem_flags::mem_threadgroup); + } + const float mean = sum[0] / ne00; + + // recenter and VARIANCE + threadgroup_barrier(mem_flags::mem_threadgroup); + device float * y = dst + tgpig*ne00; + 
sum[tpitg] = 0.0f; + for (int i00 = tpitg; i00 < ne00; i00 += ntg) { + y[i00] = x[i00] - mean; + sum[tpitg] += y[i00] * y[i00]; + } + + // reduce + threadgroup_barrier(mem_flags::mem_threadgroup); + for (uint i = ntg/2; i > 0; i /= 2) { + if (tpitg < i) { + sum[tpitg] += sum[tpitg + i]; + } + threadgroup_barrier(mem_flags::mem_threadgroup); + } + const float variance = sum[0] / ne00; + + const float scale = 1.0f/sqrt(variance + eps); + for (int i00 = tpitg; i00 < ne00; i00 += ntg) { + y[i00] = y[i00] * scale; + } +} + +kernel void kernel_rms_norm( + device const void * src0, + device float * dst, + constant int64_t & ne00, + constant uint64_t & nb01, + constant float & eps, + threadgroup float * buf [[threadgroup(0)]], + uint tgpig[[threadgroup_position_in_grid]], + uint tpitg[[thread_position_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint ntg[[threads_per_threadgroup]]) { + device const float4 * x = (device const float4 *) ((device const char *) src0 + tgpig*nb01); + + float4 sumf = 0; + float all_sum = 0; + + // parallel sum + for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { + sumf += x[i00] * x[i00]; + } + all_sum = sumf[0] + sumf[1] + sumf[2] + sumf[3]; + all_sum = simd_sum(all_sum); + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = 0.0f; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = all_sum; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + all_sum = buf[tiisg]; + all_sum = simd_sum(all_sum); + } + + const float mean = all_sum/ne00; + const float scale = 1.0f/sqrt(mean + eps); + + device float4 * y = (device float4 *) (dst + tgpig*ne00); + for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { + y[i00] = x[i00] * scale; + } +} + +kernel void kernel_group_norm( + device const float * src0, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + 
constant uint64_t & nb01, + constant uint64_t & nb02, + constant int32_t & n_groups, + constant float & eps, + threadgroup float * buf [[threadgroup(0)]], + uint tgpig[[threadgroup_position_in_grid]], + uint tpitg[[thread_position_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint ntg[[threads_per_threadgroup]]) { + const int64_t ne = ne00*ne01*ne02; + const int64_t gs = ne00*ne01*((ne02 + n_groups - 1) / n_groups); + + int start = tgpig * gs; + int end = start + gs; + + start += tpitg; + + if (end >= ne) { + end = ne; + } + + float tmp = 0.0f; // partial sum for thread in warp + + for (int j = start; j < end; j += ntg) { + tmp += src0[j]; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + tmp = simd_sum(tmp); + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = 0.0f; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = tmp; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + tmp = buf[tiisg]; + tmp = simd_sum(tmp); + } + + const float mean = tmp / gs; + tmp = 0.0f; + + for (int j = start; j < end; j += ntg) { + float xi = src0[j] - mean; + dst[j] = xi; + tmp += xi * xi; + } + + tmp = simd_sum(tmp); + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = 0.0f; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = tmp; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + tmp = buf[tiisg]; + tmp = simd_sum(tmp); + } + + const float variance = tmp / gs; + const float scale = 1.0f/sqrt(variance + eps); + for (int j = start; j < end; j += ntg) { + dst[j] *= scale; + } +} + +// function for calculate inner product between half a q4_0 block and 16 floats (yl), sumy is SUM(yl[i]) +// il indicates where the q4 quants begin (0 or QK4_0/4) +// we assume that the yl's have been multiplied with the appropriate scale factor +// that corresponds to the missing bit shifts (1, 1/16, 1/256, 1/4096) 
+inline float block_q_n_dot_y(device const block_q4_0 * qb_curr, float sumy, thread float * yl, int il) {
+    float d = qb_curr->d;
+
+    float2 acc = 0.f;
+
+    // skip the 2-byte d header (1 uint16); il/2 selects which half of the block's nibbles to read
+    device const uint16_t * qs = ((device const uint16_t *)qb_curr + 1 + il/2);
+
+    for (int i = 0; i < 8; i+=2) {
+        // mask nibbles in place without shifting: the caller pre-scaled yl by 1, 1/256, 1/16, 1/4096
+        acc[0] += yl[i + 0] * (qs[i / 2] & 0x000F)
+                + yl[i + 1] * (qs[i / 2] & 0x0F00);
+        acc[1] += yl[i + 8] * (qs[i / 2] & 0x00F0)
+                + yl[i + 9] * (qs[i / 2] & 0xF000);
+    }
+    // sumy * -8 folds the constant -8 offset of every q4_0 quant into a single term
+    return d * (sumy * -8.f + acc[0] + acc[1]);
+}
+
+// function to calculate the inner product between half a q4_1 block and 16 floats (yl), sumy is SUM(yl[i])
+// il indicates where the q4 quants begin (0 or QK4_0/4)
+// we assume that the yl's have been multiplied with the appropriate scale factor
+// that corresponds to the missing bit shifts (1, 1/16, 1/256, 1/4096)
+inline float block_q_n_dot_y(device const block_q4_1 * qb_curr, float sumy, thread float * yl, int il) {
+    float d = qb_curr->d;
+    float m = qb_curr->m;
+
+    float2 acc = 0.f;
+
+    // skip the d and m header fields (2 uint16s) to reach the packed nibbles
+    device const uint16_t * qs = ((device const uint16_t *)qb_curr + 2 + il/2);
+
+    for (int i = 0; i < 8; i+=2) {
+        acc[0] += yl[i + 0] * (qs[i / 2] & 0x000F)
+                + yl[i + 1] * (qs[i / 2] & 0x0F00);
+        acc[1] += yl[i + 8] * (qs[i / 2] & 0x00F0)
+                + yl[i + 9] * (qs[i / 2] & 0xF000);
+    }
+    // q4_1 carries an explicit minimum m instead of q4_0's implicit -8 offset
+    return d * (acc[0] + acc[1]) + sumy * m;
+}
+
+// function to calculate the inner product between half a q5_0 block and 16 floats (yl), sumy is SUM(yl[i])
+// il indicates where the q5 quants begin (0 or QK5_0/4)
+// we assume that the yl's have been multiplied with the appropriate scale factor
+// that corresponds to the missing bit shifts (1, 1/16, 1/256, 1/4096)
+inline float block_q_n_dot_y(device const block_q5_0 * qb_curr, float sumy, thread float * yl, int il) {
+    float d = qb_curr->d;
+
+    float2 acc = 0.f;
+
+    // skip the d and qh header fields (3 uint16s) to reach the packed low nibbles
+    device const uint16_t * qs = ((device const uint16_t *)qb_curr + 3 + il/2);
+    const uint32_t qh = *((device const uint32_t *)qb_curr->qh);
+
+    for (int i = 0; i < 8; i+=2) {
+        // OR the 5th bit from qh into the masked low nibble, shifted to the matching pre-scaled position
+        acc[0] += yl[i + 0] * ((qs[i / 2] & 0x000F) | ((qh >> (i+0+il        ) << 4 ) & 0x00010))
+                + yl[i + 1] * ((qs[i / 2] & 0x0F00) | ((qh >> (i+1+il        ) << 12) & 0x01000));
+        acc[1] += yl[i + 8] * ((qs[i / 2] & 0x00F0) | ((qh >> (i+0+il+QK5_0/2) << 8 ) & 0x00100))
+                + yl[i + 9] * ((qs[i / 2] & 0xF000) | ((qh >> (i+1+il+QK5_0/2) << 16) & 0x10000));
+    }
+    // sumy * -16 folds the constant -16 offset of every q5_0 quant into a single term
+    return d * (sumy * -16.f + acc[0] + acc[1]);
+}
+
+// function to calculate the inner product between half a q5_1 block and 16 floats (yl), sumy is SUM(yl[i])
+// il indicates where the q5 quants begin (0 or QK5_1/4)
+// we assume that the yl's have been multiplied with the appropriate scale factor
+// that corresponds to the missing bit shifts (1, 1/16, 1/256, 1/4096)
+inline float block_q_n_dot_y(device const block_q5_1 * qb_curr, float sumy, thread float * yl, int il) {
+    float d = qb_curr->d;
+    float m = qb_curr->m;
+
+    float2 acc = 0.f;
+
+    // skip the d, m and qh header fields (4 uint16s) to reach the packed low nibbles
+    device const uint16_t * qs = ((device const uint16_t *)qb_curr + 4 + il/2);
+    const uint32_t qh = *((device const uint32_t *)qb_curr->qh);
+
+    for (int i = 0; i < 8; i+=2) {
+        // NOTE(review): the high-bit stride reuses QK5_0/2 here — presumably QK5_0 == QK5_1; confirm against ggml-common.h
+        acc[0] += yl[i + 0] * ((qs[i / 2] & 0x000F) | ((qh >> (i+0+il        ) << 4 ) & 0x00010))
+                + yl[i + 1] * ((qs[i / 2] & 0x0F00) | ((qh >> (i+1+il        ) << 12) & 0x01000));
+        acc[1] += yl[i + 8] * ((qs[i / 2] & 0x00F0) | ((qh >> (i+0+il+QK5_0/2) << 8 ) & 0x00100))
+                + yl[i + 9] * ((qs[i / 2] & 0xF000) | ((qh >> (i+1+il+QK5_0/2) << 16) & 0x10000));
+    }
+    return d * (acc[0] + acc[1]) + sumy * m;
+}
+
+// putting them in the kernel causes a significant performance penalty
+#define N_DST 4 // each SIMD group works on 4 rows
+#define N_SIMDGROUP 2 // number of SIMD groups in a thread group
+//Note: This is a template, but strictly speaking it only applies to
+//      quantizations where the block size is 32. It also does not
+//      guard against the number of rows not being divisible by
+//      N_DST, so this is another explicit assumption of the implementation.
// ---------------------------------------------------------------------------
// Quantized (32-element block) matrix x f32-vector multiplication.
// NOTE(review): the template argument lists in this region were stripped in
// transit (text between '<' and '>' is missing in this copy); they are
// restored below to match the upstream llama.cpp ggml-metal.metal sources.
// ---------------------------------------------------------------------------

// Generic mat*vec for Q4_0/Q4_1/Q5_0/Q5_1 (all use 32-element blocks, QK4_0).
// Each simdgroup produces nr output rows; nsg simdgroups per threadgroup; nw
// is the SIMD width. Each thread handles half a quant block per iteration,
// and per-row partial sums are reduced with simd_sum at the end.
template<typename block_q_type, int nr, int nsg, int nw>
void mul_vec_q_n_f32_impl(
        device const void  * src0,
        device const float * src1,
        device       float * dst,
        int64_t ne00, int64_t ne01, int64_t ne02,
        int64_t ne10, int64_t ne12,
        int64_t ne0,  int64_t ne1,
        uint r2, uint r3,
        threadgroup int8_t * shared_values, // unused; kept for a uniform impl signature
        uint3 tgpig, uint tiisg, uint sgitg) {
    const int nb = ne00/QK4_0; // quant blocks per row

    const int r0 = tgpig.x;
    const int r1 = tgpig.y;
    const int im = tgpig.z;

    const int first_row = (r0 * nsg + sgitg) * nr;

    // broadcast indices for the two batch dimensions
    const uint i12 = im%ne12;
    const uint i13 = im/ne12;

    const uint offset0 = first_row * nb + (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02);

    device const block_q_type * x = (device const block_q_type *) src0 + offset0;
    device const float        * y = (device const float        *) src1 + r1*ne10 + im*ne00*ne1;

    float yl[16];           // src1 vector cache
    float sumf[nr] = {0.f};

    const int ix = (tiisg/2);   // starting block index for this lane
    const int il = (tiisg%2)*8; // first or second half of the block

    device const float * yb = y + ix * QK4_0 + il;

    // each thread in a SIMD group deals with half a block
    for (int ib = ix; ib < nb; ib += nw/2) {
        float sumy = 0;
        for (int i = 0; i < 8; i += 2) {
            // pre-scale y by 1, 1/256, 1/16, 1/4096 so block_q_n_dot_y can
            // fold the nibble/high-bit unpacking shifts into the dot product
            sumy += yb[i] + yb[i+1];
            yl[i+0] = yb[i+ 0];
            yl[i+1] = yb[i+ 1]/256.f;

            sumy += yb[i+16] + yb[i+17];
            yl[i+8] = yb[i+16]/16.f;
            yl[i+9] = yb[i+17]/4096.f;
        }

        for (int row = 0; row < nr; row++) {
            sumf[row] += block_q_n_dot_y(x+ib+row*nb, sumy, yl, il);
        }

        yb += QK4_0 * 16;
    }

    for (int row = 0; row < nr; ++row) {
        const float tot = simd_sum(sumf[row]);
        if (tiisg == 0 && first_row + row < ne01) {
            dst[im*ne0*ne1 + r1*ne0 + first_row + row] = tot;
        }
    }
}

kernel void kernel_mul_mv_q4_0_f32(
        device const void  * src0,
        device const float * src1,
        device       float * dst,
        constant  int64_t & ne00, constant  int64_t & ne01, constant  int64_t & ne02,
        constant uint64_t & nb00, constant uint64_t & nb01, constant uint64_t & nb02,
        constant  int64_t & ne10, constant  int64_t & ne11, constant  int64_t & ne12,
        constant uint64_t & nb10, constant uint64_t & nb11, constant uint64_t & nb12,
        constant  int64_t & ne0,  constant  int64_t & ne1,
        constant     uint & r2,   constant     uint & r3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint  tiisg[[thread_index_in_simdgroup]],
        uint  sgitg[[simdgroup_index_in_threadgroup]]) {
    mul_vec_q_n_f32_impl<block_q4_0, N_DST, N_SIMDGROUP, N_SIMDWIDTH>(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,nullptr,tgpig,tiisg,sgitg);
}

kernel void kernel_mul_mv_q4_1_f32(
        device const void  * src0,
        device const float * src1,
        device       float * dst,
        constant  int64_t & ne00, constant  int64_t & ne01, constant  int64_t & ne02,
        constant uint64_t & nb00, constant uint64_t & nb01, constant uint64_t & nb02,
        constant  int64_t & ne10, constant  int64_t & ne11, constant  int64_t & ne12,
        constant uint64_t & nb10, constant uint64_t & nb11, constant uint64_t & nb12,
        constant  int64_t & ne0,  constant  int64_t & ne1,
        constant     uint & r2,   constant     uint & r3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint  tiisg[[thread_index_in_simdgroup]],
        uint  sgitg[[simdgroup_index_in_threadgroup]]) {
    mul_vec_q_n_f32_impl<block_q4_1, N_DST, N_SIMDGROUP, N_SIMDWIDTH>(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,nullptr,tgpig,tiisg,sgitg);
}

kernel void kernel_mul_mv_q5_0_f32(
        device const void  * src0,
        device const float * src1,
        device       float * dst,
        constant  int64_t & ne00, constant  int64_t & ne01, constant  int64_t & ne02,
        constant uint64_t & nb00, constant uint64_t & nb01, constant uint64_t & nb02,
        constant  int64_t & ne10, constant  int64_t & ne11, constant  int64_t & ne12,
        constant uint64_t & nb10, constant uint64_t & nb11, constant uint64_t & nb12,
        constant  int64_t & ne0,  constant  int64_t & ne1,
        constant     uint & r2,   constant     uint & r3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint  tiisg[[thread_index_in_simdgroup]],
        uint  sgitg[[simdgroup_index_in_threadgroup]]) {
    mul_vec_q_n_f32_impl<block_q5_0, N_DST, N_SIMDGROUP, N_SIMDWIDTH>(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,nullptr,tgpig,tiisg,sgitg);
}

kernel void kernel_mul_mv_q5_1_f32(
        device const void  * src0,
        device const float * src1,
        device       float * dst,
        constant  int64_t & ne00, constant  int64_t & ne01, constant  int64_t & ne02,
        constant uint64_t & nb00, constant uint64_t & nb01, constant uint64_t & nb02,
        constant  int64_t & ne10, constant  int64_t & ne11, constant  int64_t & ne12,
        constant uint64_t & nb10, constant uint64_t & nb11, constant uint64_t & nb12,
        constant  int64_t & ne0,  constant  int64_t & ne1,
        constant     uint & r2,   constant     uint & r3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint  tiisg[[thread_index_in_simdgroup]],
        uint  sgitg[[simdgroup_index_in_threadgroup]]) {
    mul_vec_q_n_f32_impl<block_q5_1, N_DST, N_SIMDGROUP, N_SIMDWIDTH>(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,nullptr,tgpig,tiisg,sgitg);
}


#define NB_Q8_0 8

// Q8_0 mat*vec: quants are plain int8 scaled by d, so no pre-scaling of the
// y values is required. Each thread handles NB_Q8_0 quants per step.
void kernel_mul_mv_q8_0_f32_impl(
        device const void  * src0,
        device const float * src1,
        device       float * dst,
        int64_t ne00, int64_t ne01, int64_t ne02,
        int64_t ne10, int64_t ne12,
        int64_t ne0,  int64_t ne1,
        uint r2, uint r3,
        threadgroup int8_t * shared_values, // unused; uniform impl signature
        uint3 tgpig, uint tiisg, uint sgitg) {
    const int nr  = N_DST;
    const int nsg = N_SIMDGROUP;
    const int nw  = N_SIMDWIDTH;

    const int nb = ne00/QK8_0;
    const int r0 = tgpig.x;
    const int r1 = tgpig.y;
    const int im = tgpig.z;

    const int first_row = (r0 * nsg + sgitg) * nr;

    const uint i12 = im%ne12;
    const uint i13 = im/ne12;

    const uint offset0 = first_row * nb + (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02);

    device const block_q8_0 * x = (device const block_q8_0 *) src0 + offset0;
    device const float      * y = (device const float      *) src1 + r1*ne10 + im*ne00*ne1;

    float yl[NB_Q8_0];
    float sumf[nr] = {0.f};

    const int ix = tiisg/4;
    const int il = tiisg%4;

    device const float * yb = y + ix * QK8_0 + NB_Q8_0*il;

    // each thread in a SIMD group deals with NB_Q8_0 quants at a time
    for (int ib = ix; ib < nb; ib += nw/4) {
        for (int i = 0; i < NB_Q8_0; ++i) {
            yl[i] = yb[i];
        }

        for (int row = 0; row < nr; row++) {
            device const int8_t * qs = x[ib+row*nb].qs + NB_Q8_0*il;
            float sumq = 0.f;
            for (int iq = 0; iq < NB_Q8_0; ++iq) {
                sumq += qs[iq] * yl[iq];
            }
            sumf[row] += sumq*x[ib+row*nb].d;
        }

        yb += NB_Q8_0 * nw;
    }

    for (int row = 0; row < nr; ++row) {
        const float tot = simd_sum(sumf[row]);
        if (tiisg == 0 && first_row + row < ne01) {
            dst[r1*ne0 + im*ne0*ne1 + first_row + row] = tot;
        }
    }
}

[[host_name("kernel_mul_mv_q8_0_f32")]]
kernel void kernel_mul_mv_q8_0_f32(
        device const void  * src0,
        device const float * src1,
        device       float * dst,
        constant  int64_t & ne00, constant  int64_t & ne01, constant  int64_t & ne02,
        constant uint64_t & nb00, constant uint64_t & nb01, constant uint64_t & nb02,
        constant  int64_t & ne10, constant  int64_t & ne11, constant  int64_t & ne12,
        constant uint64_t & nb10, constant uint64_t & nb11, constant uint64_t & nb12,
        constant  int64_t & ne0,  constant  int64_t & ne1,
        constant     uint & r2,   constant     uint & r3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint  tiisg[[thread_index_in_simdgroup]],
        uint  sgitg[[simdgroup_index_in_threadgroup]]) {
    kernel_mul_mv_q8_0_f32_impl(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,nullptr,tgpig,tiisg,sgitg);
}

#define N_MV_T_T 4

// Generic typed (f32/f16) mat*vec. T0/T04 are the src0 scalar/vec4 types,
// T1/T14 the src1 scalar/vec4 types. N_MV_T_T rows are handled per
// threadgroup-y; a vec4 fast path kicks in for rows of >= 128 elements, with
// a scalar pass for the non-multiple-of-4 tail.
template<typename T0, typename T04, typename T1, typename T14>
void kernel_mul_mv_impl(
        device const char * src0,
        device const char * src1,
        device      float * dst,
        int64_t  ne00, int64_t  ne01, int64_t  ne02,
        uint64_t nb00, uint64_t nb01, uint64_t nb02,
        int64_t  ne10, int64_t  ne11, int64_t  ne12,
        uint64_t nb10, uint64_t nb11, uint64_t nb12,
        int64_t  ne0,  int64_t  ne1,
        uint r2, uint r3,
        uint3 tgpig, uint tiisg) {
    const int64_t r0 = tgpig.x;
    const int64_t rb = tgpig.y*N_MV_T_T;
    const int64_t im = tgpig.z;

    const uint i12 = im%ne12;
    const uint i13 = im/ne12;

    const uint offset0 = r0*nb01 + (i12/r2)*nb02 + (i13/r3)*nb02*ne02;

    device const T0 * x = (device const T0 *) (src0 + offset0);

    if (ne00 < 128) {
        // scalar path for short rows
        for (int row = 0; row < N_MV_T_T; ++row) {
            int r1 = rb + row;
            if (r1 >= ne11) {
                break;
            }

            device const T1 * y = (device const T1 *) (src1 + r1*nb11 + im*nb12);

            float sumf = 0;
            for (int i = tiisg; i < ne00; i += 32) {
                sumf += (T0) x[i] * (T1) y[i];
            }

            float all_sum = simd_sum(sumf);
            if (tiisg == 0) {
                dst[im*ne1*ne0 + r1*ne0 + r0] = all_sum;
            }
        }
    } else {
        // vec4 path
        device const T04 * x4 = (device const T04 *) x;
        for (int row = 0; row < N_MV_T_T; ++row) {
            int r1 = rb + row;
            if (r1 >= ne11) {
                break;
            }

            device const T1  * y  = (device const T1  *) (src1 + r1*nb11 + im*nb12);
            device const T14 * y4 = (device const T14 *) y;

            float sumf = 0;
            for (int i = tiisg; i < ne00/4; i += 32) {
                for (int k = 0; k < 4; ++k) sumf += (float) (x4[i][k] * y4[i][k]);
            }

            float all_sum = simd_sum(sumf);
            if (tiisg == 0) {
                // pick up the non-multiple-of-4 tail scalars
                for (int i = 4*(ne00/4); i < ne00; ++i) all_sum += (float) (x[i] * y[i]);
                dst[im*ne1*ne0 + r1*ne0 + r0] = all_sum;
            }
        }
    }
}
// Host-visible entry point for kernel_mul_mv_impl.
// NOTE(review): template parameter lists and instantiation arguments were
// stripped in transit; restored from upstream llama.cpp ggml-metal.metal.
template<typename T0, typename T04, typename T1, typename T14>
kernel void kernel_mul_mv(
        device const char * src0,
        device const char * src1,
        device      float * dst,
        constant  int64_t & ne00, constant  int64_t & ne01, constant  int64_t & ne02,
        constant uint64_t & nb00, constant uint64_t & nb01, constant uint64_t & nb02,
        constant  int64_t & ne10, constant  int64_t & ne11, constant  int64_t & ne12,
        constant uint64_t & nb10, constant uint64_t & nb11, constant uint64_t & nb12,
        constant  int64_t & ne0,  constant  int64_t & ne1,
        constant     uint & r2,   constant     uint & r3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint  tiisg[[thread_index_in_simdgroup]]) {
    kernel_mul_mv_impl<T0, T04, T1, T14>(
        src0, src1, dst,
        ne00, ne01, ne02,
        nb00, nb01, nb02,
        ne10, ne11, ne12,
        nb10, nb11, nb12,
        ne0, ne1,
        r2, r3,
        tgpig, tiisg);
}

typedef decltype(kernel_mul_mv<float, float4, float, float4>) mul_mv_t;

template [[host_name("kernel_mul_mv_f32_f32")]] kernel mul_mv_t kernel_mul_mv<float, float4, float, float4>;
template [[host_name("kernel_mul_mv_f16_f32")]] kernel mul_mv_t kernel_mul_mv<half,  half4,  float, float4>;
template [[host_name("kernel_mul_mv_f16_f16")]] kernel mul_mv_t kernel_mul_mv<half,  half4,  half,  half4>;

// One-destination-row variant: each threadgroup-y handles exactly one src1
// row (no N_MV_T_T batching).
template<typename T, typename T4>
kernel void kernel_mul_mv_1row(
        device const char * src0,
        device const char * src1,
        device      float * dst,
        constant  int64_t & ne00, constant  int64_t & ne01, constant  int64_t & ne02,
        constant uint64_t & nb00, constant uint64_t & nb01, constant uint64_t & nb02,
        constant  int64_t & ne10, constant  int64_t & ne11, constant  int64_t & ne12,
        constant uint64_t & nb10, constant uint64_t & nb11, constant uint64_t & nb12,
        constant  int64_t & ne0,  constant  int64_t & ne1,
        constant     uint & r2,   constant     uint & r3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint  tiisg[[thread_index_in_simdgroup]]) {

    const int64_t r0 = tgpig.x;
    const int64_t r1 = tgpig.y;
    const int64_t im = tgpig.z;

    const uint i12 = im%ne12;
    const uint i13 = im/ne12;

    const uint offset0 = r0*nb01 + (i12/r2)*nb02 + (i13/r3)*nb02*ne02;

    device const T     * x = (device const T     *) (src0 + offset0);
    device const float * y = (device const float *) (src1 + r1*nb11 + im*nb12);

    float sumf = 0;
    if (ne00 < 128) {
        // scalar path for short rows
        for (int i = tiisg; i < ne00; i += 32) {
            sumf += (float) x[i] * (float) y[i];
        }
        float all_sum = simd_sum(sumf);
        if (tiisg == 0) {
            dst[im*ne1*ne0 + r1*ne0 + r0] = all_sum;
        }
    } else {
        // vec4 path with a scalar pass over the tail
        device const T4     * x4 = (device const T4     *) x;
        device const float4 * y4 = (device const float4 *) y;

        for (int i = tiisg; i < ne00/4; i += 32) {
            for (int k = 0; k < 4; ++k) sumf += (float) (x4[i][k] * y4[i][k]);
        }

        float all_sum = simd_sum(sumf);

        if (tiisg == 0) {
            for (int i = 4*(ne00/4); i < ne00; ++i) all_sum += (float) (x[i] * y[i]);
            dst[im*ne1*ne0 + r1*ne0 + r0] = all_sum;
        }
    }
}

typedef decltype(kernel_mul_mv_1row<half, half4>) mul_mv_1row_t;

template [[host_name("kernel_mul_mv_f16_f32_1row")]] kernel mul_mv_1row_t kernel_mul_mv_1row<half, half4>;

// Assumes row size (ne00) is a multiple of 4
template<typename T, typename T4>
kernel void kernel_mul_mv_l4(
        device const char * src0,
        device const char * src1,
        device      float * dst,
        constant  int64_t & ne00, constant  int64_t & ne01, constant  int64_t & ne02,
        constant uint64_t & nb00, constant uint64_t & nb01, constant uint64_t & nb02,
        constant  int64_t & ne10, constant  int64_t & ne11, constant  int64_t & ne12,
        constant uint64_t & nb10, constant uint64_t & nb11, constant uint64_t & nb12,
        constant  int64_t & ne0,  constant  int64_t & ne1,
        constant     uint & r2,   constant     uint & r3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint  tiisg[[thread_index_in_simdgroup]]) {

    const int nrows = ne11;
    const int64_t r0 = tgpig.x;
    const int64_t im = tgpig.z;

    const uint i12 = im%ne12;
    const uint i13 = im/ne12;

    const uint offset0 = r0*nb01 + (i12/r2)*nb02 + (i13/r3)*nb02*ne02;

    device const T4 * x4 = (device const T4 *) (src0 + offset0);

    for (int r1 = 0; r1 < nrows; ++r1) {
        device const float4 * y4 = (device const float4 *) (src1 + r1*nb11 + im*nb12);

        float sumf = 0;
        for (int i = tiisg; i < ne00/4; i += 32) {
            for (int k = 0; k < 4; ++k) sumf += (float) (x4[i][k] * y4[i][k]);
        }

        float all_sum = simd_sum(sumf);
        if (tiisg == 0) {
            dst[im*ne1*ne0 + r1*ne0 + r0] = all_sum;
        }
    }
}

typedef decltype(kernel_mul_mv_l4<half, half4>) mul_mv_l4_t;

template [[host_name("kernel_mul_mv_f16_f32_l4")]] kernel mul_mv_l4_t kernel_mul_mv_l4<half, half4>;

static float rope_yarn_ramp(const float low, const float high, const int i0) {
    const float y = (i0 / 2 - low) / max(0.001f, high - low);
    return 1.0f - min(1.0f, max(0.0f, y));
}

// YaRN algorithm based on LlamaYaRNScaledRotaryEmbedding.py from https://github.com/jquesnelle/yarn
// MIT licensed. Copyright (c) 2023 Jeffrey Quesnelle and Bowen Peng.
static void rope_yarn(
    float theta_extrap, float freq_scale, float corr_dims[2], int64_t i0, float ext_factor, float mscale,
    thread float * cos_theta, thread float * sin_theta) {
    // Get n-d rotational scaling corrected for extrapolation
    float theta_interp = freq_scale * theta_extrap;
    float theta = theta_interp;
    if (ext_factor != 0.0f) {
        float ramp_mix = rope_yarn_ramp(corr_dims[0], corr_dims[1], i0) * ext_factor;
        theta = theta_interp * (1 - ramp_mix) + theta_extrap * ramp_mix;

        // Get n-d magnitude scaling corrected for interpolation
        mscale *= 1.0f + 0.1f * log(1.0f / freq_scale);
    }
    *cos_theta = cos(theta) * mscale;
    *sin_theta = sin(theta) * mscale;
}

// Apparently solving `n_rot = 2pi * x * base^((2 * max_pos_emb) / n_dims)` for x, we get
// `corr_fac(n_rot) = n_dims * log(max_pos_emb / (n_rot * 2pi)) / (2 * log(base))`
static float rope_yarn_corr_factor(int n_dims, int n_ctx_orig, float n_rot, float base) {
    return n_dims * log(n_ctx_orig / (n_rot * 2 * M_PI_F)) / (2 * log(base));
}

static void rope_yarn_corr_dims(
    int n_dims, int n_ctx_orig, float freq_base, float beta_fast, float beta_slow, float dims[2]
) {
    // start and end correction dims
    dims[0] = max(0.0f,         floor(rope_yarn_corr_factor(n_dims, n_ctx_orig, beta_fast, freq_base)));
    dims[1] = min(n_dims - 1.0f, ceil(rope_yarn_corr_factor(n_dims, n_ctx_orig, beta_slow, freq_base)));
}
// Standard (interleaved-pair) RoPE with YaRN correction: rotates adjacent
// element pairs (i0, i0+1). src2 holds optional per-dimension frequency
// factors; the host passes src2 == src0 when none are provided.
// NOTE(review): template parameter lists and instantiation arguments were
// stripped in transit; restored from upstream llama.cpp ggml-metal.metal.
template<typename T>
kernel void kernel_rope_norm(
        device const void    * src0,
        device const int32_t * src1, // token positions
        device const float   * src2, // frequency factors (== src0 when absent)
        device       float   * dst,
        constant  int64_t & ne00, constant  int64_t & ne01, constant  int64_t & ne02, constant  int64_t & ne03,
        constant uint64_t & nb00, constant uint64_t & nb01, constant uint64_t & nb02, constant uint64_t & nb03,
        constant  int64_t & ne0,  constant  int64_t & ne1,  constant  int64_t & ne2,  constant  int64_t & ne3,
        constant uint64_t & nb0,  constant uint64_t & nb1,  constant uint64_t & nb2,  constant uint64_t & nb3,
        constant      int & n_past,
        constant      int & n_dims,
        constant      int & n_ctx_orig,
        constant    float & freq_base,
        constant    float & freq_scale,
        constant    float & ext_factor,
        constant    float & attn_factor,
        constant    float & beta_fast,
        constant    float & beta_slow,
        uint  tiitg[[thread_index_in_threadgroup]],
        uint3 tptg[[threads_per_threadgroup]],
        uint3 tgpig[[threadgroup_position_in_grid]]) {
    const int64_t i3 = tgpig[2];
    const int64_t i2 = tgpig[1];
    const int64_t i1 = tgpig[0];

    float corr_dims[2];
    rope_yarn_corr_dims(n_dims, n_ctx_orig, freq_base, beta_fast, beta_slow, corr_dims);

    device const int32_t * pos = src1;

    const float theta_base = (float) pos[i2];
    const float inv_ndims = -1.f/n_dims;

    float cos_theta;
    float sin_theta;

    for (int64_t i0 = 2*tiitg; i0 < ne0; i0 += 2*tptg.x) {
        if (i0 < n_dims) {
            const int64_t ic = i0/2;

            const float theta = theta_base * pow(freq_base, inv_ndims*i0);

            // src2 == src0 signals "no frequency factors"
            const float freq_factor = src2 != src0 ? src2[ic] : 1.0f;

            rope_yarn(theta/freq_factor, freq_scale, corr_dims, i0, ext_factor, attn_factor, &cos_theta, &sin_theta);

            device const T * const src = (device T *)((device char *) src0 + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00);
            device       T * dst_data  = (device T *)((device char *) dst  + i3*nb3  + i2*nb2  + i1*nb1  + i0*nb0);

            const float x0 = src[0];
            const float x1 = src[1];

            dst_data[0] = x0*cos_theta - x1*sin_theta;
            dst_data[1] = x0*sin_theta + x1*cos_theta;
        } else {
            // dimensions past n_dims are copied through unrotated
            device const T * const src = (device T *)((device char *) src0 + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00);
            device       T * dst_data  = (device T *)((device char *) dst  + i3*nb3  + i2*nb2  + i1*nb1  + i0*nb0);

            dst_data[0] = src[0];
            dst_data[1] = src[1];
        }
    }
}

// NeoX-style RoPE: pairs element ic with element ic + n_dims/2 instead of
// adjacent elements.
template<typename T>
kernel void kernel_rope_neox(
        device const void    * src0,
        device const int32_t * src1, // token positions
        device const float   * src2, // frequency factors (== src0 when absent)
        device       float   * dst,
        constant  int64_t & ne00, constant  int64_t & ne01, constant  int64_t & ne02, constant  int64_t & ne03,
        constant uint64_t & nb00, constant uint64_t & nb01, constant uint64_t & nb02, constant uint64_t & nb03,
        constant  int64_t & ne0,  constant  int64_t & ne1,  constant  int64_t & ne2,  constant  int64_t & ne3,
        constant uint64_t & nb0,  constant uint64_t & nb1,  constant uint64_t & nb2,  constant uint64_t & nb3,
        constant      int & n_past,
        constant      int & n_dims,
        constant      int & n_ctx_orig,
        constant    float & freq_base,
        constant    float & freq_scale,
        constant    float & ext_factor,
        constant    float & attn_factor,
        constant    float & beta_fast,
        constant    float & beta_slow,
        uint  tiitg[[thread_index_in_threadgroup]],
        uint3 tptg[[threads_per_threadgroup]],
        uint3 tgpig[[threadgroup_position_in_grid]]) {
    const int64_t i3 = tgpig[2];
    const int64_t i2 = tgpig[1];
    const int64_t i1 = tgpig[0];

    float corr_dims[2];
    rope_yarn_corr_dims(n_dims, n_ctx_orig, freq_base, beta_fast, beta_slow, corr_dims);

    device const int32_t * pos = src1;

    const float theta_base = (float) pos[i2];
    const float inv_ndims = -1.f/n_dims;

    float cos_theta;
    float sin_theta;

    for (int64_t i0 = 2*tiitg; i0 < ne0; i0 += 2*tptg.x) {
        if (i0 < n_dims) {
            const int64_t ic = i0/2;

            const float theta = theta_base * pow(freq_base, inv_ndims*i0);

            const float freq_factor = src2 != src0 ? src2[ic] : 1.0f;

            rope_yarn(theta/freq_factor, freq_scale, corr_dims, i0, ext_factor, attn_factor, &cos_theta, &sin_theta);

            // NeoX pairing: (ic, ic + n_dims/2)
            device const T * const src = (device T *)((device char *) src0 + i3*nb03 + i2*nb02 + i1*nb01 + ic*nb00);
            device       T * dst_data  = (device T *)((device char *) dst  + i3*nb3  + i2*nb2  + i1*nb1  + ic*nb0);

            const float x0 = src[0];
            const float x1 = src[n_dims/2];

            dst_data[0]        = x0*cos_theta - x1*sin_theta;
            dst_data[n_dims/2] = x0*sin_theta + x1*cos_theta;
        } else {
            device const T * const src = (device T *)((device char *) src0 + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00);
            device       T * dst_data  = (device T *)((device char *) dst  + i3*nb3  + i2*nb2  + i1*nb1  + i0*nb0);

            dst_data[0] = src[0];
            dst_data[1] = src[1];
        }
    }
}

typedef decltype(kernel_rope_norm<float>) kernel_rope_norm_t;
typedef decltype(kernel_rope_neox<float>) kernel_rope_neox_t;

template [[host_name("kernel_rope_norm_f32")]] kernel kernel_rope_norm_t kernel_rope_norm<float>;
template [[host_name("kernel_rope_norm_f16")]] kernel kernel_rope_norm_t kernel_rope_norm<half>;

template [[host_name("kernel_rope_neox_f32")]] kernel kernel_rope_neox_t kernel_rope_neox<float>;
template [[host_name("kernel_rope_neox_f16")]] kernel kernel_rope_neox_t kernel_rope_neox<half>;

typedef void (im2col_t)(
        device const float * x,
        device       char  * dst,
        constant int32_t & ofs0,
        constant int32_t & ofs1,
        constant int32_t & IW,
        constant int32_t & IH,
        constant int32_t & CHW,
        constant int32_t & s0,
        constant int32_t & s1,
        constant int32_t & p0,
        constant int32_t & p1,
        constant int32_t & d0,
        constant int32_t & d1,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tgpg[[threadgroups_per_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3 ntg[[threads_per_threadgroup]]);
// im2col: gathers convolution patches into columns; out-of-bounds taps are
// written as zero so the subsequent matmul needs no bounds handling.
// NOTE(review): template parameter lists and instantiation arguments were
// stripped in transit; restored from upstream llama.cpp ggml-metal.metal.
template<typename T>
kernel void kernel_im2col(
        device const float * x,
        device       char  * dst,
        constant int32_t & ofs0,
        constant int32_t & ofs1,
        constant int32_t & IW,
        constant int32_t & IH,
        constant int32_t & CHW,
        constant int32_t & s0,
        constant int32_t & s1,
        constant int32_t & p0,
        constant int32_t & p1,
        constant int32_t & d0,
        constant int32_t & d1,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tgpg[[threadgroups_per_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3 ntg[[threads_per_threadgroup]]) {
    // source pixel coordinates for this tap (stride, dilation, padding applied)
    const int32_t iiw = tgpig[2] * s0 + tpitg[2] * d0 - p0;
    const int32_t iih = tgpig[1] * s1 + tpitg[1] * d1 - p1;

    const int32_t offset_dst =
        (tpitg[0] * tgpg[1] * tgpg[2] + tgpig[1] * tgpg[2] + tgpig[2]) * CHW +
        (tgpig[0] * (ntg[1] * ntg[2]) + tpitg[1] * ntg[2] + tpitg[2]);

    device T * pdst = (device T *) (dst);

    if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) {
        pdst[offset_dst] = 0.0f; // zero padding
    } else {
        const int32_t offset_src = tpitg[0] * ofs0 + tgpig[0] * ofs1;
        pdst[offset_dst] = x[offset_src + iih * IW + iiw];
    }
}

template [[host_name("kernel_im2col_f32")]] kernel im2col_t kernel_im2col<float>;
template [[host_name("kernel_im2col_f16")]] kernel im2col_t kernel_im2col<half>;

// Nearest-neighbour upscale by per-axis scale factors sf0..sf3.
kernel void kernel_upscale_f32(
        device const char * src0,
        device       char * dst,
        constant  int64_t & ne00, constant  int64_t & ne01, constant  int64_t & ne02, constant  int64_t & ne03,
        constant uint64_t & nb00, constant uint64_t & nb01, constant uint64_t & nb02, constant uint64_t & nb03,
        constant  int64_t & ne0,  constant  int64_t & ne1,  constant  int64_t & ne2,  constant  int64_t & ne3,
        constant uint64_t & nb0,  constant uint64_t & nb1,  constant uint64_t & nb2,  constant uint64_t & nb3,
        constant    float & sf0,  constant    float & sf1,  constant    float & sf2,  constant    float & sf3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3 ntg[[threads_per_threadgroup]]) {

    const int64_t i3 = tgpig.z;
    const int64_t i2 = tgpig.y;
    const int64_t i1 = tgpig.x;

    // map destination index back to the source index
    const int64_t i03 = i3/sf3;
    const int64_t i02 = i2/sf2;
    const int64_t i01 = i1/sf1;

    for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) {
        const int64_t i00 = i0/sf0;

        device const float * src0_ptr = (device const float *) (src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00);
        device       float * dst_ptr  = (device       float *) (dst  +  i3*nb3  +  i2*nb2  +  i1*nb1  +  i0*nb0);

        dst_ptr[0] = src0_ptr[0];
    }
}

// Copies src0 into dst and zero-fills every destination element that lies
// outside the source extents.
kernel void kernel_pad_f32(
        device const char * src0,
        device       char * dst,
        constant  int64_t & ne00, constant  int64_t & ne01, constant  int64_t & ne02, constant  int64_t & ne03,
        constant uint64_t & nb00, constant uint64_t & nb01, constant uint64_t & nb02, constant uint64_t & nb03,
        constant  int64_t & ne0,  constant  int64_t & ne1,  constant  int64_t & ne2,  constant  int64_t & ne3,
        constant uint64_t & nb0,  constant uint64_t & nb1,  constant uint64_t & nb2,  constant uint64_t & nb3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3 ntg[[threads_per_threadgroup]]) {

    const int64_t i3 = tgpig.z;
    const int64_t i2 = tgpig.y;
    const int64_t i1 = tgpig.x;

    const int64_t i03 = i3;
    const int64_t i02 = i2;
    const int64_t i01 = i1;

    device const float * src0_ptr = (device const float *) (src0 + i03*nb03 + i02*nb02 + i01*nb01);
    device       float * dst_ptr  = (device       float *) (dst  +  i3*nb3  +  i2*nb2  +  i1*nb1);

    if (i1 < ne01 && i2 < ne02 && i3 < ne03) {
        // row overlaps the source: copy the valid prefix, zero the rest
        for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) {
            if (i0 < ne00) {
                dst_ptr[i0] = src0_ptr[i0];
            } else {
                dst_ptr[i0] = 0.0f;
            }
        }

        return;
    }

    // row is entirely outside the source: zero-fill
    for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) {
        dst_ptr[i0] = 0.0f;
    }
}

// Inverse of pad: copies only the region of src0 that fits inside dst.
kernel void kernel_unpad_f32(
        device const char * src0,
        device       char * dst,
        constant  int64_t & ne00, constant  int64_t & ne01, constant  int64_t & ne02, constant  int64_t & ne03,
        constant uint64_t & nb00, constant uint64_t & nb01, constant uint64_t & nb02, constant uint64_t & nb03,
        constant  int64_t & ne0,  constant  int64_t & ne1,  constant  int64_t & ne2,  constant  int64_t & ne3,
        constant uint64_t & nb0,  constant uint64_t & nb1,  constant uint64_t & nb2,  constant uint64_t & nb3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3 ntg[[threads_per_threadgroup]]) {

    const int64_t i3 = tgpig.z;
    const int64_t i2 = tgpig.y;
    const int64_t i1 = tgpig.x;

    const int64_t i03 = i3;
    const int64_t i02 = i2;
    const int64_t i01 = i1;

    device const float * src0_ptr = (device const float *) (src0 + i03*nb03 + i02*nb02 + i01*nb01);
    device       float * dst_ptr  = (device       float *) (dst  +  i3*nb3  +  i2*nb2  +  i1*nb1);

    if (i1 < ne01 && i2 < ne02 && i3 < ne03) {
        for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) {
            if (i0 < ne00) {
                dst_ptr[i0] = src0_ptr[i0];
            }
        }

        return;
    }
}

// Writes the arithmetic sequence start, start+step, ... into dst.
kernel void kernel_arange_f32(
        device       char * dst,
        constant  int64_t & ne0,
        constant    float & start,
        constant    float & step,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3 ntg[[threads_per_threadgroup]]) {

    device float * dst_ptr = (device float *) dst;

    for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) {
        dst_ptr[i0] = start + step * i0;
    }
}

// Sinusoidal timestep embedding (as used by diffusion models): the first
// half of each row holds cos(t*freq_j), the second half sin(t*freq_j).
kernel void kernel_timestep_embedding_f32(
        device const char * src0,
        device       char * dst,
        constant uint64_t & nb1,
        constant      int & dim,
        constant      int & max_period,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3 ntg[[threads_per_threadgroup]]) {

    int i = tgpig.x;
    device float * embed_data = (device float *)(dst + i*nb1);

    int half_ = dim / 2;
    for (int j = tpitg.x; j < half_; j += ntg.x) {
        float timestep = ((device float *)src0)[i];
        float freq = (float)exp(-log((float)max_period) * j / half_);
        float arg = timestep * freq;
        embed_data[j        ] = cos(arg);
        embed_data[j + half_] = sin(arg);
    }

    // For odd dim the loop above fills indices 0..2*half_-1 == dim-2; zero
    // the final element. BUGFIX: the previous code wrote embed_data[dim],
    // one slot past the row, leaving index dim-1 uninitialized.
    if (dim % 2 != 0 && tpitg.x == 0) {
        embed_data[2 * half_] = 0.f;
    }
}
// bitonic sort implementation following the CUDA kernels as reference
// NOTE(review): the <ggml_sort_order order> template parameter and the
// instantiation arguments were stripped in transit; restored from upstream
// llama.cpp ggml-metal.metal.
typedef void (argsort_t)(
        device const float   * x,
        device       int32_t * dst,
        constant     int64_t & ncols,
        constant     int64_t & ncols_pad,
        threadgroup  int32_t * shared_values [[threadgroup(0)]],
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]]);

// Sorts the indices of one row of x into dst (ascending or descending per
// the template argument). ncols_pad is ncols rounded up to a power of two;
// padding indices (>= ncols) always compare as "greater" so they sink to
// the end and are dropped when copying the result out.
template<ggml_sort_order order>
kernel void kernel_argsort_f32_i32(
        device const float   * x,
        device       int32_t * dst,
        constant     int64_t & ncols,
        constant     int64_t & ncols_pad,
        threadgroup  int32_t * shared_values [[threadgroup(0)]],
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]]) {
    // bitonic sort
    int col = tpitg[0];
    int row = tgpig[1];

    if (col >= ncols_pad) return;

    device const float  * x_row   = x + row * ncols;
    threadgroup int32_t * dst_row = shared_values;

    // initialize indices
    dst_row[col] = col;

    threadgroup_barrier(mem_flags::mem_threadgroup);

    for (int k = 2; k <= ncols_pad; k *= 2) {
        for (int j = k / 2; j > 0; j /= 2) {
            int ixj = col ^ j;
            if (ixj > col) {
                if ((col & k) == 0) {
                    if (dst_row[col] >= ncols ||
                        (dst_row[ixj] < ncols && (order == GGML_SORT_ORDER_ASC ?
                            x_row[dst_row[col]] > x_row[dst_row[ixj]] :
                            x_row[dst_row[col]] < x_row[dst_row[ixj]]))
                    ) {
                        SWAP(dst_row[col], dst_row[ixj]);
                    }
                } else {
                    if (dst_row[ixj] >= ncols ||
                        (dst_row[col] < ncols && (order == GGML_SORT_ORDER_ASC ?
                            x_row[dst_row[col]] < x_row[dst_row[ixj]] :
                            x_row[dst_row[col]] > x_row[dst_row[ixj]]))
                    ) {
                        SWAP(dst_row[col], dst_row[ixj]);
                    }
                }
            }
            threadgroup_barrier(mem_flags::mem_threadgroup);
        }
    }

    // copy the result to dst without the padding
    if (col < ncols) {
        dst[row * ncols + col] = dst_row[col];
    }
}

template [[host_name("kernel_argsort_f32_i32_asc")]]  kernel argsort_t kernel_argsort_f32_i32<GGML_SORT_ORDER_ASC>;
template [[host_name("kernel_argsort_f32_i32_desc")]] kernel argsort_t kernel_argsort_f32_i32<GGML_SORT_ORDER_DESC>;

kernel void kernel_leaky_relu_f32(
        device const float * src0,
        device       float * dst,
        constant     float & slope,
        uint tpig[[thread_position_in_grid]]) {
    dst[tpig] = src0[tpig] > 0.0f ? src0[tpig] : src0[tpig] * slope;
}

typedef void (flash_attn_ext_f16_t)(
        device const char * q,
        device const char * k,
        device const char * v,
        device const char * mask,
        device      float * dst,
        constant  int64_t & ne01, constant  int64_t & ne02, constant  int64_t & ne03,
        constant uint64_t & nb01, constant uint64_t & nb02, constant uint64_t & nb03,
        constant  int64_t & ne11, constant  int64_t & ne12, constant  int64_t & ne13,
        constant uint64_t & nb11, constant uint64_t & nb12, constant uint64_t & nb13,
        constant uint64_t & nb21, constant uint64_t & nb22, constant uint64_t & nb23,
        constant uint64_t & nb31,
        constant  int64_t & ne1,  constant  int64_t & ne2,
        constant    float & scale,
        constant    float & max_bias,
        constant    float & m0,
        constant    float & m1,
        constant uint32_t & n_head_log2,
        constant    float & logit_softcap,
        threadgroup half * shared,
        uint3  tgpig[[threadgroup_position_in_grid]],
        uint3  tpitg[[thread_position_in_threadgroup]],
        uint3  ntg[[threads_per_threadgroup]],
        ushort tiisg[[thread_index_in_simdgroup]],
        ushort sgitg[[simdgroup_index_in_threadgroup]]);

// ref: https://arxiv.org/pdf/2307.08691.pdf
template<int64_t D, int64_t Q, int64_t C> // head size, queries per threadgroup, cache items per threadgroup
kernel void kernel_flash_attn_ext_f16(
+ device const char * q, + device const char * k, + device const char * v, + device const char * mask, + device float * dst, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant uint64_t & nb21, + constant uint64_t & nb22, + constant uint64_t & nb23, + constant uint64_t & nb31, + constant int64_t & ne1, + constant int64_t & ne2, + constant float & scale, + constant float & max_bias, + constant float & m0, + constant float & m1, + constant uint32_t & n_head_log2, + constant float & logit_softcap, + threadgroup half * shared [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]], + ushort tiisg[[thread_index_in_simdgroup]], + ushort sgitg[[simdgroup_index_in_threadgroup]]) { + const short nsg = ntg.y; // number of simdgroups + + const short iq3 = tgpig[2]; + const short iq2 = tgpig[1]; + const short iq1 = tgpig[0]*Q; + + const short D4 = D/4; + const short D8 = D/8; + //const short Q8 = Q/8; + const short NW = N_SIMDWIDTH; + const short SH = (C + Q); // shared memory per simdgroup in (half) + + const short T = D + 2*nsg*SH; // shared memory size per query in (half) + const short TF = T/2; // shared memory size per query in (float) + const short T4 = T/4; // shared memory size per query in (half4) + + threadgroup half * sq = (threadgroup half *) (shared + 0*D); // holds the query data + threadgroup half4 * sq4 = (threadgroup half4 *) (shared + 0*D); // same as above but in half4 + threadgroup float * ss = (threadgroup float *) (shared + 2*sgitg*SH + 1*D); // scratch buffer for attention and diagonal matrix + + // store the result for all queries in local memory in 8x8 matrices (the O 
matrix from the paper) + simdgroup_half8x8 lo[D8]; + + // load heads from Q to shared memory + for (short j = sgitg; j < Q; j += nsg) { + device const float4 * q4 = (device const float4 *) ((device const char *) q + ((iq1 + j)*nb01 + iq2*nb02 + iq3*nb03)); + + for (short i = tiisg; i < D4; i += NW) { + if (iq1 + j < ne01) { + sq4[j*T4 + i] = (half4) q4[i]; + } else { + sq4[j*T4 + i] = 0.0h; + } + } + } + + // zero out lo + for (short i = 0; i < D8; ++i) { + lo[i] = make_filled_simdgroup_matrix(0.0h); + } + + // zero out shared memory SH + for (short j = 0; j < Q; ++j) { + for (short i = tiisg; i < SH; i += NW) { + ss[j*TF + i] = 0.0f; + } + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + { + float S[Q] = { [0 ... Q-1] = 0.0h }; + float M[Q] = { [0 ... Q-1] = -FLT_MAX/2 }; + + // assume K and V are same shape + const short ne22 = ne12; + const short ne23 = ne13; + + // broadcast + const short rk2 = ne02/ne12; + const short rk3 = ne03/ne13; + + const short rv2 = ne02/ne22; + const short rv3 = ne03/ne23; + + // k indices + const short ik2 = iq2/rk2; + const short ik3 = iq3/rk3; + + // v indices + const short iv2 = iq2/rv2; + const short iv3 = iq3/rv3; + + // load the queries from shared memory into local memory + simdgroup_half8x8 mq[D8]; + + for (short i = 0; i < D8; ++i) { + simdgroup_load(mq[i], sq + i*8, T); + } + + // pointer to the mask + device const half * mp = (device const half *) (mask + iq1*nb31); + + float slope = 1.0f; + + // ALiBi + if (max_bias > 0.0f) { + const uint32_t h = iq2; + + const float base = h < n_head_log2 ? m0 : m1; + const int exph = h < n_head_log2 ? 
h + 1 : 2*(h - n_head_log2) + 1; + + slope = pow(base, exph); + } + + // loop over the KV cache + // each simdgroup handles blocks of Q rows and C columns + for (int ic0 = 0; ic0 < ne11; ic0 += C*nsg) { + const int ic = ic0 + C*sgitg; + if (ic >= ne11) { + break; + } + + // Q*K^T + { + for (short cc = 0; cc < C/8; ++cc) { + simdgroup_float8x8 mqk = make_filled_simdgroup_matrix(0.h); + + device const half * pk = (device const half *) ((device const char *) k + ((ic + 8*cc)*nb11 + ik2*nb12 + ik3*nb13)); + + for (short i = 0; i < D8; ++i) { + simdgroup_half8x8 mk; + simdgroup_load(mk, pk + i*8, nb11/sizeof(half), 0, true); // transpose + + simdgroup_multiply_accumulate(mqk, mq[i], mk, mqk); + } + + simdgroup_store(mqk, ss + 8*cc, TF, 0, false); + } + } + + // used to detect blocks full of -INF + float smax = -INFINITY; + + // online softmax + { + float ms[Q]; + + for (short j = 0; j < Q; ++j) { + const float m = M[j]; + + // scale and apply the logitcap / mask + float s = ss[j*TF + tiisg]*scale; + + if (logit_softcap != 0.0f) { + s = logit_softcap*precise::tanh(s); + } + + if (mask != q) { + // mqk = mqk + mask*slope + s += slope*mp[ic + j*nb31/sizeof(half) + tiisg]; + } + + smax = simd_max(max(smax, s)); + M[j] = simd_max(max(M[j], s)); + + ms[j] = exp(m - M[j]); + const float vs = exp(s - M[j]); + + S[j] = S[j]*ms[j] + simd_sum(vs); + + // the P matrix from the paper (Q rows, C columns) + ss[j*TF + tiisg] = vs; + } + + // create a QxQ diagonal matrix for rescaling the output + if (tiisg < Q) { + ss[tiisg*TF + C + tiisg] = ms[tiisg]; + } + } + + // skip -INF blocks + if (smax == -INFINITY) { + continue; + } + + // O = diag(ms)*O + { + simdgroup_float8x8 mm; + simdgroup_load(mm, ss + C, TF, 0, false); + + for (short i = 0; i < D8; ++i) { + simdgroup_multiply(lo[i], mm, lo[i]); + } + } + + // O = O + (Q*K^T)*V + { + for (short cc = 0; cc < C/8; ++cc) { + device const half * pv = (device const half *) ((device const char *) v + ((ic + 8*cc)*nb21 + iv2*nb22 + iv3*nb23)); 
+ + for (short i = 0; i < D8; ++i) { + simdgroup_half8x8 mk; + simdgroup_load(mk, pv + i*8, nb21/sizeof(half), 0, false); + + simdgroup_float8x8 mv; + simdgroup_load(mv, ss + 8*cc, TF, 0, false); + + simdgroup_multiply_accumulate(lo[i], mv, mk, lo[i]); + } + } + } + } + + // these are needed for reducing the results from the simdgroups (reuse the ss buffer) + for (short j = 0; j < Q; ++j) { + if (tiisg == 0) { + ss[j*TF + 0] = S[j]; + ss[j*TF + 1] = M[j]; + } + } + } + + // reduce the warps sequentially + for (short sg = 1; sg < nsg; ++sg) { + float S = { 0.0h }; + float M = { -FLT_MAX/2 }; + + threadgroup_barrier(mem_flags::mem_threadgroup); + + // each simdgroup stores its output to shared memory, reusing sq + if (sgitg == sg) { + for (short i = 0; i < D8; ++i) { + simdgroup_store(lo[i], sq + i*8, T, 0, false); + } + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + // the first simdgroup accumulates the results from the other simdgroups + if (sgitg == 0) { + for (short j = 0; j < Q; ++j) { + const float S0 = ss[j*TF + 0]; + const float S1 = ss[j*TF + sg*SH + 0]; + + const float M0 = ss[j*TF + 1]; + const float M1 = ss[j*TF + sg*SH + 1]; + + M = max(M0, M1); + + const float ms0 = exp(M0 - M); + const float ms1 = exp(M1 - M); + + S = S0*ms0 + S1*ms1; + + if (tiisg == 0) { + ss[j*TF + 0] = S; + ss[j*TF + 1] = M; + + ss[j*TF + C + j ] = ms0; + ss[j*TF + C + j + sg*SH] = ms1; + } + } + + // O_0 = diag(ms0)*O_0 + diag(ms1)*O_1 + { + simdgroup_half8x8 t; + simdgroup_float8x8 ms0; + simdgroup_float8x8 ms1; + + simdgroup_load(ms0, ss + C, TF, 0, false); + simdgroup_load(ms1, ss + C + sg*SH, TF, 0, false); + + for (short i = 0; i < D8; ++i) { + simdgroup_load (t, sq + i*8, T, 0, false); + simdgroup_multiply(t, ms1, t); + + simdgroup_multiply_accumulate(lo[i], ms0, lo[i], t); + } + } + } + } + + // store result to shared memory (reuse sq) + if (sgitg == 0) { + for (short i = 0; i < D8; ++i) { + simdgroup_store(lo[i], sq + i*8, T, 0, false); + } + } + + device 
float4 * dst4 = (device float4 *) dst; + + // final rescale with 1/S and store to global memory + if (sgitg == 0) { + for (short j = 0; j < Q && iq1 + j < ne01; ++j) { + const float S = ss[j*TF + 0]; + + for (short i = tiisg; i < D4; i += NW) { + dst4[(iq3*ne2*ne1 + iq2 + (iq1 + j)*ne1)*D4 + i] = (float4) sq4[j*T4 + i]/S; + } + } + } +} + +template [[host_name("kernel_flash_attn_ext_f16_h64" )]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<64>; +template [[host_name("kernel_flash_attn_ext_f16_h80" )]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<80>; +template [[host_name("kernel_flash_attn_ext_f16_h96" )]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<96>; +template [[host_name("kernel_flash_attn_ext_f16_h112")]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<112>; +template [[host_name("kernel_flash_attn_ext_f16_h128")]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<128>; +//template [[host_name("kernel_flash_attn_ext_f16_h256")]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<256>; + +template // head size, queries per threadgroup, cache items per threadgroup +kernel void kernel_flash_attn_ext_vec_f16( + device const char * q, + device const char * k, + device const char * v, + device const char * mask, + device float * dst, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant uint64_t & nb21, + constant uint64_t & nb22, + constant uint64_t & nb23, + constant uint64_t & nb31, + constant int64_t & ne1, + constant int64_t & ne2, + constant float & scale, + constant float & max_bias, + constant float & m0, + constant float & m1, + constant uint32_t & n_head_log2, + constant float & logit_softcap, + threadgroup half * shared 
[[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]], + ushort tiisg[[thread_index_in_simdgroup]], + ushort sgitg[[simdgroup_index_in_threadgroup]]) { + const short nsg = ntg.y; // number of simdgroups + + const short iq3 = tgpig[2]; + const short iq2 = tgpig[1]; + const short iq1 = tgpig[0]; + + const short D4 = D/4; + const short NW = N_SIMDWIDTH; + const short SH = (C + Q); // shared memory per simdgroup in (half) + + const short T = D + 2*nsg*SH; // shared memory size per query in (half) + + float slope = 1.0f; + + // ALiBi + if (max_bias > 0.0f) { + const uint32_t h = iq2; + + const float base = h < n_head_log2 ? m0 : m1; + const int exp = h < n_head_log2 ? h + 1 : 2*(h - n_head_log2) + 1; + + slope = pow(base, exp); + } + + //threadgroup half * sq = (threadgroup half *) (shared + 0*D); // holds the query data + threadgroup half4 * sq4 = (threadgroup half4 *) (shared + 0*D); // same as above but in half4 + threadgroup float * ss = (threadgroup float *) (shared + 2*sgitg*SH + 1*D); // scratch buffer for attention and diagonal matrix + threadgroup float4 * ss4 = (threadgroup float4 *) (shared + 2*sgitg*SH + 1*D); // same as above but in half4 + threadgroup half4 * sr4 = (threadgroup half4 *) (shared + sgitg*D + 1*T); // scratch buffer for the results + + // store the result for all queries in local memory in 8x8 matrices (the O matrix from the paper) + half4 lo[D4/NW]; + + // load heads from Q to shared memory + device const float4 * q4 = (device const float4 *) ((device const char *) q + (iq1*nb01 + iq2*nb02 + iq3*nb03)); + + for (short i = tiisg; i < D4; i += NW) { + if (iq1 < ne01) { + sq4[i] = (half4) q4[i]; + } else { + sq4[i] = 0.0h; + } + } + + // zero out lo + for (short i = tiisg; i < D4; i += NW) { + lo[i/NW] = 0.0h; + } + + // zero out shared memory SH + for (short i = tiisg; i < SH/4; i += NW) { + ss4[i] = 0.0h; + } + + 
threadgroup_barrier(mem_flags::mem_threadgroup); + + { + float S = { 0.0h }; + float M = { -FLT_MAX/2 }; + + // assume K and V are same shape + const short ne22 = ne12; + const short ne23 = ne13; + + // broadcast + const short rk2 = ne02/ne12; + const short rk3 = ne03/ne13; + + const short rv2 = ne02/ne22; + const short rv3 = ne03/ne23; + + // k indices + const short ik2 = iq2 / rk2; + const short ik3 = iq3 / rk3; + + // v indices + const short iv2 = iq2 / rv2; + const short iv3 = iq3 / rv3; + + // load the queries from shared memory into local memory + float4 mq[D4]; + + for (short ii = 0; ii < D4; ii += NW) { + short i = ii + tiisg; + mq[i] = (float4) sq4[i]; + } + + // pointer to the mask + device const half4 * mp4 = (device const half4 *) (mask + iq1*nb31); + + // loop over the KV cache + // each simdgroup handles blocks of Q rows and C columns + for (int ic0 = 0; ic0 < ne11; ic0 += C*nsg) { + const int ic = ic0 + C*sgitg; + if (ic >= ne11) { + break; + } + + // Q*K^T + { +#pragma unroll + for (short cc = 0; cc < C/4; ++cc) { + float4 mqk = { 0.0h }; + + device const half4 * pk4 = (device const half4 *) ((device const char *) k + ((ic + 4*cc)*nb11 + ik2*nb12 + ik3*nb13)); + +#pragma unroll + for (short ii = 0; ii < D4; ii += NW) { + const short i = ii + tiisg; + + float4x4 mk; + mk[0] = (float4) pk4[i + 0*(nb11/8)]; + mk[1] = (float4) pk4[i + 1*(nb11/8)]; + mk[2] = (float4) pk4[i + 2*(nb11/8)]; + mk[3] = (float4) pk4[i + 3*(nb11/8)]; + + mqk += (float4) (mq[i] * mk); + } + + // reduce the results from the threads in the simdgroup + mqk += simd_shuffle_down(mqk, 16); + mqk += simd_shuffle_down(mqk, 8); + mqk += simd_shuffle_down(mqk, 4); + mqk += simd_shuffle_down(mqk, 2); + mqk += simd_shuffle_down(mqk, 1); + + // mqk = mqk*scale + mask*slope + if (tiisg == 0) { + mqk *= scale; + + if (logit_softcap != 0.0f) { + mqk = logit_softcap*precise::tanh(mqk); + } + + mqk += (mask != q) ? 
((float4) mp4[ic/4 + cc])*slope : (float4) 0.0f; + + ss4[cc] = mqk; + } + } + } + + // online softmax + { + const short p = tiisg; + + const float m = M; + const float s = ss[p]; + + M = simd_max(max(M, s)); + + const float ms = exp(m - M); + const float vs = exp(s - M); + + S = S*ms + simd_sum(vs); + + // the P matrix from the paper (Q rows, C columns) + ss[p] = vs; + + // O = diag(ms)*O +#pragma unroll + for (short ii = 0; ii < D4; ii += NW) { + const short i = ii + tiisg; + lo[i/NW] *= ms; + } + } + + // O = O + (Q*K^T)*V + { +#pragma unroll + for (short cc = 0; cc < C/4; ++cc) { + device const half4 * pv4 = (device const half4 *) ((device const char *) v + ((ic + 4*cc)*nb21 + iv2*nb22 + iv3*nb23)); + +#pragma unroll + for (short ii = 0; ii < D4; ii += NW) { + const short i = ii + tiisg; + + lo[i/NW] += pv4[i + 0*(nb21/8)] * ss[4*cc + 0]; + lo[i/NW] += pv4[i + 1*(nb21/8)] * ss[4*cc + 1]; + lo[i/NW] += pv4[i + 2*(nb21/8)] * ss[4*cc + 2]; + lo[i/NW] += pv4[i + 3*(nb21/8)] * ss[4*cc + 3]; + } + } + } + + } + + // these are needed for reducing the results from the simdgroups (reuse the ss buffer) + if (tiisg == 0) { + ss[0] = S; + ss[1] = M; + } + } + + // store results to shared memory + for (short ii = 0; ii < D4; ii += NW) { + short i = ii + tiisg; + sr4[i] = lo[ii/NW]; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + // parallel reduce + for (short r = nsg/2; r > 0; r >>= 1) { + if (sgitg < r) { + const float S0 = ss[ 0]; + const float S1 = ss[r*SH + 0]; + + const float M0 = ss[ 1]; + const float M1 = ss[r*SH + 1]; + + const float M = max(M0, M1); + + const float ms0 = exp(M0 - M); + const float ms1 = exp(M1 - M); + + const float S = S0*ms0 + S1*ms1; + + if (tiisg == 0) { + ss[0] = S; + ss[1] = M; + } + + // O_0 = diag(ms0)*O_0 + diag(ms1)*O_1 + for (short ii = 0; ii < D4; ii += NW) { + short i = ii + tiisg; + sr4[i] = sr4[i]*ms0 + sr4[i + r*D4]*ms1; + } + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + } + + device float4 * dst4 = (device 
float4 *) dst; + + // final rescale with 1/S and store to global memory + if (sgitg == 0) { + const float S = ss[0]; + + for (short ii = 0; ii < D4; ii += NW) { + short i = ii + tiisg; + dst4[(iq3*ne2*ne1 + iq2 + (iq1)*ne1)*D4 + i] = (float4) sr4[i]/S; + } + } +} + +template [[host_name("kernel_flash_attn_ext_vec_f16_h128")]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_vec_f16<128>; +//template [[host_name("kernel_flash_attn_ext_vec_f16_h256")]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_vec_f16<256>; + +template +kernel void kernel_cpy( + device const void * src0, + device void * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig[2]; + const int64_t i02 = tgpig[1]; + const int64_t i01 = tgpig[0]; + + const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + + const int64_t i3 = n / (ne2*ne1*ne0); + const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0); + const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0; + const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0); + + device T1 * dst_data = (device T1 *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + for (int64_t i00 = tpitg.x; i00 < ne00; i00 += ntg.x) { + device const T0 * src = (device T0 *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00); + dst_data[i00] = (T1) src[0]; + } +} + +typedef decltype(kernel_cpy) kernel_cpy_t; + +template [[host_name("kernel_cpy_f32_f32")]] kernel kernel_cpy_t kernel_cpy; 
+template [[host_name("kernel_cpy_f32_f16")]] kernel kernel_cpy_t kernel_cpy; +template [[host_name("kernel_cpy_f16_f16")]] kernel kernel_cpy_t kernel_cpy; +template [[host_name("kernel_cpy_f16_f32")]] kernel kernel_cpy_t kernel_cpy; + +kernel void kernel_cpy_f32_q8_0( + device const float * src0, + device void * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig[2]; + const int64_t i02 = tgpig[1]; + const int64_t i01 = tgpig[0]; + + const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + + const int64_t i3 = n / (ne2*ne1*ne0); + const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0); + const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0; + const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0)/QK8_0; + + device block_q8_0 * dst_data = (device block_q8_0 *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + for (int64_t i00 = tpitg.x*QK8_0; i00 < ne00; i00 += ntg.x*QK8_0) { + device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00); + + float amax = 0.0f; // absolute max + + for (int j = 0; j < QK8_0; j++) { + const float v = src[j]; + amax = MAX(amax, fabs(v)); + } + + const float d = amax / ((1 << 7) - 1); + const float id = d ? 
1.0f/d : 0.0f; + + dst_data[i00/QK8_0].d = d; + + for (int j = 0; j < QK8_0; ++j) { + const float x0 = src[j]*id; + + dst_data[i00/QK8_0].qs[j] = round(x0); + } + } +} + +kernel void kernel_cpy_f32_q4_0( + device const float * src0, + device void * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig[2]; + const int64_t i02 = tgpig[1]; + const int64_t i01 = tgpig[0]; + + const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + + const int64_t i3 = n / (ne2*ne1*ne0); + const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0); + const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0; + const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0)/QK4_0; + + device block_q4_0 * dst_data = (device block_q4_0 *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + for (int64_t i00 = tpitg.x*QK4_0; i00 < ne00; i00 += ntg.x*QK4_0) { + device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00); + + float amax = 0.0f; // absolute max + float max = 0.0f; + + for (int j = 0; j < QK4_0; j++) { + const float v = src[j]; + if (amax < fabs(v)) { + amax = fabs(v); + max = v; + } + } + + const float d = max / -8; + const float id = d ? 
1.0f/d : 0.0f; + + dst_data[i00/QK4_0].d = d; + + for (int j = 0; j < QK4_0/2; ++j) { + const float x0 = src[0 + j]*id; + const float x1 = src[QK4_0/2 + j]*id; + + const uint8_t xi0 = MIN(15, (int8_t)(x0 + 8.5f)); + const uint8_t xi1 = MIN(15, (int8_t)(x1 + 8.5f)); + + dst_data[i00/QK4_0].qs[j] = xi0; + dst_data[i00/QK4_0].qs[j] |= xi1 << 4; + } + } +} + +kernel void kernel_cpy_f32_q4_1( + device const float * src0, + device void * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig[2]; + const int64_t i02 = tgpig[1]; + const int64_t i01 = tgpig[0]; + + const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + + const int64_t i3 = n / (ne2*ne1*ne0); + const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0); + const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0; + const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0)/QK4_1; + + device block_q4_1 * dst_data = (device block_q4_1 *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + for (int64_t i00 = tpitg.x*QK4_1; i00 < ne00; i00 += ntg.x*QK4_1) { + device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00); + + float min = FLT_MAX; + float max = -FLT_MAX; + + for (int j = 0; j < QK4_1; j++) { + const float v = src[j]; + if (min > v) min = v; + if (max < v) max = v; + } + + const float d = (max - min) / ((1 << 4) - 1); + const float id = d ? 
1.0f/d : 0.0f; + + dst_data[i00/QK4_1].d = d; + dst_data[i00/QK4_1].m = min; + + for (int j = 0; j < QK4_1/2; ++j) { + const float x0 = (src[0 + j] - min)*id; + const float x1 = (src[QK4_1/2 + j] - min)*id; + + const uint8_t xi0 = MIN(15, (int8_t)(x0 + 0.5f)); + const uint8_t xi1 = MIN(15, (int8_t)(x1 + 0.5f)); + + dst_data[i00/QK4_1].qs[j] = xi0; + dst_data[i00/QK4_1].qs[j] |= xi1 << 4; + } + } +} + +kernel void kernel_cpy_f32_q5_0( + device const float * src0, + device void * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig[2]; + const int64_t i02 = tgpig[1]; + const int64_t i01 = tgpig[0]; + + const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + + const int64_t i3 = n / (ne2*ne1*ne0); + const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0); + const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0; + const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0)/QK5_0; + + device block_q5_0 * dst_data = (device block_q5_0 *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + for (int64_t i00 = tpitg.x*QK5_0; i00 < ne00; i00 += ntg.x*QK5_0) { + device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00); + + float amax = 0.0f; // absolute max + float max = 0.0f; + + for (int j = 0; j < QK5_0; j++) { + const float v = src[j]; + if (amax < fabs(v)) { + amax = fabs(v); + max = v; + } + } + + const float d = max / -16; + const float id = d ? 
1.0f/d : 0.0f; + + dst_data[i00/QK5_0].d = d; + + uint32_t qh = 0; + for (int j = 0; j < QK5_0/2; ++j) { + const float x0 = src[0 + j]*id; + const float x1 = src[QK5_0/2 + j]*id; + + const uint8_t xi0 = MIN(31, (int8_t)(x0 + 16.5f)); + const uint8_t xi1 = MIN(31, (int8_t)(x1 + 16.5f)); + + dst_data[i00/QK5_0].qs[j] = (xi0 & 0xf) | ((xi1 & 0xf) << 4); + qh |= ((xi0 & 0x10u) >> 4) << (j + 0); + qh |= ((xi1 & 0x10u) >> 4) << (j + QK5_0/2); + } + thread const uint8_t * qh8 = (thread const uint8_t *)&qh; + for (int j = 0; j < 4; ++j) { + dst_data[i00/QK5_0].qh[j] = qh8[j]; + } + } +} + +kernel void kernel_cpy_f32_q5_1( + device const float * src0, + device void * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig[2]; + const int64_t i02 = tgpig[1]; + const int64_t i01 = tgpig[0]; + + const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + + const int64_t i3 = n / (ne2*ne1*ne0); + const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0); + const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0; + const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0)/QK5_1; + + device block_q5_1 * dst_data = (device block_q5_1 *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + for (int64_t i00 = tpitg.x*QK5_1; i00 < ne00; i00 += ntg.x*QK5_1) { + device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00); + + float max = src[0]; + float min = src[0]; + + for (int j 
= 1; j < QK5_1; j++) { + const float v = src[j]; + min = v < min ? v : min; + max = v > max ? v : max; + } + + const float d = (max - min) / 31; + const float id = d ? 1.0f/d : 0.0f; + + dst_data[i00/QK5_1].d = d; + dst_data[i00/QK5_1].m = min; + + uint32_t qh = 0; + for (int j = 0; j < QK5_1/2; ++j) { + const float x0 = (src[0 + j] - min)*id; + const float x1 = (src[QK5_1/2 + j] - min)*id; + + const uint8_t xi0 = (uint8_t)(x0 + 0.5f); + const uint8_t xi1 = (uint8_t)(x1 + 0.5f); + + dst_data[i00/QK5_1].qs[j] = (xi0 & 0xf) | ((xi1 & 0xf) << 4); + qh |= ((xi0 & 0x10u) >> 4) << (j + 0); + qh |= ((xi1 & 0x10u) >> 4) << (j + QK5_1/2); + } + thread const uint8_t * qh8 = (thread const uint8_t *)&qh; + for (int j = 0; j < 4; ++j) { + dst_data[i00/QK5_1].qh[j] = qh8[j]; + } + } +} + +static inline int best_index_int8(int n, constant float * val, float x) { + if (x <= val[0]) return 0; + if (x >= val[n-1]) return n-1; + int ml = 0, mu = n-1; + while (mu-ml > 1) { + int mav = (ml+mu)/2; + if (x < val[mav]) mu = mav; else ml = mav; + } + return x - val[mu-1] < val[mu] - x ? 
mu-1 : mu; +} + +constexpr constant static float kvalues_iq4nl_f[16] = { + -127.f, -104.f, -83.f, -65.f, -49.f, -35.f, -22.f, -10.f, 1.f, 13.f, 25.f, 38.f, 53.f, 69.f, 89.f, 113.f +}; + +kernel void kernel_cpy_f32_iq4_nl( + device const float * src0, + device void * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig[2]; + const int64_t i02 = tgpig[1]; + const int64_t i01 = tgpig[0]; + + const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + + const int64_t i3 = n / (ne2*ne1*ne0); + const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0); + const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0; + const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0)/QK4_NL; + + device block_iq4_nl * dst_data = (device block_iq4_nl *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + for (int64_t i00 = tpitg.x*QK4_NL; i00 < ne00; i00 += ntg.x*QK4_NL) { + device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00); + + float amax = 0.0f; // absolute max + float max = 0.0f; + + for (int j = 0; j < QK4_0; j++) { + const float v = src[j]; + if (amax < fabs(v)) { + amax = fabs(v); + max = v; + } + } + + const float d = max / kvalues_iq4nl_f[0]; + const float id = d ? 
1.0f/d : 0.0f; + + float sumqx = 0, sumq2 = 0; + for (int j = 0; j < QK4_NL/2; ++j) { + const float x0 = src[0 + j]*id; + const float x1 = src[QK4_NL/2 + j]*id; + + const uint8_t xi0 = best_index_int8(16, kvalues_iq4nl_f, x0); + const uint8_t xi1 = best_index_int8(16, kvalues_iq4nl_f, x1); + + dst_data[i00/QK4_NL].qs[j] = xi0 | (xi1 << 4); + + const float v0 = kvalues_iq4nl_f[xi0]; + const float v1 = kvalues_iq4nl_f[xi1]; + const float w0 = src[0 + j]*src[0 + j]; + const float w1 = src[QK4_NL/2 + j]*src[QK4_NL/2 + j]; + sumqx += w0*v0*src[j] + w1*v1*src[QK4_NL/2 + j]; + sumq2 += w0*v0*v0 + w1*v1*v1; + + } + + dst_data[i00/QK4_NL].d = sumq2 > 0 ? sumqx/sumq2 : d; + + } +} + +kernel void kernel_concat( + device const char * src0, + device const char * src1, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + constant int32_t & dim, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + + const int64_t i3 = tgpig.z; + const int64_t i2 = tgpig.y; + const int64_t i1 = tgpig.x; + + int64_t o[4] = {0, 0, 0, 0}; + o[dim] = dim == 0 ? ne00 : (dim == 1 ? ne01 : (dim == 2 ? 
ne02 : ne03)); + + device const float * x; + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + if (i0 < ne00 && i1 < ne01 && i2 < ne02 && i3 < ne03) { + x = (device const float *)(src0 + (i3 )*nb03 + (i2 )*nb02 + (i1 )*nb01 + (i0 )*nb00); + } else { + x = (device const float *)(src1 + (i3 - o[3])*nb13 + (i2 - o[2])*nb12 + (i1 - o[1])*nb11 + (i0 - o[0])*nb10); + } + + device float * y = (device float *)(dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + *y = *x; + } +} + +void kernel_mul_mv_q2_K_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_q2_K * x = (device const block_q2_K *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int step = sizeof(block_q2_K) * nb; + + const int ix = tiisg/8; // 0...3 + const int it = tiisg%8; // 0...7 + const int iq = it/4; // 0 or 1 + const int ir = it%4; // 0...3 + const int is = (8*ir)/16;// 0 or 1 + + device const float * y4 = y + ix * QK_K + 128 * iq + 8 * ir; + + for (int ib = ix; ib < nb; ib += 4) { + + float4 sumy = {0.f, 0.f, 0.f, 0.f}; + for (int i = 0; i < 8; ++i) { + yl[i+ 0] = y4[i+ 0]; sumy[0] += yl[i+ 0]; + yl[i+ 8] = y4[i+32]; sumy[1] += yl[i+ 8]; + yl[i+16] = y4[i+64]; sumy[2] += yl[i+16]; + yl[i+24] = y4[i+96]; sumy[3] += yl[i+24]; + } + + device const uint8_t * sc = (device const uint8_t 
*)x[ib].scales + 8*iq + is; + device const uint16_t * qs = (device const uint16_t *)x[ib].qs + 16 * iq + 4 * ir; + device const half * dh = &x[ib].d; + + for (int row = 0; row < N_DST; row++) { + + float4 acc1 = {0.f, 0.f, 0.f, 0.f}; + float4 acc2 = {0.f, 0.f, 0.f, 0.f}; + for (int i = 0; i < 8; i += 2) { + acc1[0] += yl[i+ 0] * (qs[i/2] & 0x0003); + acc2[0] += yl[i+ 1] * (qs[i/2] & 0x0300); + acc1[1] += yl[i+ 8] * (qs[i/2] & 0x000c); + acc2[1] += yl[i+ 9] * (qs[i/2] & 0x0c00); + acc1[2] += yl[i+16] * (qs[i/2] & 0x0030); + acc2[2] += yl[i+17] * (qs[i/2] & 0x3000); + acc1[3] += yl[i+24] * (qs[i/2] & 0x00c0); + acc2[3] += yl[i+25] * (qs[i/2] & 0xc000); + } + float dall = dh[0]; + float dmin = dh[1] * 1.f/16.f; + sumf[row] += dall * ((acc1[0] + 1.f/256.f * acc2[0]) * (sc[0] & 0xF) * 1.f/ 1.f + + (acc1[1] + 1.f/256.f * acc2[1]) * (sc[2] & 0xF) * 1.f/ 4.f + + (acc1[2] + 1.f/256.f * acc2[2]) * (sc[4] & 0xF) * 1.f/16.f + + (acc1[3] + 1.f/256.f * acc2[3]) * (sc[6] & 0xF) * 1.f/64.f) - + dmin * (sumy[0] * (sc[0] & 0xF0) + sumy[1] * (sc[2] & 0xF0) + sumy[2] * (sc[4] & 0xF0) + sumy[3] * (sc[6] & 0xF0)); + + qs += step/2; + sc += step; + dh += step/2; + } + + y4 += 4 * QK_K; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; + } + } +} + +[[host_name("kernel_mul_mv_q2_K_f32")]] +kernel void kernel_mul_mv_q2_K_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint 
tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_q2_K_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, nullptr, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_q3_K_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const int nb = ne00/QK_K; + + const int64_t r0 = tgpig.x; + const int64_t r1 = tgpig.y; + const int64_t im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * 2; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_q3_K * x = (device const block_q3_K *) src0 + first_row*nb + offset0; + device const float * yy = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + + //const uint16_t kmask1 = 0x3030; + //const uint16_t kmask2 = 0x0f0f; + + const int tid = tiisg/4; + const int ix = tiisg%4; + const int ip = tid/4; // 0 or 1 + const int il = 2*((tid%4)/2); // 0 or 2 + const int ir = tid%2; + const int n = 8; + const int l0 = n*ir; + + // One would think that the Metal compiler would figure out that ip and il can only have + // 4 possible states, and optimize accordingly. Well, no. It needs help, and we do it + // with these two tales. 
+ // + // Possible masks for the high bit + const ushort4 mm[4] = {{0x0001, 0x0100, 0x0002, 0x0200}, // ip = 0, il = 0 + {0x0004, 0x0400, 0x0008, 0x0800}, // ip = 0, il = 2 + {0x0010, 0x1000, 0x0020, 0x2000}, // ip = 1, il = 0 + {0x0040, 0x4000, 0x0080, 0x8000}}; // ip = 1, il = 2 + + // Possible masks for the low 2 bits + const int4 qm[2] = {{0x0003, 0x0300, 0x000c, 0x0c00}, {0x0030, 0x3000, 0x00c0, 0xc000}}; + + const ushort4 hm = mm[2*ip + il/2]; + + const int shift = 2*il; + const float v1 = il == 0 ? 4.f : 64.f; + const float v2 = 4.f * v1; + + const uint16_t s_shift1 = 4*ip; + const uint16_t s_shift2 = s_shift1 + il; + + const int q_offset = 32*ip + l0; + const int y_offset = 128*ip + 32*il + l0; + + const int step = sizeof(block_q3_K) * nb / 2; + + device const float * y1 = yy + ix*QK_K + y_offset; + + uint32_t scales32, aux32; + thread uint16_t * scales16 = (thread uint16_t *)&scales32; + thread const int8_t * scales = (thread const int8_t *)&scales32; + + float sumf1[2] = {0.f}; + float sumf2[2] = {0.f}; + for (int i = ix; i < nb; i += 4) { + + for (int l = 0; l < 8; ++l) { + yl[l+ 0] = y1[l+ 0]; + yl[l+ 8] = y1[l+16]; + yl[l+16] = y1[l+32]; + yl[l+24] = y1[l+48]; + } + + device const uint16_t * q = (device const uint16_t *)(x[i].qs + q_offset); + device const uint16_t * h = (device const uint16_t *)(x[i].hmask + l0); + device const uint16_t * a = (device const uint16_t *)(x[i].scales); + device const half * dh = &x[i].d; + + for (int row = 0; row < 2; ++row) { + + const float d_all = (float)dh[0]; + + scales16[0] = a[4]; + scales16[1] = a[5]; + aux32 = ((scales32 >> s_shift2) << 4) & 0x30303030; + scales16[0] = a[il+0]; + scales16[1] = a[il+1]; + scales32 = ((scales32 >> s_shift1) & 0x0f0f0f0f) | aux32; + + float s1 = 0, s2 = 0, s3 = 0, s4 = 0, s5 = 0, s6 = 0; + for (int l = 0; l < n; l += 2) { + const int32_t qs = q[l/2]; + s1 += yl[l+0] * (qs & qm[il/2][0]); + s2 += yl[l+1] * (qs & qm[il/2][1]); + s3 += ((h[l/2] & hm[0]) ? 
0.f : yl[l+0]) + ((h[l/2] & hm[1]) ? 0.f : yl[l+1]); + s4 += yl[l+16] * (qs & qm[il/2][2]); + s5 += yl[l+17] * (qs & qm[il/2][3]); + s6 += ((h[l/2] & hm[2]) ? 0.f : yl[l+16]) + ((h[l/2] & hm[3]) ? 0.f : yl[l+17]); + } + float d1 = d_all * (s1 + 1.f/256.f * s2 - s3*v1); + float d2 = d_all * (s4 + 1.f/256.f * s5 - s6*v2); + sumf1[row] += d1 * (scales[0] - 32); + sumf2[row] += d2 * (scales[2] - 32); + + s1 = s2 = s3 = s4 = s5 = s6 = 0; + for (int l = 0; l < n; l += 2) { + const int32_t qs = q[l/2+8]; + s1 += yl[l+8] * (qs & qm[il/2][0]); + s2 += yl[l+9] * (qs & qm[il/2][1]); + s3 += ((h[l/2+8] & hm[0]) ? 0.f : yl[l+8]) + ((h[l/2+8] & hm[1]) ? 0.f : yl[l+9]); + s4 += yl[l+24] * (qs & qm[il/2][2]); + s5 += yl[l+25] * (qs & qm[il/2][3]); + s6 += ((h[l/2+8] & hm[2]) ? 0.f : yl[l+24]) + ((h[l/2+8] & hm[3]) ? 0.f : yl[l+25]); + } + d1 = d_all * (s1 + 1.f/256.f * s2 - s3*v1); + d2 = d_all * (s4 + 1.f/256.f * s5 - s6*v2); + sumf1[row] += d1 * (scales[1] - 32); + sumf2[row] += d2 * (scales[3] - 32); + + q += step; + h += step; + a += step; + dh += step; + + } + + y1 += 4 * QK_K; + + } + + for (int row = 0; row < 2; ++row) { + const float sumf = (sumf1[row] + 0.25f * sumf2[row]) / (1 << shift); + sumf1[row] = simd_sum(sumf); + } + if (tiisg == 0) { + for (int row = 0; row < 2; ++row) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = sumf1[row]; + } + } +} + +[[host_name("kernel_mul_mv_q3_K_f32")]] +kernel void kernel_mul_mv_q3_K_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + 
uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_q3_K_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, nullptr, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_q4_K_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const uint16_t kmask1 = 0x3f3f; + const uint16_t kmask2 = 0x0f0f; + const uint16_t kmask3 = 0xc0c0; + + const int ix = tiisg/8; // 0...3 + const int it = tiisg%8; // 0...7 + const int iq = it/4; // 0 or 1 + const int ir = it%4; // 0...3 + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + //const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int first_row = r0 * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_q4_K * x = (device const block_q4_K *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[16]; + float yh[16]; + float sumf[N_DST]={0.f}, all_sum; + + const int step = sizeof(block_q4_K) * nb / 2; + + device const float * y4 = y + ix * QK_K + 64 * iq + 8 * ir; + + uint16_t sc16[4]; + thread const uint8_t * sc8 = (thread const uint8_t *)sc16; + + for (int ib = ix; ib < nb; ib += 4) { + + float4 sumy = {0.f, 0.f, 0.f, 0.f}; + for (int i = 0; i < 8; ++i) { + yl[i+0] = y4[i+ 0]; sumy[0] += yl[i+0]; + yl[i+8] = y4[i+ 32]; sumy[1] += yl[i+8]; + yh[i+0] = y4[i+128]; sumy[2] += yh[i+0]; + yh[i+8] = y4[i+160]; sumy[3] += yh[i+8]; + } + + device const uint16_t * sc = (device const uint16_t *)x[ib].scales + iq; + device const uint16_t * q1 = 
(device const uint16_t *)x[ib].qs + 16 * iq + 4 * ir; + device const half * dh = &x[ib].d; + + for (int row = 0; row < N_DST; row++) { + + sc16[0] = sc[0] & kmask1; + sc16[1] = sc[2] & kmask1; + sc16[2] = ((sc[4] >> 0) & kmask2) | ((sc[0] & kmask3) >> 2); + sc16[3] = ((sc[4] >> 4) & kmask2) | ((sc[2] & kmask3) >> 2); + + device const uint16_t * q2 = q1 + 32; + + float4 acc1 = {0.f, 0.f, 0.f, 0.f}; + float4 acc2 = {0.f, 0.f, 0.f, 0.f}; + for (int i = 0; i < 8; i += 2) { + acc1[0] += yl[i+0] * (q1[i/2] & 0x000F); + acc1[1] += yl[i+1] * (q1[i/2] & 0x0F00); + acc1[2] += yl[i+8] * (q1[i/2] & 0x00F0); + acc1[3] += yl[i+9] * (q1[i/2] & 0xF000); + acc2[0] += yh[i+0] * (q2[i/2] & 0x000F); + acc2[1] += yh[i+1] * (q2[i/2] & 0x0F00); + acc2[2] += yh[i+8] * (q2[i/2] & 0x00F0); + acc2[3] += yh[i+9] * (q2[i/2] & 0xF000); + } + + float dall = dh[0]; + float dmin = dh[1]; + sumf[row] += dall * ((acc1[0] + 1.f/256.f * acc1[1]) * sc8[0] + + (acc1[2] + 1.f/256.f * acc1[3]) * sc8[1] * 1.f/16.f + + (acc2[0] + 1.f/256.f * acc2[1]) * sc8[4] + + (acc2[2] + 1.f/256.f * acc2[3]) * sc8[5] * 1.f/16.f) - + dmin * (sumy[0] * sc8[2] + sumy[1] * sc8[3] + sumy[2] * sc8[6] + sumy[3] * sc8[7]); + + q1 += step; + sc += step; + dh += step; + } + + y4 += 4 * QK_K; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; + } + } +} + +[[host_name("kernel_mul_mv_q4_K_f32")]] +kernel void kernel_mul_mv_q4_K_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, 
+ uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_q4_K_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, nullptr, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_q5_K_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const int nb = ne00/QK_K; + + const int64_t r0 = tgpig.x; + const int64_t r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * 2; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_q5_K * x = (device const block_q5_K *) src0 + first_row*nb + offset0; + device const float * yy = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float sumf[2]={0.f}; + + const int step = sizeof(block_q5_K) * nb; + + float yl[16], yh[16]; + + const uint16_t kmask1 = 0x3f3f; + const uint16_t kmask2 = 0x0f0f; + const uint16_t kmask3 = 0xc0c0; + + const int tid = tiisg/4; + const int ix = tiisg%4; + const int iq = tid/4; + const int ir = tid%4; + const int n = 8; + + const int l0 = n*ir; + const int q_offset = 32*iq + l0; + const int y_offset = 64*iq + l0; + + const uint8_t hm1 = 1u << (2*iq); + const uint8_t hm2 = hm1 << 1; + const uint8_t hm3 = hm1 << 4; + const uint8_t hm4 = hm2 << 4; + + uint16_t sc16[4]; + thread const uint8_t * sc8 = (thread const uint8_t *)sc16; + + device const float * y1 = yy + ix*QK_K + y_offset; + + for (int i = ix; i < nb; i += 4) { + + device const uint8_t * q1 = x[i].qs + q_offset; + device const uint8_t * qh = x[i].qh + l0; + device const half * dh = &x[i].d; + device const uint16_t * a = (device const uint16_t 
*)x[i].scales + iq; + + device const float * y2 = y1 + 128; + float4 sumy = {0.f, 0.f, 0.f, 0.f}; + for (int l = 0; l < 8; ++l) { + yl[l+0] = y1[l+ 0]; sumy[0] += yl[l+0]; + yl[l+8] = y1[l+32]; sumy[1] += yl[l+8]; + yh[l+0] = y2[l+ 0]; sumy[2] += yh[l+0]; + yh[l+8] = y2[l+32]; sumy[3] += yh[l+8]; + } + + for (int row = 0; row < 2; ++row) { + + device const uint8_t * q2 = q1 + 64; + + sc16[0] = a[0] & kmask1; + sc16[1] = a[2] & kmask1; + sc16[2] = ((a[4] >> 0) & kmask2) | ((a[0] & kmask3) >> 2); + sc16[3] = ((a[4] >> 4) & kmask2) | ((a[2] & kmask3) >> 2); + + float4 acc1 = {0.f}; + float4 acc2 = {0.f}; + for (int l = 0; l < n; ++l) { + uint8_t h = qh[l]; + acc1[0] += yl[l+0] * (q1[l] & 0x0F); + acc1[1] += yl[l+8] * (q1[l] & 0xF0); + acc1[2] += yh[l+0] * (q2[l] & 0x0F); + acc1[3] += yh[l+8] * (q2[l] & 0xF0); + acc2[0] += h & hm1 ? yl[l+0] : 0.f; + acc2[1] += h & hm2 ? yl[l+8] : 0.f; + acc2[2] += h & hm3 ? yh[l+0] : 0.f; + acc2[3] += h & hm4 ? yh[l+8] : 0.f; + } + const float dall = dh[0]; + const float dmin = dh[1]; + sumf[row] += dall * (sc8[0] * (acc1[0] + 16.f*acc2[0]) + + sc8[1] * (acc1[1]/16.f + 16.f*acc2[1]) + + sc8[4] * (acc1[2] + 16.f*acc2[2]) + + sc8[5] * (acc1[3]/16.f + 16.f*acc2[3])) - + dmin * (sumy[0] * sc8[2] + sumy[1] * sc8[3] + sumy[2] * sc8[6] + sumy[3] * sc8[7]); + + q1 += step; + qh += step; + dh += step/2; + a += step/2; + + } + + y1 += 4 * QK_K; + + } + + for (int row = 0; row < 2; ++row) { + const float tot = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = tot; + } + } +} + +[[host_name("kernel_mul_mv_q5_K_f32")]] +kernel void kernel_mul_mv_q5_K_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + 
constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_q5_K_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, nullptr, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_q6_K_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const uint8_t kmask1 = 0x03; + const uint8_t kmask2 = 0x0C; + const uint8_t kmask3 = 0x30; + const uint8_t kmask4 = 0xC0; + + const int nb = ne00/QK_K; + + const int64_t r0 = tgpig.x; + const int64_t r1 = tgpig.y; + const int im = tgpig.z; + + const int row = 2 * r0 + sgitg; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_q6_K * x = (device const block_q6_K *) src0 + row * nb + offset0; + device const float * yy = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float sumf = 0; + + const int tid = tiisg/2; + const int ix = tiisg%2; + const int ip = tid/8; // 0 or 1 + const int il = tid%8; + const int n = 4; + const int l0 = n*il; + const int is = 8*ip + l0/16; + + const int y_offset = 128*ip + l0; + const int q_offset_l = 64*ip + l0; + const int q_offset_h = 32*ip + l0; + + for (int i = ix; i < nb; i += 2) { + + device const uint8_t * q1 = x[i].ql + q_offset_l; + device const uint8_t * q2 = q1 + 32; + device const uint8_t * qh = x[i].qh + q_offset_h; + device const int8_t * sc = x[i].scales + is; + + device const float * y = yy + i * QK_K + y_offset; + + const float dall = x[i].d; + + float4 sums = {0.f, 0.f, 0.f, 
0.f}; + for (int l = 0; l < n; ++l) { + sums[0] += y[l+ 0] * ((int8_t)((q1[l] & 0xF) | ((qh[l] & kmask1) << 4)) - 32); + sums[1] += y[l+32] * ((int8_t)((q2[l] & 0xF) | ((qh[l] & kmask2) << 2)) - 32); + sums[2] += y[l+64] * ((int8_t)((q1[l] >> 4) | ((qh[l] & kmask3) << 0)) - 32); + sums[3] += y[l+96] * ((int8_t)((q2[l] >> 4) | ((qh[l] & kmask4) >> 2)) - 32); + } + + sumf += dall * (sums[0] * sc[0] + sums[1] * sc[2] + sums[2] * sc[4] + sums[3] * sc[6]); + + } + + const float tot = simd_sum(sumf); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + row] = tot; + } +} + +[[host_name("kernel_mul_mv_q6_K_f32")]] +kernel void kernel_mul_mv_q6_K_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_q6_K_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, nullptr, tgpig, tiisg, sgitg); +} + +// ======================= "True" 2-bit + +void kernel_mul_mv_iq2_xxs_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = 
first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq2_xxs * x = (device const block_iq2_xxs *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + threadgroup uint64_t * values = (threadgroup uint64_t *)shared_values; + threadgroup uint8_t * shared_signs = (threadgroup uint8_t *)(values + 256); + { + int nval = 4; + int pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) values[pos + i] = iq2xxs_grid[pos + i]; + nval = 2; + pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) shared_signs[pos+i] = ksigns_iq2xs[pos+i]; + threadgroup_barrier(mem_flags::mem_threadgroup); + } + + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq2_xxs * xr = x + ibl; + device const uint16_t * q2 = xr->qs + 4 * ib; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + device const uint8_t * aux8 = (device const uint8_t *)q2; + const uint32_t aux32 = q2[2] | (q2[3] << 16); + const float d = db * (0.5f + (aux32 >> 28)); + + float sum = 0; + for (int l = 0; l < 4; ++l) { + const threadgroup uint8_t * grid = (const threadgroup uint8_t *)(values + aux8[l]); + const uint8_t signs = shared_signs[(aux32 >> 7*l) & 127]; + for (int j = 0; j < 8; ++j) { + sum += yl[8*l + j] * grid[j] * (signs & kmask_iq2xs[j] ? 
-1.f : 1.f); + } + } + sumf[row] += d * sum; + + dh += nb*sizeof(block_iq2_xxs)/2; + q2 += nb*sizeof(block_iq2_xxs)/2; + } + + y4 += 32 * 32; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.25f; + } + } +} + +[[host_name("kernel_mul_mv_iq2_xxs_f32")]] +kernel void kernel_mul_mv_iq2_xxs_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq2_xxs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_iq2_xs_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq2_xs * x = (device const block_iq2_xs *) src0 + ib_row + 
offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + threadgroup uint64_t * values = (threadgroup uint64_t *)shared_values; + threadgroup uint8_t * shared_signs = (threadgroup uint8_t *)(values + 512); + { + int nval = 8; + int pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) values[pos + i] = iq2xs_grid[pos + i]; + nval = 2; + pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) shared_signs[pos+i] = ksigns_iq2xs[pos+i]; + threadgroup_barrier(mem_flags::mem_threadgroup); + } + + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq2_xs * xr = x + ibl; + device const uint16_t * q2 = xr->qs + 4 * ib; + device const uint8_t * sc = xr->scales + ib; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + const uint8_t ls1 = sc[0] & 0xf; + const uint8_t ls2 = sc[0] >> 4; + const float d1 = db * (0.5f + ls1); + const float d2 = db * (0.5f + ls2); + + float sum1 = 0, sum2 = 0; + for (int l = 0; l < 2; ++l) { + const threadgroup uint8_t * grid = (const threadgroup uint8_t *)(values + (q2[l] & 511)); + const uint8_t signs = shared_signs[(q2[l] >> 9)]; + for (int j = 0; j < 8; ++j) { + sum1 += yl[8*l + j] * grid[j] * (signs & kmask_iq2xs[j] ? -1.f : 1.f); + } + } + for (int l = 2; l < 4; ++l) { + const threadgroup uint8_t * grid = (const threadgroup uint8_t *)(values + (q2[l] & 511)); + const uint8_t signs = shared_signs[(q2[l] >> 9)]; + for (int j = 0; j < 8; ++j) { + sum2 += yl[8*l + j] * grid[j] * (signs & kmask_iq2xs[j] ? 
-1.f : 1.f); + } + } + sumf[row] += d1 * sum1 + d2 * sum2; + + dh += nb*sizeof(block_iq2_xs)/2; + q2 += nb*sizeof(block_iq2_xs)/2; + sc += nb*sizeof(block_iq2_xs); + } + + y4 += 32 * 32; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.25f; + } + } +} + +[[host_name("kernel_mul_mv_iq2_xs_f32")]] +kernel void kernel_mul_mv_iq2_xs_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq2_xs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_iq3_xxs_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq3_xxs * x = 
(device const block_iq3_xxs *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + threadgroup uint32_t * values = (threadgroup uint32_t *)shared_values; + threadgroup uint8_t * shared_signs = (threadgroup uint8_t *)(values + 256); + { + int nval = 4; + int pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) values[pos + i] = iq3xxs_grid[pos + i]; + nval = 2; + pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) shared_signs[pos+i] = ksigns_iq2xs[pos+i]; + threadgroup_barrier(mem_flags::mem_threadgroup); + } + + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq3_xxs * xr = x + ibl; + device const uint8_t * q3 = xr->qs + 8 * ib; + device const uint16_t * gas = (device const uint16_t *)(xr->qs + QK_K/4) + 2 * ib; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + const uint32_t aux32 = gas[0] | (gas[1] << 16); + const float d = db * (0.5f + (aux32 >> 28)); + + float2 sum = {0}; + for (int l = 0; l < 4; ++l) { + const threadgroup uint8_t * grid1 = (const threadgroup uint8_t *)(values + q3[2*l+0]); + const threadgroup uint8_t * grid2 = (const threadgroup uint8_t *)(values + q3[2*l+1]); + const uint8_t signs = shared_signs[(aux32 >> 7*l) & 127]; + for (int j = 0; j < 4; ++j) { + sum[0] += yl[8*l + j + 0] * grid1[j] * (signs & kmask_iq2xs[j+0] ? -1.f : 1.f); + sum[1] += yl[8*l + j + 4] * grid2[j] * (signs & kmask_iq2xs[j+4] ? 
-1.f : 1.f); + } + } + sumf[row] += d * (sum[0] + sum[1]); + + dh += nb*sizeof(block_iq3_xxs)/2; + q3 += nb*sizeof(block_iq3_xxs); + gas += nb*sizeof(block_iq3_xxs)/2; + } + + y4 += 32 * 32; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.5f; + } + } +} + +[[host_name("kernel_mul_mv_iq3_xxs_f32")]] +kernel void kernel_mul_mv_iq3_xxs_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq3_xxs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_iq3_s_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq3_s * x = 
(device const block_iq3_s *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + threadgroup uint32_t * values = (threadgroup uint32_t *)shared_values; + { + int nval = 8; + int pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) values[pos + i] = iq3s_grid[pos + i]; + threadgroup_barrier(mem_flags::mem_threadgroup); + } + + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq3_s * xr = x + ibl; + device const uint8_t * qs = xr->qs + 8 * ib; + device const uint8_t * qh = xr->qh + ib; + device const uint8_t * sc = xr->scales + (ib/2); + device const uint8_t * signs = xr->signs + 4 * ib; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + const float d = db * (1 + 2*((sc[0] >> 4*(ib%2)) & 0xf)); + + float2 sum = {0}; + for (int l = 0; l < 4; ++l) { + const threadgroup uint32_t * table1 = qh[0] & kmask_iq2xs[2*l+0] ? values + 256 : values; + const threadgroup uint32_t * table2 = qh[0] & kmask_iq2xs[2*l+1] ? 
values + 256 : values; + const threadgroup uint8_t * grid1 = (const threadgroup uint8_t *)(table1 + qs[2*l+0]); + const threadgroup uint8_t * grid2 = (const threadgroup uint8_t *)(table2 + qs[2*l+1]); + for (int j = 0; j < 4; ++j) { + sum[0] += yl[8*l + j + 0] * grid1[j] * select(1, -1, signs[l] & kmask_iq2xs[j+0]); + sum[1] += yl[8*l + j + 4] * grid2[j] * select(1, -1, signs[l] & kmask_iq2xs[j+4]); + } + } + sumf[row] += d * (sum[0] + sum[1]); + + dh += nb*sizeof(block_iq3_s)/2; + qs += nb*sizeof(block_iq3_s); + qh += nb*sizeof(block_iq3_s); + sc += nb*sizeof(block_iq3_s); + signs += nb*sizeof(block_iq3_s); + } + + y4 += 32 * 32; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; + } + } +} + +[[host_name("kernel_mul_mv_iq3_s_f32")]] +kernel void kernel_mul_mv_iq3_s_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq3_s_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_iq2_s_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t 
* shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq2_s * x = (device const block_iq2_s *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + //threadgroup uint64_t * values = (threadgroup uint64_t *)shared_values; + //{ + // int nval = 32; + // int pos = (32*sgitg + tiisg)*nval; + // for (int i = 0; i < nval; ++i) values[pos + i] = iq2s_grid[pos + i]; + // threadgroup_barrier(mem_flags::mem_threadgroup); + //} + + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq2_s * xr = x + ibl; + device const uint8_t * qs = xr->qs + 4 * ib; + device const uint8_t * qh = xr->qh + ib; + device const uint8_t * sc = xr->scales + ib; + device const uint8_t * signs = qs + QK_K/8; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + const float d1 = db * (0.5f + (sc[0] & 0xf)); + const float d2 = db * (0.5f + (sc[0] >> 4)); + + float2 sum = {0}; + for (int l = 0; l < 2; ++l) { + //const threadgroup uint8_t * grid1 = (const threadgroup uint8_t *)(values + (qs[l+0] | ((qh[0] << (8-2*l)) & 0x300))); + //const threadgroup uint8_t * grid2 = (const threadgroup uint8_t *)(values + (qs[l+2] | ((qh[0] << (4-2*l)) & 0x300))); + constant uint8_t * grid1 = (constant uint8_t *)(iq2s_grid + (qs[l+0] | ((qh[0] 
<< (8-2*l)) & 0x300))); + constant uint8_t * grid2 = (constant uint8_t *)(iq2s_grid + (qs[l+2] | ((qh[0] << (4-2*l)) & 0x300))); + for (int j = 0; j < 8; ++j) { + sum[0] += yl[8*l + j + 0] * grid1[j] * select(1, -1, signs[l+0] & kmask_iq2xs[j]); + sum[1] += yl[8*l + j + 16] * grid2[j] * select(1, -1, signs[l+2] & kmask_iq2xs[j]); + } + } + sumf[row] += d1 * sum[0] + d2 * sum[1]; + + dh += nb*sizeof(block_iq2_s)/2; + qs += nb*sizeof(block_iq2_s); + qh += nb*sizeof(block_iq2_s); + sc += nb*sizeof(block_iq2_s); + signs += nb*sizeof(block_iq2_s); + } + + y4 += 32 * 32; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.25f; + } + } +} + +[[host_name("kernel_mul_mv_iq2_s_f32")]] +kernel void kernel_mul_mv_iq2_s_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq2_s_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_iq1_s_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_value, + uint3 tgpig, + uint tiisg, + uint sgitg) 
{ + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + device const block_iq1_s * x = (device const block_iq1_s *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + float sumy = 0; + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + sumy += yl[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq1_s * xr = x + ibl; + device const uint8_t * qs = xr->qs + 4 * ib; + device const uint16_t * qh = xr->qh + ib; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + constant uint8_t * grid1 = (constant uint8_t *)(iq1s_grid_gpu + (qs[0] | ((qh[0] << 8) & 0x700))); + constant uint8_t * grid2 = (constant uint8_t *)(iq1s_grid_gpu + (qs[1] | ((qh[0] << 5) & 0x700))); + constant uint8_t * grid3 = (constant uint8_t *)(iq1s_grid_gpu + (qs[2] | ((qh[0] << 2) & 0x700))); + constant uint8_t * grid4 = (constant uint8_t *)(iq1s_grid_gpu + (qs[3] | ((qh[0] >> 1) & 0x700))); + + float sum = 0; + for (int j = 0; j < 4; ++j) { + sum += yl[j+ 0] * (grid1[j] & 0xf) + yl[j+ 4] * (grid1[j] >> 4) + + yl[j+ 8] * (grid2[j] & 0xf) + yl[j+12] * (grid2[j] >> 4) + + yl[j+16] * (grid3[j] & 0xf) + yl[j+20] * (grid3[j] >> 4) + + yl[j+24] * (grid4[j] & 0xf) + yl[j+28] * (grid4[j] >> 4); + } + sumf[row] += (float)dh[0] * (sum + sumy * (qh[0] & 0x8000 ? 
-1 - IQ1S_DELTA : -1 + IQ1S_DELTA)) * (2*((qh[0] >> 12) & 7) + 1); + + dh += nb*sizeof(block_iq1_s)/2; + qs += nb*sizeof(block_iq1_s); + qh += nb*sizeof(block_iq1_s)/2; + } + + y4 += 32 * 32; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; + } + } +} + +void kernel_mul_mv_iq1_m_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_value, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + device const block_iq1_m * x = (device const block_iq1_m *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + iq1m_scale_t scale; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + float4 sumy = {0.f}; + for (int i = 0; i < 8; ++i) { + yl[i+ 0] = y4[i+ 0]; sumy[0] += yl[i+ 0]; + yl[i+ 8] = y4[i+ 8]; sumy[1] += yl[i+ 8]; + yl[i+16] = y4[i+16]; sumy[2] += yl[i+16]; + yl[i+24] = y4[i+24]; sumy[3] += yl[i+24]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq1_m * xr = x + ibl; + device const uint8_t * qs = xr->qs + 4 * ib; + device const uint8_t * qh = xr->qh + 2 * ib; + device const uint16_t * sc = (device const uint16_t *)xr->scales; + + for (int row = 0; row < N_DST; row++) { + 
scale.u16 = (sc[0] >> 12) | ((sc[1] >> 8) & 0x00f0) | ((sc[2] >> 4) & 0x0f00) | (sc[3] & 0xf000); + + constant uint8_t * grid1 = (constant uint8_t *)(iq1s_grid_gpu + (qs[0] | ((qh[0] << 8) & 0x700))); + constant uint8_t * grid2 = (constant uint8_t *)(iq1s_grid_gpu + (qs[1] | ((qh[0] << 4) & 0x700))); + constant uint8_t * grid3 = (constant uint8_t *)(iq1s_grid_gpu + (qs[2] | ((qh[1] << 8) & 0x700))); + constant uint8_t * grid4 = (constant uint8_t *)(iq1s_grid_gpu + (qs[3] | ((qh[1] << 4) & 0x700))); + + float2 sum = {0.f}; + for (int j = 0; j < 4; ++j) { + sum[0] += yl[j+ 0] * (grid1[j] & 0xf) + yl[j+ 4] * (grid1[j] >> 4) + + yl[j+ 8] * (grid2[j] & 0xf) + yl[j+12] * (grid2[j] >> 4); + sum[1] += yl[j+16] * (grid3[j] & 0xf) + yl[j+20] * (grid3[j] >> 4) + + yl[j+24] * (grid4[j] & 0xf) + yl[j+28] * (grid4[j] >> 4); + } + const float delta1 = sumy[0] * (qh[0] & 0x08 ? -1 - IQ1M_DELTA : -1 + IQ1M_DELTA) + sumy[1] * (qh[0] & 0x80 ? -1 - IQ1M_DELTA : -1 + IQ1M_DELTA); + const float delta2 = sumy[2] * (qh[1] & 0x08 ? -1 - IQ1M_DELTA : -1 + IQ1M_DELTA) + sumy[3] * (qh[1] & 0x80 ? 
-1 - IQ1M_DELTA : -1 + IQ1M_DELTA); + + sumf[row] += (float)scale.f16 * ((sum[0] + delta1) * (2*((sc[ib/2] >> (6*(ib%2)+0)) & 7) + 1) + + (sum[1] + delta2) * (2*((sc[ib/2] >> (6*(ib%2)+3)) & 7) + 1)); + + sc += nb*sizeof(block_iq1_m)/2; + qs += nb*sizeof(block_iq1_m); + qh += nb*sizeof(block_iq1_m); + } + + y4 += 32 * 32; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; + } + } +} + +void kernel_mul_mv_iq4_nl_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values_i8, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + threadgroup float * shared_values = (threadgroup float *)shared_values_i8; + const int nb = ne00/QK4_NL; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + const int first_row = (r0 * 2 + sgitg) * 2; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + device const block_iq4_nl * x = (device const block_iq4_nl *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + const int ix = tiisg/2; // 0...15 + const int it = tiisg%2; // 0 or 1 + + shared_values[tiisg] = kvalues_iq4nl_f[tiisg%16]; + threadgroup_barrier(mem_flags::mem_threadgroup); + + float4 yl[4]; + float sumf[2]={0.f}, all_sum; + + device const float * yb = y + ix * QK4_NL + it * 8; + + uint32_t aux32[2]; + thread const uint8_t * q8 = (thread const uint8_t *)aux32; + + float4 qf1, qf2; + + for (int ib = ix; ib < nb; ib += 16) { + + device const float4 * y4 = (device const float4 *)yb; + yl[0] = y4[0]; yl[1] = y4[4]; yl[2] = y4[1]; yl[3] = y4[5]; + + for (int row = 0; row < 2 && first_row + row < 
ne01; ++row) { + + device const block_iq4_nl & xb = x[row*nb + ib]; + device const uint16_t * q4 = (device const uint16_t *)(xb.qs + 8*it); + + float4 acc1 = {0.f}, acc2 = {0.f}; + + aux32[0] = q4[0] | (q4[1] << 16); + aux32[1] = (aux32[0] >> 4) & 0x0f0f0f0f; + aux32[0] &= 0x0f0f0f0f; + qf1 = {shared_values[q8[0]], shared_values[q8[1]], shared_values[q8[2]], shared_values[q8[3]]}; + qf2 = {shared_values[q8[4]], shared_values[q8[5]], shared_values[q8[6]], shared_values[q8[7]]}; + acc1 += yl[0] * qf1; + acc2 += yl[1] * qf2; + + aux32[0] = q4[2] | (q4[3] << 16); + aux32[1] = (aux32[0] >> 4) & 0x0f0f0f0f; + aux32[0] &= 0x0f0f0f0f; + qf1 = {shared_values[q8[0]], shared_values[q8[1]], shared_values[q8[2]], shared_values[q8[3]]}; + qf2 = {shared_values[q8[4]], shared_values[q8[5]], shared_values[q8[6]], shared_values[q8[7]]}; + acc1 += yl[2] * qf1; + acc2 += yl[3] * qf2; + + acc1 += acc2; + + sumf[row] += (float)xb.d * (acc1[0] + acc1[1] + acc1[2] + acc1[3]); + + } + + yb += 16 * QK4_NL; + } + + for (int row = 0; row < 2 && first_row + row < ne01; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; + } + } +} + +void kernel_mul_mv_iq4_xs_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values_i8, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + threadgroup float * shared_values = (threadgroup float *)shared_values_i8; + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + const int first_row = (r0 * 2 + sgitg) * 2; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + device const block_iq4_xs * x = (device const block_iq4_xs *) src0 + ib_row + 
offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + const int ix = tiisg/16; // 0 or 1 + const int it = tiisg%16; // 0...15 + const int ib = it/2; + const int il = it%2; + + shared_values[tiisg] = kvalues_iq4nl_f[tiisg%16]; + threadgroup_barrier(mem_flags::mem_threadgroup); + + float4 yl[4]; + float sumf[2]={0.f}, all_sum; + + device const float * yb = y + ix * QK_K + ib * 32 + il * 8; + + uint32_t aux32[2]; + thread const uint8_t * q8 = (thread const uint8_t *)aux32; + + float4 qf1, qf2; + + for (int ibl = ix; ibl < nb; ibl += 2) { + + device const float4 * y4 = (device const float4 *)yb; + yl[0] = y4[0]; yl[1] = y4[4]; yl[2] = y4[1]; yl[3] = y4[5]; + + for (int row = 0; row < 2; ++row) { + + device const block_iq4_xs & xb = x[row*nb + ibl]; + device const uint32_t * q4 = (device const uint32_t *)(xb.qs + 16*ib + 8*il); + + float4 acc1 = {0.f}, acc2 = {0.f}; + + aux32[0] = q4[0] & 0x0f0f0f0f; + aux32[1] = (q4[0] >> 4) & 0x0f0f0f0f; + qf1 = {shared_values[q8[0]], shared_values[q8[1]], shared_values[q8[2]], shared_values[q8[3]]}; + qf2 = {shared_values[q8[4]], shared_values[q8[5]], shared_values[q8[6]], shared_values[q8[7]]}; + acc1 += yl[0] * qf1; + acc2 += yl[1] * qf2; + + aux32[0] = q4[1] & 0x0f0f0f0f; + aux32[1] = (q4[1] >> 4) & 0x0f0f0f0f; + qf1 = {shared_values[q8[0]], shared_values[q8[1]], shared_values[q8[2]], shared_values[q8[3]]}; + qf2 = {shared_values[q8[4]], shared_values[q8[5]], shared_values[q8[6]], shared_values[q8[7]]}; + acc1 += yl[2] * qf1; + acc2 += yl[3] * qf2; + + acc1 += acc2; + + const int ls = (((xb.scales_l[ib/2] >> 4*(ib%2)) & 0xf) | (((xb.scales_h >> 2*ib) & 3) << 4)) - 32; + sumf[row] += (float)xb.d * ls * (acc1[0] + acc1[1] + acc1[2] + acc1[3]); + + } + + yb += 2 * QK_K; + } + + for (int row = 0; row < 2; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; + } + } +} + +[[host_name("kernel_mul_mv_iq1_s_f32")]] +kernel void 
kernel_mul_mv_iq1_s_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq1_s_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, nullptr, tgpig, tiisg, sgitg); +} + +[[host_name("kernel_mul_mv_iq1_m_f32")]] +kernel void kernel_mul_mv_iq1_m_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq1_m_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, nullptr, tgpig, tiisg, sgitg); +} + +[[host_name("kernel_mul_mv_iq4_nl_f32")]] +kernel void kernel_mul_mv_iq4_nl_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant 
int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq4_nl_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + +[[host_name("kernel_mul_mv_iq4_xs_f32")]] +kernel void kernel_mul_mv_iq4_xs_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq4_xs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + +//============================= templates and their specializations ============================= + +// NOTE: this is not dequantizing - we are simply fitting the template +template +void dequantize_f32(device const float4x4 * src, short il, thread type4x4 & reg) { + float4x4 temp = *(((device float4x4 *)src)); + for (int i = 0; i < 16; i++){ + reg[i/4][i%4] = temp[i/4][i%4]; + } +} + +template +void dequantize_f16(device const half4x4 * src, short il, thread type4x4 & reg) { + 
half4x4 temp = *(((device half4x4 *)src)); + for (int i = 0; i < 16; i++){ + reg[i/4][i%4] = temp[i/4][i%4]; + } +} + +template +void dequantize_q4_0(device const block_q4_0 *xb, short il, thread type4x4 & reg) { + device const uint16_t * qs = ((device const uint16_t *)xb + 1); + const float d1 = il ? (xb->d / 16.h) : xb->d; + const float d2 = d1 / 256.f; + const float md = -8.h * xb->d; + const ushort mask0 = il ? 0x00F0 : 0x000F; + const ushort mask1 = mask0 << 8; + + for (int i=0;i<8;i++) { + reg[i/2][2*(i%2)+0] = d1 * (qs[i] & mask0) + md; + reg[i/2][2*(i%2)+1] = d2 * (qs[i] & mask1) + md; + } +} + +template +void dequantize_q4_1(device const block_q4_1 *xb, short il, thread type4x4 & reg) { + device const uint16_t * qs = ((device const uint16_t *)xb + 2); + const float d1 = il ? (xb->d / 16.h) : xb->d; + const float d2 = d1 / 256.f; + const float m = xb->m; + const ushort mask0 = il ? 0x00F0 : 0x000F; + const ushort mask1 = mask0 << 8; + + for (int i=0;i<8;i++) { + reg[i/2][2*(i%2)+0] = ((qs[i] & mask0) * d1) + m; + reg[i/2][2*(i%2)+1] = ((qs[i] & mask1) * d2) + m; + } +} + +template +void dequantize_q5_0(device const block_q5_0 *xb, short il, thread type4x4 & reg) { + device const uint16_t * qs = ((device const uint16_t *)xb + 3); + const float d = xb->d; + const float md = -16.h * xb->d; + const ushort mask = il ? 0x00F0 : 0x000F; + + const uint32_t qh = *((device const uint32_t *)xb->qh); + + const int x_mv = il ? 4 : 0; + + const int gh_mv = il ? 12 : 0; + const int gh_bk = il ? 
0 : 4; + + for (int i = 0; i < 8; i++) { + // extract the 5-th bits for x0 and x1 + const uint8_t xh_0 = ((qh >> (gh_mv + 2*i )) << gh_bk) & 0x10; + const uint8_t xh_1 = ((qh >> (gh_mv + 2*i+1)) << gh_bk) & 0x10; + + // combine the 4-bits from qs with the 5th bit + const int32_t x0 = ((((qs[i] ) & mask) >> x_mv) | xh_0); + const int32_t x1 = ((((qs[i] >> 8) & mask) >> x_mv) | xh_1); + + reg[i/2][2*(i%2)+0] = d * x0 + md; + reg[i/2][2*(i%2)+1] = d * x1 + md; + } +} + +template +void dequantize_q5_1(device const block_q5_1 *xb, short il, thread type4x4 & reg) { + device const uint16_t * qs = ((device const uint16_t *)xb + 4); + const float d = xb->d; + const float m = xb->m; + const ushort mask = il ? 0x00F0 : 0x000F; + + const uint32_t qh = *((device const uint32_t *)xb->qh); + + const int x_mv = il ? 4 : 0; + + const int gh_mv = il ? 12 : 0; + const int gh_bk = il ? 0 : 4; + + for (int i = 0; i < 8; i++) { + // extract the 5-th bits for x0 and x1 + const uint8_t xh_0 = ((qh >> (gh_mv + 2*i )) << gh_bk) & 0x10; + const uint8_t xh_1 = ((qh >> (gh_mv + 2*i+1)) << gh_bk) & 0x10; + + // combine the 4-bits from qs with the 5th bit + const int32_t x0 = ((((qs[i] ) & mask) >> x_mv) | xh_0); + const int32_t x1 = ((((qs[i] >> 8) & mask) >> x_mv) | xh_1); + + reg[i/2][2*(i%2)+0] = d * x0 + m; + reg[i/2][2*(i%2)+1] = d * x1 + m; + } +} + +template +void dequantize_q8_0(device const block_q8_0 *xb, short il, thread type4x4 & reg) { + device const int8_t * qs = ((device const int8_t *)xb->qs); + const half d = xb->d; + + for (int i = 0; i < 16; i++) { + reg[i/4][i%4] = (qs[i + 16*il] * d); + } +} + +template +void dequantize_q2_K(device const block_q2_K *xb, short il, thread type4x4 & reg) { + const float d = xb->d; + const float min = xb->dmin; + device const uint8_t * q = (device const uint8_t *)xb->qs; + float dl, ml; + uint8_t sc = xb->scales[il]; + + q = q + 32*(il/8) + 16*(il&1); + il = (il/2)%4; + + half coef = il>1 ? (il>2 ? 1/64.h : 1/16.h) : (il>0 ? 
1/4.h : 1.h); + uchar mask = il>1 ? (il>2 ? 192 : 48) : (il>0 ? 12 : 3); + dl = d * (sc & 0xF) * coef, ml = min * (sc >> 4); + for (int i = 0; i < 16; ++i) { + reg[i/4][i%4] = dl * (q[i] & mask) - ml; + } +} + +template +void dequantize_q3_K(device const block_q3_K *xb, short il, thread type4x4 & reg) { + const half d_all = xb->d; + device const uint8_t * q = (device const uint8_t *)xb->qs; + device const uint8_t * h = (device const uint8_t *)xb->hmask; + device const int8_t * scales = (device const int8_t *)xb->scales; + + q = q + 32 * (il/8) + 16 * (il&1); + h = h + 16 * (il&1); + uint8_t m = 1 << (il/2); + uint16_t kmask1 = (il/4)>1 ? ((il/4)>2 ? 192 : 48) : \ + ((il/4)>0 ? 12 : 3); + uint16_t kmask2 = il/8 ? 0xF0 : 0x0F; + uint16_t scale_2 = scales[il%8], scale_1 = scales[8 + il%4]; + int16_t dl_int = (il/4)&1 ? (scale_2&kmask2) | ((scale_1&kmask1) << 2) + : (scale_2&kmask2) | ((scale_1&kmask1) << 4); + float dl = il<8 ? d_all * (dl_int - 32.f) : d_all * (dl_int / 16.f - 32.f); + const float ml = 4.f * dl; + + il = (il/2) & 3; + const half coef = il>1 ? (il>2 ? 1/64.h : 1/16.h) : (il>0 ? 1/4.h : 1.h); + const uint8_t mask = il>1 ? (il>2 ? 192 : 48) : (il>0 ? 12 : 3); + dl *= coef; + + for (int i = 0; i < 16; ++i) { + reg[i/4][i%4] = dl * (q[i] & mask) - (h[i] & m ? 0 : ml); + } +} + +static inline uchar2 get_scale_min_k4_just2(int j, int k, device const uchar * q) { + return j < 4 ? uchar2{uchar(q[j+0+k] & 63), uchar(q[j+4+k] & 63)} + : uchar2{uchar((q[j+4+k] & 0xF) | ((q[j-4+k] & 0xc0) >> 2)), uchar((q[j+4+k] >> 4) | ((q[j-0+k] & 0xc0) >> 2))}; +} + +template +void dequantize_q4_K(device const block_q4_K *xb, short il, thread type4x4 & reg) { + device const uchar * q = xb->qs; + + short is = (il/4) * 2; + q = q + (il/4) * 32 + 16 * (il&1); + il = il & 3; + const uchar2 sc = get_scale_min_k4_just2(is, il/2, xb->scales); + const float d = il < 2 ? 
xb->d : xb->d / 16.h; + const float min = xb->dmin; + const float dl = d * sc[0]; + const float ml = min * sc[1]; + + const ushort mask = il<2 ? 0x0F : 0xF0; + for (int i = 0; i < 16; ++i) { + reg[i/4][i%4] = dl * (q[i] & mask) - ml; + } +} + +template +void dequantize_q5_K(device const block_q5_K *xb, short il, thread type4x4 & reg) { + device const uint8_t * q = xb->qs; + device const uint8_t * qh = xb->qh; + + short is = (il/4) * 2; + q = q + 32 * (il/4) + 16 * (il&1); + qh = qh + 16 * (il&1); + uint8_t ul = 1 << (il/2); + il = il & 3; + const uchar2 sc = get_scale_min_k4_just2(is, il/2, xb->scales); + const float d = il < 2 ? xb->d : xb->d / 16.f; + const float min = xb->dmin; + const float dl = d * sc[0]; + const float ml = min * sc[1]; + + const ushort mask = il<2 ? 0x0F : 0xF0; + const float qh_val = il<2 ? 16.f : 256.f; + for (int i = 0; i < 16; ++i) { + reg[i/4][i%4] = dl * ((q[i] & mask) + (qh[i] & ul ? qh_val : 0)) - ml; + } +} + +template +void dequantize_q6_K(device const block_q6_K *xb, short il, thread type4x4 & reg) { + const half d_all = xb->d; + device const uint8_t * ql = (device const uint8_t *)xb->ql; + device const uint8_t * qh = (device const uint8_t *)xb->qh; + device const int8_t * scales = (device const int8_t *)xb->scales; + + ql = ql + 64*(il/8) + 32*((il/2)&1) + 16*(il&1); + qh = qh + 32*(il/8) + 16*(il&1); + float sc = scales[(il%2) + 2 * ((il/2))]; + il = (il/2) & 3; + + const uint16_t kmask1 = il>1 ? (il>2 ? 192 : 48) : (il>0 ? 12 : 3); + const uint16_t kmask2 = il>1 ? 0xF0 : 0x0F; + const float coef = il>1 ? 1.f/16.f : 1.f; + const float ml = d_all * sc * 32.f; + const float dl = d_all * sc * coef; + for (int i = 0; i < 16; ++i) { + const half q = il&1 ? 
((ql[i] & kmask2) | ((qh[i] & kmask1) << 2)) + : ((ql[i] & kmask2) | ((qh[i] & kmask1) << 4)); + reg[i/4][i%4] = dl * q - ml; + } +} + +template +void dequantize_iq2_xxs(device const block_iq2_xxs * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const float d = xb->d; + const int ib32 = il/2; + il = il%2; + // il = 0 or 1. il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16 + // each block of 32 needs 2 uint32_t's for the quants & scale, so 4 uint16_t's. + device const uint16_t * q2 = xb->qs + 4*ib32; + const uint32_t aux32_g = q2[0] | (q2[1] << 16); + const uint32_t aux32_s = q2[2] | (q2[3] << 16); + thread const uint8_t * aux8 = (thread const uint8_t *)&aux32_g; + const float dl = d * (0.5f + (aux32_s >> 28)) * 0.25f; + constant uint8_t * grid = (constant uint8_t *)(iq2xxs_grid + aux8[2*il+0]); + uint8_t signs = ksigns_iq2xs[(aux32_s >> 14*il) & 127]; + for (int i = 0; i < 8; ++i) { + reg[i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f); + } + grid = (constant uint8_t *)(iq2xxs_grid + aux8[2*il+1]); + signs = ksigns_iq2xs[(aux32_s >> (14*il+7)) & 127]; + for (int i = 0; i < 8; ++i) { + reg[2+i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f); + } +} + +template +void dequantize_iq2_xs(device const block_iq2_xs * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const float d = xb->d; + const int ib32 = il/2; + il = il%2; + // il = 0 or 1. il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16 + device const uint16_t * q2 = xb->qs + 4*ib32; + const float dl = d * (0.5f + ((xb->scales[ib32] >> 4*il) & 0xf)) * 0.25f; + constant uint8_t * grid = (constant uint8_t *)(iq2xs_grid + (q2[2*il+0] & 511)); + uint8_t signs = ksigns_iq2xs[q2[2*il+0] >> 9]; + for (int i = 0; i < 8; ++i) { + reg[i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? 
-1.f : 1.f); + } + grid = (constant uint8_t *)(iq2xs_grid + (q2[2*il+1] & 511)); + signs = ksigns_iq2xs[q2[2*il+1] >> 9]; + for (int i = 0; i < 8; ++i) { + reg[2+i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f); + } +} + +template +void dequantize_iq3_xxs(device const block_iq3_xxs * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const float d = xb->d; + const int ib32 = il/2; + il = il%2; + // il = 0 or 1. il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16 + device const uint8_t * q3 = xb->qs + 8*ib32; + device const uint16_t * gas = (device const uint16_t *)(xb->qs + QK_K/4) + 2*ib32; + const uint32_t aux32 = gas[0] | (gas[1] << 16); + const float dl = d * (0.5f + (aux32 >> 28)) * 0.5f; + constant uint8_t * grid1 = (constant uint8_t *)(iq3xxs_grid + q3[4*il+0]); + constant uint8_t * grid2 = (constant uint8_t *)(iq3xxs_grid + q3[4*il+1]); + uint8_t signs = ksigns_iq2xs[(aux32 >> 14*il) & 127]; + for (int i = 0; i < 4; ++i) { + reg[0][i] = dl * grid1[i] * (signs & kmask_iq2xs[i+0] ? -1.f : 1.f); + reg[1][i] = dl * grid2[i] * (signs & kmask_iq2xs[i+4] ? -1.f : 1.f); + } + grid1 = (constant uint8_t *)(iq3xxs_grid + q3[4*il+2]); + grid2 = (constant uint8_t *)(iq3xxs_grid + q3[4*il+3]); + signs = ksigns_iq2xs[(aux32 >> (14*il+7)) & 127]; + for (int i = 0; i < 4; ++i) { + reg[2][i] = dl * grid1[i] * (signs & kmask_iq2xs[i+0] ? -1.f : 1.f); + reg[3][i] = dl * grid2[i] * (signs & kmask_iq2xs[i+4] ? -1.f : 1.f); + } +} + +template +void dequantize_iq3_s(device const block_iq3_s * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const float d = xb->d; + const int ib32 = il/2; + il = il%2; + // il = 0 or 1. 
il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16 + device const uint8_t * qs = xb->qs + 8*ib32; + device const uint8_t * signs = xb->signs + 4*ib32 + 2*il; + const uint8_t qh = xb->qh[ib32] >> 4*il; + const float dl = d * (1 + 2*((xb->scales[ib32/2] >> 4*(ib32%2)) & 0xf)); + constant uint8_t * grid1 = (constant uint8_t *)(iq3s_grid + (qs[4*il+0] | ((qh << 8) & 256))); + constant uint8_t * grid2 = (constant uint8_t *)(iq3s_grid + (qs[4*il+1] | ((qh << 7) & 256))); + for (int i = 0; i < 4; ++i) { + reg[0][i] = dl * grid1[i] * select(1, -1, signs[0] & kmask_iq2xs[i+0]); + reg[1][i] = dl * grid2[i] * select(1, -1, signs[0] & kmask_iq2xs[i+4]); + } + grid1 = (constant uint8_t *)(iq3s_grid + (qs[4*il+2] | ((qh << 6) & 256))); + grid2 = (constant uint8_t *)(iq3s_grid + (qs[4*il+3] | ((qh << 5) & 256))); + for (int i = 0; i < 4; ++i) { + reg[2][i] = dl * grid1[i] * select(1, -1, signs[1] & kmask_iq2xs[i+0]); + reg[3][i] = dl * grid2[i] * select(1, -1, signs[1] & kmask_iq2xs[i+4]); + } +} + +template +void dequantize_iq2_s(device const block_iq2_s * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const float d = xb->d; + const int ib32 = il/2; + il = il%2; + // il = 0 or 1. 
il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16 + device const uint8_t * qs = xb->qs + 4*ib32 + 2*il; + device const uint8_t * signs = qs + QK_K/8; + const uint8_t qh = xb->qh[ib32] >> 4*il; + const float dl = d * (0.5f + ((xb->scales[ib32] >> 4*il) & 0xf)) * 0.25f; + constant uint8_t * grid1 = (constant uint8_t *)(iq2s_grid + (qs[0] | ((qh << 8) & 0x300))); + constant uint8_t * grid2 = (constant uint8_t *)(iq2s_grid + (qs[1] | ((qh << 6) & 0x300))); + for (int i = 0; i < 8; ++i) { + reg[i/4+0][i%4] = dl * grid1[i] * select(1, -1, signs[0] & kmask_iq2xs[i]); + reg[i/4+2][i%4] = dl * grid2[i] * select(1, -1, signs[1] & kmask_iq2xs[i]); + } +} + +template +void dequantize_iq1_s(device const block_iq1_s * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const int ib32 = il/2; + il = il%2; + const float d = xb->d; + device const uint8_t * qs = xb->qs + 4*ib32 + 2*il; + device const uint16_t * qh = xb->qh; + const float dl = d * (2*((qh[ib32] >> 12) & 7) + 1); + const float ml = dl * (qh[ib32] & 0x8000 ? 
-1 - IQ1S_DELTA : -1 + IQ1S_DELTA); + const uint16_t h = qh[ib32] >> 6*il; + constant uint8_t * grid1 = (constant uint8_t *)(iq1s_grid_gpu + (qs[0] | ((h << 8) & 0x700))); + constant uint8_t * grid2 = (constant uint8_t *)(iq1s_grid_gpu + (qs[1] | ((h << 5) & 0x700))); + for (int i = 0; i < 4; ++i) { + reg[0][i] = dl * (grid1[i] & 0xf) + ml; + reg[1][i] = dl * (grid1[i] >> 4) + ml; + reg[2][i] = dl * (grid2[i] & 0xf) + ml; + reg[3][i] = dl * (grid2[i] >> 4) + ml; + } +} + +template +void dequantize_iq1_m(device const block_iq1_m * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const int ib32 = il/2; + il = il%2; + device const uint16_t * sc = (device const uint16_t *)xb->scales; + + iq1m_scale_t scale; + scale.u16 = (sc[0] >> 12) | ((sc[1] >> 8) & 0x00f0) | ((sc[2] >> 4) & 0x0f00) | (sc[3] & 0xf000); + const float d = scale.f16; + + device const uint8_t * qs = xb->qs + 4*ib32 + 2*il; + device const uint8_t * qh = xb->qh + 2*ib32 + il; + + const float dl = d * (2*((sc[ib32/2] >> (6*(ib32%2)+3*il)) & 7) + 1); + const float ml1 = dl * (qh[0] & 0x08 ? -1 - IQ1M_DELTA : -1 + IQ1M_DELTA); + const float ml2 = dl * (qh[0] & 0x80 ? 
-1 - IQ1M_DELTA : -1 + IQ1M_DELTA); + constant uint8_t * grid1 = (constant uint8_t *)(iq1s_grid_gpu + (qs[0] | ((qh[0] << 8) & 0x700))); + constant uint8_t * grid2 = (constant uint8_t *)(iq1s_grid_gpu + (qs[1] | ((qh[0] << 4) & 0x700))); + for (int i = 0; i < 4; ++i) { + reg[0][i] = dl * (grid1[i] & 0xf) + ml1; + reg[1][i] = dl * (grid1[i] >> 4) + ml1; + reg[2][i] = dl * (grid2[i] & 0xf) + ml2; + reg[3][i] = dl * (grid2[i] >> 4) + ml2; + } +} + +template +void dequantize_iq4_nl(device const block_iq4_nl * xb, short il, thread type4x4 & reg) { + device const uint16_t * q4 = (device const uint16_t *)xb->qs; + const float d = xb->d; + uint32_t aux32; + thread const uint8_t * q8 = (thread const uint8_t *)&aux32; + for (int i = 0; i < 4; ++i) { + aux32 = ((q4[2*i] | (q4[2*i+1] << 16)) >> 4*il) & 0x0f0f0f0f; + reg[i][0] = d * kvalues_iq4nl_f[q8[0]]; + reg[i][1] = d * kvalues_iq4nl_f[q8[1]]; + reg[i][2] = d * kvalues_iq4nl_f[q8[2]]; + reg[i][3] = d * kvalues_iq4nl_f[q8[3]]; + } +} + +template +void dequantize_iq4_xs(device const block_iq4_xs * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const int ib32 = il/2; + il = il%2; + // il = 0 or 1. 
il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16 + device const uint32_t * q4 = (device const uint32_t *)xb->qs + 4*ib32; + const int ls = ((xb->scales_l[ib32/2] >> 4*(ib32%2)) & 0xf) | (((xb->scales_h >> 2*ib32) & 3) << 4); + const float d = (float)xb->d * (ls - 32); + uint32_t aux32; + thread const uint8_t * q8 = (thread const uint8_t *)&aux32; + for (int i = 0; i < 4; ++i) { + aux32 = (q4[i] >> 4*il) & 0x0f0f0f0f; + reg[i][0] = d * kvalues_iq4nl_f[q8[0]]; + reg[i][1] = d * kvalues_iq4nl_f[q8[1]]; + reg[i][2] = d * kvalues_iq4nl_f[q8[2]]; + reg[i][3] = d * kvalues_iq4nl_f[q8[3]]; + } +} + +template +kernel void kernel_get_rows_q( + device const void * src0, + device const void * src1, + device float * dst, + constant int64_t & ne00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb1, + constant uint64_t & nb2, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint3 tptg [[threads_per_threadgroup]]) { + const int64_t i10 = tgpig.x; + const int64_t i11 = tgpig.y; + + const int64_t r = ((const device int32_t *) ((const device char *) src1 + i11*nb11 + i10*nb10))[0]; + + const int64_t i02 = i11; + + for (int64_t ind = tiitg; ind < ne00/16; ind += tptg.x) { + float4x4 temp; + dequantize_func(((device const block_q *) ((const device char *) src0 + r*nb01 + i02*nb02)) + ind/nl, ind%nl, temp); + *(((device float4x4 *) ((device char *) dst + i11*nb2 + i10*nb1)) + ind) = temp; + } +} + +template +kernel void kernel_get_rows_f( + device const void * src0, + device const void * src1, + device float * dst, + constant int64_t & ne00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb1, + constant uint64_t & nb2, + uint3 tgpig[[threadgroup_position_in_grid]], + uint 
tiitg[[thread_index_in_threadgroup]], + uint3 tptg [[threads_per_threadgroup]]) { + const int64_t i10 = tgpig.x; + const int64_t i11 = tgpig.y; + + const int64_t r = ((const device int32_t *) ((const device char *) src1 + i11*nb11 + i10*nb10))[0]; + + const int64_t i02 = i11; + + for (int ind = tiitg; ind < ne00; ind += tptg.x) { + (( device float *) (( device char *) dst + i11*nb2 + i10*nb1))[ind] = + ((const device T *) ((const device char *) src0 + i02*nb02 + r*nb01))[ind]; + } +} + +kernel void kernel_get_rows_i32( + device const void * src0, + device const void * src1, + device int32_t * dst, + constant int64_t & ne00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb1, + constant uint64_t & nb2, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint3 tptg [[threads_per_threadgroup]]) { + const int64_t i10 = tgpig.x; + const int64_t i11 = tgpig.y; + + const int64_t r = ((const device int32_t *) ((const device char *) src1 + i11*nb11 + i10*nb10))[0]; + + const int64_t i02 = i11; + + for (int ind = tiitg; ind < ne00; ind += tptg.x) { + (( device int32_t *) (( device char *) dst + i11*nb2 + i10*nb1))[ind] = + ((const device int32_t *) ((const device char *) src0 + i02*nb02 + r*nb01))[ind]; + } +} + + +#define BLOCK_SIZE_M 64 // 8 simdgroup matrices from matrix A +#define BLOCK_SIZE_N 32 // 4 simdgroup matrices from matrix B +#define BLOCK_SIZE_K 32 +#define THREAD_MAT_M 4 // each thread take 4 simdgroup matrices from matrix A +#define THREAD_MAT_N 2 // each thread take 2 simdgroup matrices from matrix B +#define THREAD_PER_BLOCK 128 +#define THREAD_PER_ROW 2 // 2 thread for each row in matrix A to load numbers +#define THREAD_PER_COL 4 // 4 thread for each row in matrix B to load numbers +#define SG_MAT_SIZE 64 // simdgroup matrix is of shape 8x8 +#define SG_MAT_ROW 8 + +// each block_q contains 16*nl 
weights +template +kernel void kernel_mul_mm(device const uchar * src0, + device const uchar * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne02, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup uchar * shared_memory [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + threadgroup T * sa = (threadgroup T *)(shared_memory); + threadgroup float * sb = (threadgroup float *)(shared_memory + 4096); + + const uint r0 = tgpig.y; + const uint r1 = tgpig.x; + const uint im = tgpig.z; + + // if this block is of 64x32 shape or smaller + short n_rows = (ne0 - r0 * BLOCK_SIZE_M < BLOCK_SIZE_M) ? (ne0 - r0 * BLOCK_SIZE_M) : BLOCK_SIZE_M; + short n_cols = (ne1 - r1 * BLOCK_SIZE_N < BLOCK_SIZE_N) ? (ne1 - r1 * BLOCK_SIZE_N) : BLOCK_SIZE_N; + + // a thread shouldn't load data outside of the matrix + short thread_row = ((short)tiitg/THREAD_PER_ROW) < n_rows ? ((short)tiitg/THREAD_PER_ROW) : n_rows - 1; + short thread_col = ((short)tiitg/THREAD_PER_COL) < n_cols ? 
((short)tiitg/THREAD_PER_COL) : n_cols - 1; + + simdgroup_T8x8 ma[4]; + simdgroup_float8x8 mb[2]; + simdgroup_float8x8 c_res[8]; + for (int i = 0; i < 8; i++){ + c_res[i] = make_filled_simdgroup_matrix(0.f); + } + + short il = (tiitg % THREAD_PER_ROW); + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + uint offset0 = (i12/r2)*nb02 + (i13/r3)*(nb02*ne02); + ushort offset1 = il/nl; + + device const block_q * x = (device const block_q *)(src0 + (r0 * BLOCK_SIZE_M + thread_row) * nb01 + offset0) + offset1; + device const float * y = (device const float *)(src1 + + nb12 * im + + nb11 * (r1 * BLOCK_SIZE_N + thread_col) + + nb10 * (BLOCK_SIZE_K / THREAD_PER_COL * (tiitg % THREAD_PER_COL))); + + for (int loop_k = 0; loop_k < ne00; loop_k += BLOCK_SIZE_K) { + // load data and store to threadgroup memory + T4x4 temp_a; + dequantize_func(x, il, temp_a); + threadgroup_barrier(mem_flags::mem_threadgroup); + + #pragma unroll(16) + for (int i = 0; i < 16; i++) { + *(sa + SG_MAT_SIZE * ((tiitg / THREAD_PER_ROW / 8) \ + + (tiitg % THREAD_PER_ROW) * 16 + (i / 8) * 8) \ + + (tiitg / THREAD_PER_ROW) % 8 + (i & 7) * 8) = temp_a[i/4][i%4]; + } + + *(threadgroup float2x4 *)(sb + (tiitg % THREAD_PER_COL) * 8 * 32 + 8 * (tiitg / THREAD_PER_COL)) = *((device float2x4 *)y); + + il = (il + 2 < nl) ? il + 2 : il % 2; + x = (il < 2) ? 
x + (2+nl-1)/nl : x; + y += BLOCK_SIZE_K; + + threadgroup_barrier(mem_flags::mem_threadgroup); + + // load matrices from threadgroup memory and conduct outer products + threadgroup T * lsma = (sa + THREAD_MAT_M * SG_MAT_SIZE * (sgitg % 2)); + threadgroup float * lsmb = (sb + THREAD_MAT_N * SG_MAT_SIZE * (sgitg / 2)); + + #pragma unroll(4) + for (int ik = 0; ik < BLOCK_SIZE_K / 8; ik++) { + #pragma unroll(4) + for (int i = 0; i < 4; i++) { + simdgroup_load(ma[i],lsma + SG_MAT_SIZE * i); + } + simdgroup_barrier(mem_flags::mem_none); + #pragma unroll(2) + for (int i = 0; i < 2; i++) { + simdgroup_load(mb[i],lsmb + SG_MAT_SIZE * i); + } + + lsma += BLOCK_SIZE_M / SG_MAT_ROW * SG_MAT_SIZE; + lsmb += BLOCK_SIZE_N / SG_MAT_ROW * SG_MAT_SIZE; + + #pragma unroll(8) + for (int i = 0; i < 8; i++){ + simdgroup_multiply_accumulate(c_res[i], mb[i/4], ma[i%4], c_res[i]); + } + } + } + + if ((r0 + 1) * BLOCK_SIZE_M <= ne0 && (r1 + 1) * BLOCK_SIZE_N <= ne1) { + device float * C = dst + (BLOCK_SIZE_M * r0 + 32 * (sgitg & 1)) \ + + (BLOCK_SIZE_N * r1 + 16 * (sgitg >> 1)) * ne0 + im*ne1*ne0; + for (int i = 0; i < 8; i++) { + simdgroup_store(c_res[i], C + 8 * (i%4) + 8 * ne0 * (i/4), ne0); + } + } else { + // block is smaller than 64x32, we should avoid writing data outside of the matrix + threadgroup_barrier(mem_flags::mem_threadgroup); + threadgroup float * temp_str = ((threadgroup float *)shared_memory) \ + + 32 * (sgitg&1) + (16 * (sgitg>>1)) * BLOCK_SIZE_M; + for (int i = 0; i < 8; i++) { + simdgroup_store(c_res[i], temp_str + 8 * (i%4) + 8 * BLOCK_SIZE_M * (i/4), BLOCK_SIZE_M); + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + device float * C = dst + (BLOCK_SIZE_M * r0) + (BLOCK_SIZE_N * r1) * ne0 + im*ne1*ne0; + if (sgitg == 0) { + for (int i = 0; i < n_rows; i++) { + for (int j = tiitg; j < n_cols; j += BLOCK_SIZE_N) { + *(C + i + j * ne0) = *(temp_str + i + j * BLOCK_SIZE_M); + } + } + } + } +} + +// same as kernel_mul_mm_impl, but src1 and dst are accessed via 
indices stored in rowids +template +void kernel_mul_mm_id_impl( + device const uchar * src0, + device const uchar * src1, + threadgroup ushort2 * rowids, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne02, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + int64_t ne1, + int64_t ne0ne1, + threadgroup uchar * shared_memory, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + threadgroup half * sa = (threadgroup half *)(shared_memory); + threadgroup float * sb = (threadgroup float *)(shared_memory + 4096); + + const uint r0 = tgpig.y; + const uint r1 = tgpig.x; + + if (r1 * BLOCK_SIZE_N >= ne1) return; + + // if this block is of 64x32 shape or smaller + short n_rows = (ne0 - r0 * BLOCK_SIZE_M < BLOCK_SIZE_M) ? (ne0 - r0 * BLOCK_SIZE_M) : BLOCK_SIZE_M; + short n_cols = (ne1 - r1 * BLOCK_SIZE_N < BLOCK_SIZE_N) ? (ne1 - r1 * BLOCK_SIZE_N) : BLOCK_SIZE_N; + + // a thread shouldn't load data outside of the matrix + short thread_row = ((short)tiitg/THREAD_PER_ROW) < n_rows ? ((short)tiitg/THREAD_PER_ROW) : n_rows - 1; + short thread_col = ((short)tiitg/THREAD_PER_COL) < n_cols ? 
((short)tiitg/THREAD_PER_COL) : n_cols - 1; + + simdgroup_half8x8 ma[4]; + simdgroup_float8x8 mb[2]; + simdgroup_float8x8 c_res[8]; + for (int i = 0; i < 8; i++){ + c_res[i] = make_filled_simdgroup_matrix(0.f); + } + short il = (tiitg % THREAD_PER_ROW); + + ushort offset1 = il/nl; + + threadgroup const auto & id = rowids[r1 * BLOCK_SIZE_N + thread_col]; + + device const block_q * x = (device const block_q *)(src0 + (r0 * BLOCK_SIZE_M + thread_row) * nb01) + offset1; + device const float * y = (device const float *)(src1 + + nb12 * id[1] + + nb11 * (id[0] % ne11) + + nb10 * (BLOCK_SIZE_K / THREAD_PER_COL * (tiitg % THREAD_PER_COL))); + + for (int loop_k = 0; loop_k < ne00; loop_k += BLOCK_SIZE_K) { + // load data and store to threadgroup memory + half4x4 temp_a; + dequantize_func(x, il, temp_a); + threadgroup_barrier(mem_flags::mem_threadgroup); + + for (int i = 0; i < 16; i++) { + *(sa + SG_MAT_SIZE * ((tiitg / THREAD_PER_ROW / 8) \ + + (tiitg % THREAD_PER_ROW) * 16 + (i / 8) * 8) \ + + (tiitg / THREAD_PER_ROW) % 8 + (i & 7) * 8) = temp_a[i/4][i%4]; + } + + *(threadgroup float2x4 *)(sb + (tiitg % THREAD_PER_COL) * 8 * 32 + 8 * (tiitg / THREAD_PER_COL)) = *((device float2x4 *)y); + + il = (il + 2 < nl) ? il + 2 : il % 2; + x = (il < 2) ? 
x + (2+nl-1)/nl : x; + y += BLOCK_SIZE_K; + + threadgroup_barrier(mem_flags::mem_threadgroup); + + // load matrices from threadgroup memory and conduct outer products + threadgroup half * lsma = (sa + THREAD_MAT_M * SG_MAT_SIZE * (sgitg % 2)); + threadgroup float * lsmb = (sb + THREAD_MAT_N * SG_MAT_SIZE * (sgitg / 2)); + + for (int ik = 0; ik < BLOCK_SIZE_K / 8; ik++) { + for (int i = 0; i < 4; i++) { + simdgroup_load(ma[i], lsma + SG_MAT_SIZE * i); + } + simdgroup_barrier(mem_flags::mem_none); + for (int i = 0; i < 2; i++) { + simdgroup_load(mb[i], lsmb + SG_MAT_SIZE * i); + } + + lsma += BLOCK_SIZE_M / SG_MAT_ROW * SG_MAT_SIZE; + lsmb += BLOCK_SIZE_N / SG_MAT_ROW * SG_MAT_SIZE; + + for (int i = 0; i < 8; i++){ + simdgroup_multiply_accumulate(c_res[i], mb[i/4], ma[i%4], c_res[i]); + } + } + } + + { + threadgroup_barrier(mem_flags::mem_threadgroup); + threadgroup float * temp_str = ((threadgroup float *)shared_memory) \ + + 32 * (sgitg&1) + (16 * (sgitg>>1)) * BLOCK_SIZE_M; + for (int i = 0; i < 8; i++) { + simdgroup_store(c_res[i], temp_str + 8 * (i%4) + 8 * BLOCK_SIZE_M * (i/4), BLOCK_SIZE_M); + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + device float * C = dst + (BLOCK_SIZE_M * r0); + if (sgitg == 0) { + for (int j = tiitg; j < n_cols; j += BLOCK_SIZE_N) { + threadgroup const auto & jid = rowids[r1 * BLOCK_SIZE_N + j]; + int joff = jid[0] * ne0 + jid[1] * ne0ne1; + for (int i = 0; i < n_rows; i++) { + *(C + i + joff) = *(temp_str + i + j * BLOCK_SIZE_M); + } + } + } + } +} + +template +kernel void kernel_mul_mm_id( + device const uchar * src0s, + device const uchar * src1, + device float * dst, + device const uchar * ids, + constant int64_t & nei0, + constant int64_t & nei1, + constant uint64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne02, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + 
constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + threadgroup uchar * shared_memory [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + const int32_t i02 = tgpig.z; + tgpig.z = 0; + + device const uchar * src0 = src0s + i02*nb02; + + // row indices + threadgroup ushort2 * rowids = (threadgroup ushort2 *)(shared_memory + 8192); + + // TODO: parallelize this loop + int64_t _ne1 = 0; + for (ushort ii1 = 0; ii1 < nei1; ii1++) { + for (ushort ii0 = 0; ii0 < nei0; ii0++) { + int32_t id = ((device int32_t *) (ids + ii1*nbi1))[ii0]; + if (id == i02) { + //if (tiitg == 0) { + rowids[_ne1] = ushort2(ii0, ii1); + //} + _ne1++; + } + } + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + kernel_mul_mm_id_impl( + src0, + src1, + rowids, + dst, + ne00, + ne02, + nb01, + nb02, + ne11, + ne12, + nb10, + nb11, + nb12, + ne0, + _ne1, + ne0*ne1, + shared_memory, + tgpig, + tiitg, + sgitg); +} + +#define QK_NL 16 + +// +// get rows +// + +typedef decltype(kernel_get_rows_f) get_rows_f_t; + +template [[host_name("kernel_get_rows_f32")]] kernel get_rows_f_t kernel_get_rows_f; +template [[host_name("kernel_get_rows_f16")]] kernel get_rows_f_t kernel_get_rows_f; + +typedef decltype(kernel_get_rows_q) get_rows_q_t; + +template [[host_name("kernel_get_rows_q4_0")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q4_1")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q5_0")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q5_1")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q8_0")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q2_K")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q3_K")]] kernel get_rows_q_t kernel_get_rows_q; 
+template [[host_name("kernel_get_rows_q4_K")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q5_K")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q6_K")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq2_xxs")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq2_xs")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq3_xxs")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq3_s")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq2_s")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq1_s")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq1_m")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq4_nl")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq4_xs")]] kernel get_rows_q_t kernel_get_rows_q; + +// +// matrix-matrix multiplication +// + +typedef decltype(kernel_mul_mm) mat_mm_t; + +template [[host_name("kernel_mul_mm_f32_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_f16_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q4_0_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q4_1_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q5_0_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q5_1_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q8_0_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q2_K_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q3_K_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q4_K_f32")]] kernel mat_mm_t kernel_mul_mm; +template 
[[host_name("kernel_mul_mm_q5_K_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q6_K_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq2_xxs_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq2_xs_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq3_xxs_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq3_s_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq2_s_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq1_s_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq1_m_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq4_nl_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq4_xs_f32")]] kernel mat_mm_t kernel_mul_mm; + +// +// indirect matrix-matrix multiplication +// + +typedef decltype(kernel_mul_mm_id) mat_mm_id_t; + +template [[host_name("kernel_mul_mm_id_f32_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_f16_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q4_0_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q4_1_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q5_0_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q5_1_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q8_0_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q2_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q3_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q4_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q5_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template 
[[host_name("kernel_mul_mm_id_q6_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq2_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq2_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq3_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq3_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq2_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq1_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq1_m_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq4_nl_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq4_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; + +// +// matrix-vector multiplication +// + +typedef void (kernel_mul_mv_impl_t)( + device const char * src0, + device const char * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + uint64_t nb00, + uint64_t nb01, + uint64_t nb02, + int64_t ne10, + int64_t ne11, + int64_t ne12, + uint64_t nb10, + uint64_t nb11, + uint64_t nb12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + uint3 tgpig, + uint tiisg); + +typedef void (kernel_mul_mv2_impl_t)( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg); + +template +void mmv_fn( + device const char * src0, + device const char * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + uint64_t nb00, + uint64_t nb01, + uint64_t nb02, + int64_t ne10, + int64_t ne11, + int64_t ne12, + int64_t ne13, + uint64_t nb10, + uint64_t nb11, + uint64_t nb12, 
+ int64_t ne0, + int64_t ne1, + uint64_t nb1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiitg, + uint tiisg, + uint sgitg) { + impl_fn(src0,src1,dst,ne00,ne01,ne02,nb00,nb01,nb02,ne10,ne11,ne12,nb10,nb11,nb12,ne0,ne1,r2,r3,tgpig,tiisg); +} + +template +void mmv_fn( + device const char * src0, + device const char * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + uint64_t nb00, + uint64_t nb01, + uint64_t nb02, + int64_t ne10, + int64_t ne11, + int64_t ne12, + int64_t ne13, + uint64_t nb10, + uint64_t nb11, + uint64_t nb12, + int64_t ne0, + int64_t ne1, + uint64_t nb1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiitg, + uint tiisg, + uint sgitg) { + impl_fn(src0,(const device float *)src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,shared_values,tgpig,tiisg,sgitg); +} + +typedef decltype(mmv_fn>) mul_mv_impl_fn_t; + +template +kernel void kernel_mul_mv_id( + device const char * src0s, + device const char * src1, + device float * dst, + device const char * ids, + constant int64_t & nei0, + constant int64_t & nei1, + constant uint64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + const int iid1 = tgpig.z/nei0; + const int idx = tgpig.z%nei0; + + tgpig.z = 0; + + const int32_t i02 = ((device const int32_t *) (ids + iid1*nbi1))[idx]; + 
+ const int64_t i11 = idx % ne11; + const int64_t i12 = iid1; + + const int64_t i1 = idx; + const int64_t i2 = i12; + + device const char * src0_cur = src0s + i02*nb02; + device const char * src1_cur = src1 + i11*nb11 + i12*nb12; + device float * dst_cur = dst + i1*ne0 + i2*ne1*ne0; + + impl_fn( + /* src0 */ src0_cur, + /* src1 */ src1_cur, + /* dst */ dst_cur, + /* ne00 */ ne00, + /* ne01 */ ne01, + /* ne02 */ 1,//ne02, + /* nb00 */ nb00, + /* nb01 */ nb01, + /* nb02 */ nb02, + /* ne10 */ ne10, + /* ne11 */ 1,//ne11, + /* ne12 */ 1,//ne12, + /* ne13 */ 1,//ne13, + /* nb10 */ nb10, + /* nb11 */ nb11, + /* nb12 */ nb12, + /* ne0 */ ne0, + /* ne1 */ 1,//ne1, + /* nb1 */ nb1, + /* r2 */ 1, + /* r3 */ 1, + shared_values, + tgpig, + tiitg, + tiisg, + sgitg); +} + +typedef decltype(kernel_mul_mv_id>>) kernel_mul_mv_id_t; + +template [[host_name("kernel_mul_mv_id_f32_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>>; +template [[host_name("kernel_mul_mv_id_f16_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>>; +template [[host_name("kernel_mul_mv_id_q8_0_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_q4_0_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>>; +template [[host_name("kernel_mul_mv_id_q4_1_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>>; +template [[host_name("kernel_mul_mv_id_q5_0_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>>; +template [[host_name("kernel_mul_mv_id_q5_1_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>>; +template [[host_name("kernel_mul_mv_id_q2_K_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_q3_K_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_q4_K_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_q5_K_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_q6_K_f32")]] kernel kernel_mul_mv_id_t 
kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq1_s_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq1_m_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq2_xxs_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq2_xs_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq3_xxs_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq3_s_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq2_s_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq4_nl_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq4_xs_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; diff --git a/ml/backend/ggml/ggml-metal.h b/ml/backend/ggml/ggml-metal.h new file mode 100644 index 000000000..ab4263a91 --- /dev/null +++ b/ml/backend/ggml/ggml-metal.h @@ -0,0 +1,88 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +// An interface allowing to compute ggml_cgraph with Metal +// +// This is a fully functional interface that extends ggml with GPU support for Apple devices. +// A similar interface can be created for other GPU backends (e.g. Vulkan, CUDA, etc.) +// +// How it works? +// +// As long as your program can create and evaluate a ggml_cgraph on the CPU, you can use this +// interface to evaluate the same graph on the GPU. Instead of using ggml_graph_compute(), you +// use ggml_metal_graph_compute() (or ggml_vulkan_graph_compute(), etc.) +// +// You only need to make sure that all memory buffers that you used during the graph creation +// are mapped to the device memory with the ggml_metal_add_buffer() function. This mapping is +// used during the graph evaluation to determine the arguments of the compute kernels. +// +// Synchronization between device and host memory (for example for input and output tensors) +// is done with the ggml_metal_set_tensor() and ggml_metal_get_tensor() functions. 
+// + +#pragma once + +#include "ggml.h" +#include "ggml-backend.h" + +#include +#include + +struct ggml_tensor; +struct ggml_cgraph; + +#ifdef __cplusplus +extern "C" { +#endif + +// +// backend API +// user-code should use only these functions +// + +GGML_API void ggml_backend_metal_log_set_callback(ggml_log_callback log_callback, void * user_data); + +GGML_API ggml_backend_t ggml_backend_metal_init(void); + +GGML_API bool ggml_backend_is_metal(ggml_backend_t backend); + +GGML_API GGML_CALL ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t size, size_t max_size); + +GGML_API void ggml_backend_metal_set_abort_callback(ggml_backend_t backend, ggml_abort_callback abort_callback, void * user_data); + +GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void); + +// helper to check if the device supports a specific family +// ideally, the user code should be doing these checks +// ref: https://developer.apple.com/metal/Metal-Feature-Set-Tables.pdf +GGML_API bool ggml_backend_metal_supports_family(ggml_backend_t backend, int family); + +// capture all command buffers committed the next time `ggml_backend_graph_compute` is called +GGML_API void ggml_backend_metal_capture_next_compute(ggml_backend_t backend); + +#ifdef __cplusplus +} +#endif diff --git a/ml/backend/ggml/ggml-metal.metal b/ml/backend/ggml/ggml-metal.metal new file mode 100644 index 000000000..72df3e49f --- /dev/null +++ b/ml/backend/ggml/ggml-metal.metal @@ -0,0 +1,6445 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + 
* copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +#define GGML_COMMON_DECL_METAL +#define GGML_COMMON_IMPL_METAL +#include "ggml-common.h" + +#include + +using namespace metal; + +#define MAX(x, y) ((x) > (y) ? (x) : (y)) +#define MIN(x, y) ((x) < (y) ? (x) : (y)) +#define SWAP(x, y) { auto tmp = (x); (x) = (y); (y) = tmp; } + +#define N_SIMDWIDTH 32 // assuming SIMD group size is 32 + +enum ggml_sort_order { + GGML_SORT_ORDER_ASC, + GGML_SORT_ORDER_DESC, +}; + +// general-purpose kernel for addition, subtraction, multiplication and division of two tensors +// pros: works for non-contiguous tensors, supports broadcast across all dims +// cons: not very efficient +kernel void kernel_add( + device const char * src0, + device const char * src1, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant int64_t & ne0, + constant int64_t & ne1, + constant 
int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + constant int64_t & offs, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig.z; + const int64_t i02 = tgpig.y; + const int64_t i01 = tgpig.x; + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + + device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01 + offs; + device const char * src1_ptr = src1 + i13*nb13 + i12*nb12 + i11*nb11; + device char * dst_ptr = dst + i03*nb3 + i02*nb2 + i01*nb1 + offs; + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + const int i10 = i0 % ne10; + *((device float *)(dst_ptr + i0*nb0)) = *((device float *)(src0_ptr + i0*nb00)) + *((device float *)(src1_ptr + i10*nb10)); + } +} + +kernel void kernel_sub( + device const char * src0, + device const char * src1, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + constant int64_t & offs, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig.z; + const int64_t i02 = tgpig.y; + const int64_t i01 = tgpig.x; + + const int64_t i13 = i03 % ne13; + 
const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + + device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01 + offs; + device const char * src1_ptr = src1 + i13*nb13 + i12*nb12 + i11*nb11; + device char * dst_ptr = dst + i03*nb3 + i02*nb2 + i01*nb1 + offs; + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + const int i10 = i0 % ne10; + *((device float *)(dst_ptr + i0*nb0)) = *((device float *)(src0_ptr + i0*nb00)) - *((device float *)(src1_ptr + i10*nb10)); + } +} + +kernel void kernel_mul( + device const char * src0, + device const char * src1, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig.z; + const int64_t i02 = tgpig.y; + const int64_t i01 = tgpig.x; + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + + device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01; + device const char * src1_ptr = src1 + i13*nb13 + i12*nb12 + i11*nb11; + device char * dst_ptr = dst + i03*nb3 + i02*nb2 + i01*nb1; + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + const int i10 = i0 % ne10; + *((device float *)(dst_ptr + i0*nb0)) = *((device float *)(src0_ptr + i0*nb00)) * *((device float *)(src1_ptr + 
i10*nb10)); + } +} + +kernel void kernel_div( + device const char * src0, + device const char * src1, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i03 = tgpig.z; + const int64_t i02 = tgpig.y; + const int64_t i01 = tgpig.x; + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + + device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01; + device const char * src1_ptr = src1 + i13*nb13 + i12*nb12 + i11*nb11; + device char * dst_ptr = dst + i03*nb3 + i02*nb2 + i01*nb1; + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + const int i10 = i0 % ne10; + *((device float *)(dst_ptr + i0*nb0)) = *((device float *)(src0_ptr + i0*nb00)) / *((device float *)(src1_ptr + i10*nb10)); + } +} + +template +kernel void kernel_repeat( + device const char * src0, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + 
constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t i3 = tgpig.z; + const int64_t i2 = tgpig.y; + const int64_t i1 = tgpig.x; + + const int64_t i03 = i3 % ne03; + const int64_t i02 = i2 % ne02; + const int64_t i01 = i1 % ne01; + + device const char * src0_ptr = src0 + i03*nb03 + i02*nb02 + i01*nb01; + device char * dst_ptr = dst + i3*nb3 + i2*nb2 + i1*nb1 ; + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + const int i00 = i0 % ne00; + *((device T *)(dst_ptr + i0*nb0)) = *((device T *)(src0_ptr + i00*nb00)); + } +} + +typedef decltype(kernel_repeat) kernel_repeat_t; + +template [[host_name("kernel_repeat_f32")]] kernel kernel_repeat_t kernel_repeat; +template [[host_name("kernel_repeat_f16")]] kernel kernel_repeat_t kernel_repeat; +template [[host_name("kernel_repeat_i32")]] kernel kernel_repeat_t kernel_repeat; +template [[host_name("kernel_repeat_i16")]] kernel kernel_repeat_t kernel_repeat; + +// assumption: src1 is a row +// broadcast src1 into src0 +kernel void kernel_add_row( + device const float4 * src0, + device const float4 * src1, + device float4 * dst, + constant uint64_t & nb [[buffer(28)]], + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] + src1[tpig % nb]; +} + +kernel void kernel_sub_row( + device const float4 * src0, + device const float4 * src1, + device float4 * dst, + constant uint64_t & nb [[buffer(28)]], + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] - src1[tpig % nb]; +} + +kernel void kernel_mul_row( + device const float4 * src0, + device const float4 * src1, + device float4 * dst, + constant uint64_t & nb [[buffer(28)]], + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] * src1[tpig % nb]; +} + +kernel void kernel_div_row( + device const float4 * src0, + device const float4 * src1, + device float4 * dst, + constant uint64_t & nb 
[[buffer(28)]], + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] / src1[tpig % nb]; +} + +kernel void kernel_scale( + device const float * src0, + device float * dst, + constant float & scale, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] * scale; +} + +kernel void kernel_scale_4( + device const float4 * src0, + device float4 * dst, + constant float & scale, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] * scale; +} + +kernel void kernel_clamp( + device const float * src0, + device float * dst, + constant float & min, + constant float & max, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] < min ? min : (src0[tpig] > max ? max : src0[tpig]); +} + +kernel void kernel_relu( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = max(0.0f, src0[tpig]); +} + +kernel void kernel_sigmoid( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = 1.0f / (1.0f + exp(-src0[tpig])); +} + +kernel void kernel_tanh( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + device const float & x = src0[tpig]; + dst[tpig] = precise::tanh(x); +} + +constant float GELU_COEF_A = 0.044715f; +constant float GELU_QUICK_COEF = -1.702f; +constant float SQRT_2_OVER_PI = 0.79788456080286535587989211986876f; + +kernel void kernel_gelu( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + device const float & x = src0[tpig]; + + dst[tpig] = 0.5f*x*(1.0f + precise::tanh(SQRT_2_OVER_PI*x*(1.0f + GELU_COEF_A*x*x))); +} + +kernel void kernel_gelu_4( + device const float4 * src0, + device float4 * dst, + uint tpig[[thread_position_in_grid]]) { + device const float4 & x = src0[tpig]; + + // BEWARE !!! + // Simply using "tanh" instead of "precise::tanh" will sometimes results in NaNs! 
+ // This was observed with Falcon 7B and 40B models + // + dst[tpig] = 0.5f*x*(1.0f + precise::tanh(SQRT_2_OVER_PI*x*(1.0f + GELU_COEF_A*x*x))); +} + +kernel void kernel_gelu_quick( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + device const float & x = src0[tpig]; + + dst[tpig] = x*(1.0f/(1.0f+exp(GELU_QUICK_COEF*x))); +} + +kernel void kernel_gelu_quick_4( + device const float4 * src0, + device float4 * dst, + uint tpig[[thread_position_in_grid]]) { + device const float4 & x = src0[tpig]; + + dst[tpig] = x*(1.0f/(1.0f+exp(GELU_QUICK_COEF*x))); +} + +kernel void kernel_silu( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + device const float & x = src0[tpig]; + dst[tpig] = x / (1.0f + exp(-x)); +} + +kernel void kernel_silu_4( + device const float4 * src0, + device float4 * dst, + uint tpig[[thread_position_in_grid]]) { + device const float4 & x = src0[tpig]; + dst[tpig] = x / (1.0f + exp(-x)); +} + +kernel void kernel_sqr( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] * src0[tpig]; +} + +kernel void kernel_sqrt( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = sqrt(src0[tpig]); +} + +kernel void kernel_sin( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = sin(src0[tpig]); +} + +kernel void kernel_cos( + device const float * src0, + device float * dst, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = cos(src0[tpig]); +} + +kernel void kernel_sum_rows( + device const float * src0, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne10, + constant int64_t & ne11, + 
constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tpig[[thread_position_in_grid]]) { + int64_t i3 = tpig.z; + int64_t i2 = tpig.y; + int64_t i1 = tpig.x; + + if (i3 >= ne03 || i2 >= ne02 || i1 >= ne01) { + return; + } + + device const float * src_row = (device const float *) ((device const char *) src0 + i1*nb01 + i2*nb02 + i3*nb03); + device float * dst_row = (device float *) ((device char *) dst + i1*nb1 + i2*nb2 + i3*nb3); + + float row_sum = 0; + + for (int64_t i0 = 0; i0 < ne00; i0++) { + row_sum += src_row[i0]; + } + + dst_row[0] = row_sum; +} + +template +kernel void kernel_soft_max( + device const char * src0, + device const char * src1, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant float & scale, + constant float & max_bias, + constant float & m0, + constant float & m1, + constant uint32_t & n_head_log2, + threadgroup float * buf [[threadgroup(0)]], + uint tgpig[[threadgroup_position_in_grid]], + uint tpitg[[thread_position_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint ntg[[threads_per_threadgroup]]) { + const int64_t i03 = (tgpig) / (ne02*ne01); + const int64_t i02 = (tgpig - i03*ne02*ne01) / ne01; + const int64_t i01 = (tgpig - i03*ne02*ne01 - i02*ne01); + + device const float * psrc0 = (device const float *) src0 + (i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); + device const T * pmask = src1 != src0 ? 
(device const T *) src1 + i01*ne00 : nullptr; + device float * pdst = (device float *) dst + (i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); + + float slope = 1.0f; + + // ALiBi + if (max_bias > 0.0f) { + const int64_t h = i02; + + const float base = h < n_head_log2 ? m0 : m1; + const int exp = h < n_head_log2 ? h + 1 : 2*(h - n_head_log2) + 1; + + slope = pow(base, exp); + } + + // parallel max + float lmax = -INFINITY; + + for (int i00 = tpitg; i00 < ne00; i00 += ntg) { + lmax = MAX(lmax, psrc0[i00]*scale + (pmask ? slope*pmask[i00] : 0.0f)); + } + + // find the max value in the block + float max_val = simd_max(lmax); + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = -INFINITY; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = max_val; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + max_val = buf[tiisg]; + max_val = simd_max(max_val); + } + + // parallel sum + float lsum = 0.0f; + for (int i00 = tpitg; i00 < ne00; i00 += ntg) { + const float exp_psrc0 = exp((psrc0[i00]*scale + (pmask ? 
slope*pmask[i00] : 0.0f)) - max_val); + lsum += exp_psrc0; + pdst[i00] = exp_psrc0; + } + + // This barrier fixes a failing test + // ref: https://github.com/ggerganov/ggml/pull/621#discussion_r1425156335 + threadgroup_barrier(mem_flags::mem_none); + + float sum = simd_sum(lsum); + + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = 0.0f; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = sum; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + sum = buf[tiisg]; + sum = simd_sum(sum); + } + + const float inv_sum = 1.0f/sum; + + for (int i00 = tpitg; i00 < ne00; i00 += ntg) { + pdst[i00] *= inv_sum; + } +} + +template +kernel void kernel_soft_max_4( + device const char * src0, + device const char * src1, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant float & scale, + constant float & max_bias, + constant float & m0, + constant float & m1, + constant uint32_t & n_head_log2, + threadgroup float * buf [[threadgroup(0)]], + uint tgpig[[threadgroup_position_in_grid]], + uint tpitg[[thread_position_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint ntg[[threads_per_threadgroup]]) { + const int64_t i03 = (tgpig) / (ne02*ne01); + const int64_t i02 = (tgpig - i03*ne02*ne01) / ne01; + const int64_t i01 = (tgpig - i03*ne02*ne01 - i02*ne01); + + device const float4 * psrc4 = (device const float4 *) src0 + (i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00)/4; + device const T * pmask = src1 != src0 ? (device const T *) src1 + i01*ne00/4 : nullptr; + device float4 * pdst4 = (device float4 *) dst + (i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00)/4; + + float slope = 1.0f; + + if (max_bias > 0.0f) { + const int64_t h = i02; + + const float base = h < n_head_log2 ? m0 : m1; + const int exp = h < n_head_log2 ? 
h + 1 : 2*(h - n_head_log2) + 1; + + slope = pow(base, exp); + } + + // parallel max + float4 lmax4 = -INFINITY; + + for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { + lmax4 = fmax(lmax4, psrc4[i00]*scale + (float4)((pmask ? slope*pmask[i00] : 0.0f))); + } + + const float lmax = MAX(MAX(lmax4[0], lmax4[1]), MAX(lmax4[2], lmax4[3])); + + float max_val = simd_max(lmax); + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = -INFINITY; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = max_val; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + max_val = buf[tiisg]; + max_val = simd_max(max_val); + } + + // parallel sum + float4 lsum4 = 0.0f; + for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { + const float4 exp_psrc4 = exp((psrc4[i00]*scale + (float4)((pmask ? slope*pmask[i00] : 0.0f))) - max_val); + lsum4 += exp_psrc4; + pdst4[i00] = exp_psrc4; + } + + const float lsum = lsum4[0] + lsum4[1] + lsum4[2] + lsum4[3]; + + // This barrier fixes a failing test + // ref: https://github.com/ggerganov/ggml/pull/621#discussion_r1425156335 + threadgroup_barrier(mem_flags::mem_none); + + float sum = simd_sum(lsum); + + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = 0.0f; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = sum; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + sum = buf[tiisg]; + sum = simd_sum(sum); + } + + const float inv_sum = 1.0f/sum; + + for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { + pdst4[i00] *= inv_sum; + } +} + +typedef decltype(kernel_soft_max) kernel_soft_max_t; +typedef decltype(kernel_soft_max_4) kernel_soft_max_4_t; + +template [[host_name("kernel_soft_max_f16")]] kernel kernel_soft_max_t kernel_soft_max; +template [[host_name("kernel_soft_max_f32")]] kernel kernel_soft_max_t kernel_soft_max; +template [[host_name("kernel_soft_max_f16_4")]] kernel kernel_soft_max_4_t kernel_soft_max_4; +template 
[[host_name("kernel_soft_max_f32_4")]] kernel kernel_soft_max_4_t kernel_soft_max_4; + +kernel void kernel_diag_mask_inf( + device const float * src0, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int & n_past, + uint3 tpig[[thread_position_in_grid]]) { + const int64_t i02 = tpig[2]; + const int64_t i01 = tpig[1]; + const int64_t i00 = tpig[0]; + + if (i00 > n_past + i01) { + dst[i02*ne01*ne00 + i01*ne00 + i00] = -INFINITY; + } else { + dst[i02*ne01*ne00 + i01*ne00 + i00] = src0[i02*ne01*ne00 + i01*ne00 + i00]; + } +} + +kernel void kernel_diag_mask_inf_8( + device const float4 * src0, + device float4 * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int & n_past, + uint3 tpig[[thread_position_in_grid]]) { + + const int64_t i = 2*tpig[0]; + + dst[i+0] = src0[i+0]; + dst[i+1] = src0[i+1]; + int64_t i4 = 4*i; + const int64_t i02 = i4/(ne00*ne01); i4 -= i02*ne00*ne01; + const int64_t i01 = i4/(ne00); i4 -= i01*ne00; + const int64_t i00 = i4; + for (int k = 3; k >= 0; --k) { + if (i00 + 4 + k <= n_past + i01) { + break; + } + dst[i+1][k] = -INFINITY; + if (i00 + k > n_past + i01) { + dst[i][k] = -INFINITY; + } + } +} + +// ref: ggml.c:ggml_compute_forward_ssm_conv_f32 +// TODO: optimize +kernel void kernel_ssm_conv_f32( + device const void * src0, + device const void * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t ir = tgpig.x; + const int64_t i2 = 
tgpig.y; + const int64_t i3 = tgpig.z; + + const int64_t nc = ne10; + const int64_t ncs = ne00; + const int64_t nr = ne01; + const int64_t n_t = ne1; + const int64_t n_s = ne2; + + device const float * s = (device const float *) ((device const char *) src0 + ir*nb01 + i2*nb00 + i3*nb02); + device const float * c = (device const float *) ((device const char *) src1 + ir*nb11); + device float * x = (device float *) ((device char *) dst + ir*nb0 + i2*nb1 + i3*nb2); + + float sumf = 0.0f; + + for (int64_t i0 = 0; i0 < nc; ++i0) { + sumf += s[i0] * c[i0]; + } + + x[0] = sumf; +} + +// ref: ggml.c:ggml_compute_forward_ssm_scan_f32 +// TODO: optimize +kernel void kernel_ssm_scan_f32( + device const void * src0, + device const void * src1, + device const void * src2, + device const void * src3, + device const void * src4, + device const void * src5, + device float * dst, + constant int64_t & d_state, + constant int64_t & d_inner, + constant int64_t & n_seq_tokens, + constant int64_t & n_seqs, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant uint64_t & nb20, + constant uint64_t & nb21, + constant uint64_t & nb22, + constant uint64_t & nb30, + constant uint64_t & nb31, + constant uint64_t & nb40, + constant uint64_t & nb41, + constant uint64_t & nb42, + constant uint64_t & nb50, + constant uint64_t & nb51, + constant uint64_t & nb52, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int64_t ir = tgpig.x; + const int64_t i3 = tgpig.y; + + const int64_t nc = d_state; + const int64_t nr = d_inner; + const int64_t n_t = n_seq_tokens; + const int64_t n_s = n_seqs; + + for (int64_t i2 = 0; i2 < n_t; ++i2) { + device const float * s0 = (device const float *) ((device const char *) src0 + ir*nb01 + i3*nb02); + device const float * x 
= (device const float *) ((device const char *) src1 + ir*nb10 + i2*nb11 + i3*nb12); + device const float * dt = (device const float *) ((device const char *) src2 + ir*nb20 + i2*nb21 + i3*nb22); + device const float * A = (device const float *) ((device const char *) src3 + ir*nb31); + device const float * B = (device const float *) ((device const char *) src4 + i2*nb41 + i3*nb42); + device const float * C = (device const float *) ((device const char *) src5 + i2*nb51 + i3*nb52); + device float * y = (device float *) ((device char *) dst + ir*nb10 + i2*nb11 + i3*nb12); // TODO: do not use src1 strides + device float * s = (device float *) ((device char *) dst + ir*nb01 + i3*nb02 + nb13); + + if (i2 > 0) { + s0 = s; + } + + // i1 == 0 + float dt_soft_plus = dt[0] <= 20.0f ? log(1.0f + exp(dt[0])) : dt[0]; + float x_dt = x[0] * dt_soft_plus; + float sumf = 0.0f; + + for (int64_t i0 = 0; i0 < nc; ++i0) { + int64_t i = i0; + float state = (s0[i] * exp(dt_soft_plus * A[i])) + (B[i0] * x_dt); + sumf += state * C[i0]; + s[i] = state; + } + + y[0] = sumf; + } +} + +kernel void kernel_norm( + device const void * src0, + device float * dst, + constant int64_t & ne00, + constant uint64_t & nb01, + constant float & eps, + threadgroup float * sum [[threadgroup(0)]], + uint tgpig[[threadgroup_position_in_grid]], + uint tpitg[[thread_position_in_threadgroup]], + uint ntg[[threads_per_threadgroup]]) { + device const float * x = (device const float *) ((device const char *) src0 + tgpig*nb01); + // MEAN + // parallel sum + sum[tpitg] = 0.0f; + for (int i00 = tpitg; i00 < ne00; i00 += ntg) { + sum[tpitg] += x[i00]; + } + // reduce + threadgroup_barrier(mem_flags::mem_threadgroup); + for (uint i = ntg/2; i > 0; i /= 2) { + if (tpitg < i) { + sum[tpitg] += sum[tpitg + i]; + } + threadgroup_barrier(mem_flags::mem_threadgroup); + } + const float mean = sum[0] / ne00; + + // recenter and VARIANCE + threadgroup_barrier(mem_flags::mem_threadgroup); + device float * y = dst + tgpig*ne00; + 
sum[tpitg] = 0.0f; + for (int i00 = tpitg; i00 < ne00; i00 += ntg) { + y[i00] = x[i00] - mean; + sum[tpitg] += y[i00] * y[i00]; + } + + // reduce + threadgroup_barrier(mem_flags::mem_threadgroup); + for (uint i = ntg/2; i > 0; i /= 2) { + if (tpitg < i) { + sum[tpitg] += sum[tpitg + i]; + } + threadgroup_barrier(mem_flags::mem_threadgroup); + } + const float variance = sum[0] / ne00; + + const float scale = 1.0f/sqrt(variance + eps); + for (int i00 = tpitg; i00 < ne00; i00 += ntg) { + y[i00] = y[i00] * scale; + } +} + +kernel void kernel_rms_norm( + device const void * src0, + device float * dst, + constant int64_t & ne00, + constant uint64_t & nb01, + constant float & eps, + threadgroup float * buf [[threadgroup(0)]], + uint tgpig[[threadgroup_position_in_grid]], + uint tpitg[[thread_position_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint ntg[[threads_per_threadgroup]]) { + device const float4 * x = (device const float4 *) ((device const char *) src0 + tgpig*nb01); + + float4 sumf = 0; + float all_sum = 0; + + // parallel sum + for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { + sumf += x[i00] * x[i00]; + } + all_sum = sumf[0] + sumf[1] + sumf[2] + sumf[3]; + all_sum = simd_sum(all_sum); + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = 0.0f; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = all_sum; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + all_sum = buf[tiisg]; + all_sum = simd_sum(all_sum); + } + + const float mean = all_sum/ne00; + const float scale = 1.0f/sqrt(mean + eps); + + device float4 * y = (device float4 *) (dst + tgpig*ne00); + for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { + y[i00] = x[i00] * scale; + } +} + +kernel void kernel_group_norm( + device const float * src0, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + 
constant uint64_t & nb01, + constant uint64_t & nb02, + constant int32_t & n_groups, + constant float & eps, + threadgroup float * buf [[threadgroup(0)]], + uint tgpig[[threadgroup_position_in_grid]], + uint tpitg[[thread_position_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint ntg[[threads_per_threadgroup]]) { + const int64_t ne = ne00*ne01*ne02; + const int64_t gs = ne00*ne01*((ne02 + n_groups - 1) / n_groups); + + int start = tgpig * gs; + int end = start + gs; + + start += tpitg; + + if (end >= ne) { + end = ne; + } + + float tmp = 0.0f; // partial sum for thread in warp + + for (int j = start; j < end; j += ntg) { + tmp += src0[j]; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + tmp = simd_sum(tmp); + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = 0.0f; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = tmp; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + tmp = buf[tiisg]; + tmp = simd_sum(tmp); + } + + const float mean = tmp / gs; + tmp = 0.0f; + + for (int j = start; j < end; j += ntg) { + float xi = src0[j] - mean; + dst[j] = xi; + tmp += xi * xi; + } + + tmp = simd_sum(tmp); + if (ntg > N_SIMDWIDTH) { + if (sgitg == 0) { + buf[tiisg] = 0.0f; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + if (tiisg == 0) { + buf[sgitg] = tmp; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + tmp = buf[tiisg]; + tmp = simd_sum(tmp); + } + + const float variance = tmp / gs; + const float scale = 1.0f/sqrt(variance + eps); + for (int j = start; j < end; j += ntg) { + dst[j] *= scale; + } +} + +// function for calculate inner product between half a q4_0 block and 16 floats (yl), sumy is SUM(yl[i]) +// il indicates where the q4 quants begin (0 or QK4_0/4) +// we assume that the yl's have been multiplied with the appropriate scale factor +// that corresponds to the missing bit shifts (1, 1/16, 1/256, 1/4096) 
inline float block_q_n_dot_y(device const block_q4_0 * qb_curr, float sumy, thread float * yl, int il) {
    float d = qb_curr->d;

    // two partial accumulators: acc[0] gathers the low nibbles, acc[1] the high nibbles
    float2 acc = 0.f;

    // quants read as 16-bit words; +1 skips the one 16-bit scale field at the head of the block
    device const uint16_t * qs = ((device const uint16_t *)qb_curr + 1 + il/2);

    for (int i = 0; i < 8; i+=2) {
        // nibbles are masked in place (bit positions 0/8/4/12); the matching yl entries were
        // pre-divided by 1, 256, 16, 4096 by the caller, so no shifts are needed here
        acc[0] += yl[i + 0] * (qs[i / 2] & 0x000F)
                + yl[i + 1] * (qs[i / 2] & 0x0F00);
        acc[1] += yl[i + 8] * (qs[i / 2] & 0x00F0)
                + yl[i + 9] * (qs[i / 2] & 0xF000);
    }

    // q4_0 quants carry an implicit -8 offset: d*sum((q-8)*y) == d*(sum(q*y) - 8*sum(y))
    return d * (sumy * -8.f + acc[0] + acc[1]);
}

// function for calculate inner product between half a q4_1 block and 16 floats (yl), sumy is SUM(yl[i])
// il indicates where the q4 quants begin (0 or QK4_0/4)
// we assume that the yl's have been multiplied with the appropriate scale factor
// that corresponds to the missing bit shifts (1, 1/16, 1/256, 1/4096)
inline float block_q_n_dot_y(device const block_q4_1 * qb_curr, float sumy, thread float * yl, int il) {
    float d = qb_curr->d;
    float m = qb_curr->m;

    float2 acc = 0.f;

    // +2 skips the two 16-bit header fields (scale d and min m)
    device const uint16_t * qs = ((device const uint16_t *)qb_curr + 2 + il/2);

    for (int i = 0; i < 8; i+=2) {
        acc[0] += yl[i + 0] * (qs[i / 2] & 0x000F)
                + yl[i + 1] * (qs[i / 2] & 0x0F00);
        acc[1] += yl[i + 8] * (qs[i / 2] & 0x00F0)
                + yl[i + 9] * (qs[i / 2] & 0xF000);
    }

    // q4_1 is affine: d*sum(q*y) + m*sum(y)
    return d * (acc[0] + acc[1]) + sumy * m;
}

// function for calculate inner product between half a q5_0 block and 16 floats (yl), sumy is SUM(yl[i])
// il indicates where the q5 quants begin (0 or QK5_0/4)
// we assume that the yl's have been multiplied with the appropriate scale factor
// that corresponds to the missing bit shifts (1, 1/16, 1/256, 1/4096)
inline float block_q_n_dot_y(device const block_q5_0 * qb_curr, float sumy, thread float * yl, int il) {
    float d = qb_curr->d;

    float2 acc = 0.f;

    // +3 skips the 16-bit scale plus the two 16-bit words holding the packed high bits qh
    device const uint16_t * qs = ((device const uint16_t *)qb_curr + 3 + il/2);
    const uint32_t qh = *((device const uint32_t *)qb_curr->qh);

    for (int i = 0; i < 8; i+=2) {
        // each 4-bit quant is widened to 5 bits by OR-ing in its high bit from qh,
        // shifted to land just above the nibble's in-place bit position
        acc[0] += yl[i + 0] * ((qs[i / 2] & 0x000F) | ((qh >> (i+0+il        ) << 4 ) & 0x00010))
                + yl[i + 1] * ((qs[i / 2] & 0x0F00) | ((qh >> (i+1+il        ) << 12) & 0x01000));
        acc[1] += yl[i + 8] * ((qs[i / 2] & 0x00F0) | ((qh >> (i+0+il+QK5_0/2) << 8 ) & 0x00100))
                + yl[i + 9] * ((qs[i / 2] & 0xF000) | ((qh >> (i+1+il+QK5_0/2) << 16) & 0x10000));
    }

    // 5-bit quants carry an implicit -16 offset, folded in via sumy
    return d * (sumy * -16.f + acc[0] + acc[1]);
}

// function for calculate inner product between half a q5_1 block and 16 floats (yl), sumy is SUM(yl[i])
// il indicates where the q5 quants begin (0 or QK5_1/4)
// we assume that the yl's have been multiplied with the appropriate scale factor
// that corresponds to the missing bit shifts (1, 1/16, 1/256, 1/4096)
inline float block_q_n_dot_y(device const block_q5_1 * qb_curr, float sumy, thread float * yl, int il) {
    float d = qb_curr->d;
    float m = qb_curr->m;

    float2 acc = 0.f;

    // +4 skips d, m and the packed high-bit words
    device const uint16_t * qs = ((device const uint16_t *)qb_curr + 4 + il/2);
    const uint32_t qh = *((device const uint32_t *)qb_curr->qh);

    for (int i = 0; i < 8; i+=2) {
        // NOTE(review): uses QK5_0/2 rather than QK5_1/2 — harmless only if the two
        // block sizes are equal (they are both 32 upstream); confirm before changing either.
        acc[0] += yl[i + 0] * ((qs[i / 2] & 0x000F) | ((qh >> (i+0+il        ) << 4 ) & 0x00010))
                + yl[i + 1] * ((qs[i / 2] & 0x0F00) | ((qh >> (i+1+il        ) << 12) & 0x01000));
        acc[1] += yl[i + 8] * ((qs[i / 2] & 0x00F0) | ((qh >> (i+0+il+QK5_0/2) << 8 ) & 0x00100))
                + yl[i + 9] * ((qs[i / 2] & 0xF000) | ((qh >> (i+1+il+QK5_0/2) << 16) & 0x10000));
    }

    // affine variant: d*sum(q*y) + m*sum(y)
    return d * (acc[0] + acc[1]) + sumy * m;
}

// putting them in the kernel cause a significant performance penalty
#define N_DST 4 // each SIMD group works on 4 rows
#define N_SIMDGROUP 2 // number of SIMD groups in a thread group
//Note: This is a template, but strictly speaking it only applies to
//      quantizations where the block size is 32. It also does not
//      guard against the number of rows not being divisible by
//      N_DST, so this is another explicit assumption of the implementation.
// Generic mat-vec product for 32-element block quantizations (q4_0/q4_1/q5_0/q5_1).
// Each SIMD group computes nr (= N_DST) output rows; each thread handles half a block.
// NOTE(review): the angle-bracketed template parameter list (block type, nr, nsg, nw)
// appears to have been stripped by the patch extraction here and at the call sites
// below — restore from upstream before compiling.
template
void mul_vec_q_n_f32_impl(
        device const void * src0,
        device const float * src1,
        device float * dst,
        int64_t ne00,
        int64_t ne01,
        int64_t ne02,
        int64_t ne10,
        int64_t ne12,
        int64_t ne0,
        int64_t ne1,
        uint r2,
        uint r3,
        threadgroup int8_t * shared_values,  // unused here; kept for a uniform impl signature
        uint3 tgpig, uint tiisg, uint sgitg) {
    const int nb = ne00/QK4_0; // blocks per row

    const int r0 = tgpig.x;
    const int r1 = tgpig.y;
    const int im = tgpig.z;

    // first output row this SIMD group is responsible for
    const int first_row = (r0 * nsg + sgitg) * nr;

    const uint i12 = im%ne12;
    const uint i13 = im/ne12;

    // r2/r3 implement broadcast of src0 across the batch dims of src1
    const uint offset0 = first_row * nb + (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02);

    device const block_q_type * x = (device const block_q_type *) src0 + offset0;
    device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1;

    float yl[16]; // src1 vector cache
    float sumf[nr] = {0.f};

    const int ix = (tiisg/2);     // which block this thread starts on
    const int il = (tiisg%2)*8;   // which half of the block (0 or 8 quant nibbles in)

    device const float * yb = y + ix * QK4_0 + il;

    // each thread in a SIMD group deals with half a block.
    for (int ib = ix; ib < nb; ib += nw/2) {
        float sumy = 0;
        for (int i = 0; i < 8; i += 2) {
            // pre-scale yl entries by 1, 1/256, 1/16, 1/4096 to match the in-place
            // nibble bit positions consumed by block_q_n_dot_y
            sumy += yb[i] + yb[i+1];
            yl[i+0] = yb[i+ 0];
            yl[i+1] = yb[i+ 1]/256.f;

            sumy += yb[i+16] + yb[i+17];
            yl[i+8] = yb[i+16]/16.f;
            yl[i+9] = yb[i+17]/4096.f;
        }

        for (int row = 0; row < nr; row++) {
            sumf[row] += block_q_n_dot_y(x+ib+row*nb, sumy, yl, il);
        }

        yb += QK4_0 * 16;
    }

    // reduce per-thread partials across the SIMD group; lane 0 writes the result
    for (int row = 0; row < nr; ++row) {
        const float tot = simd_sum(sumf[row]);
        if (tiisg == 0 && first_row + row < ne01) {
            dst[im*ne0*ne1 + r1*ne0 + first_row + row] = tot;
        }
    }
}

kernel void kernel_mul_mv_q4_0_f32(
        device const void * src0,
        device const float * src1,
        device float * dst,
        constant int64_t & ne00,
        constant int64_t & ne01,
        constant int64_t & ne02,
        constant uint64_t & nb00,
        constant uint64_t & nb01,
        constant uint64_t & nb02,
        constant int64_t & ne10,
        constant int64_t & ne11,
        constant int64_t & ne12,
        constant uint64_t & nb10,
        constant uint64_t & nb11,
        constant uint64_t & nb12,
        constant int64_t & ne0,
        constant int64_t & ne1,
        constant uint & r2,
        constant uint & r3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint tiisg[[thread_index_in_simdgroup]],
        uint sgitg[[simdgroup_index_in_threadgroup]]) {
    // NOTE(review): template arguments stripped by extraction (see note on the impl)
    mul_vec_q_n_f32_impl(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,nullptr,tgpig,tiisg,sgitg);
}

kernel void kernel_mul_mv_q4_1_f32(
        device const void * src0,
        device const float * src1,
        device float * dst,
        constant int64_t & ne00,
        constant int64_t & ne01,
        constant int64_t & ne02,
        constant uint64_t & nb00,
        constant uint64_t & nb01,
        constant uint64_t & nb02,
        constant int64_t & ne10,
        constant int64_t & ne11,
        constant int64_t & ne12,
        constant uint64_t & nb10,
        constant uint64_t & nb11,
        constant uint64_t & nb12,
        constant int64_t & ne0,
        constant int64_t & ne1,
        constant uint & r2,
        constant uint & r3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint tiisg[[thread_index_in_simdgroup]],
        uint sgitg[[simdgroup_index_in_threadgroup]]) {
    mul_vec_q_n_f32_impl(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,nullptr,tgpig,tiisg,sgitg);
}

kernel void kernel_mul_mv_q5_0_f32(
        device const void * src0,
        device const float * src1,
        device float * dst,
        constant int64_t & ne00,
        constant int64_t & ne01,
        constant int64_t & ne02,
        constant uint64_t & nb00,
        constant uint64_t & nb01,
        constant uint64_t & nb02,
        constant int64_t & ne10,
        constant int64_t & ne11,
        constant int64_t & ne12,
        constant uint64_t & nb10,
        constant uint64_t & nb11,
        constant uint64_t & nb12,
        constant int64_t & ne0,
        constant int64_t & ne1,
        constant uint & r2,
        constant uint & r3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint tiisg[[thread_index_in_simdgroup]],
        uint sgitg[[simdgroup_index_in_threadgroup]]) {
    mul_vec_q_n_f32_impl(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,nullptr,tgpig,tiisg,sgitg);
}

kernel void kernel_mul_mv_q5_1_f32(
        device const void * src0,
        device const float * src1,
        device float * dst,
        constant int64_t & ne00,
        constant int64_t & ne01,
        constant int64_t & ne02,
        constant uint64_t & nb00,
        constant uint64_t & nb01,
        constant uint64_t & nb02,
        constant int64_t & ne10,
        constant int64_t & ne11,
        constant int64_t & ne12,
        constant uint64_t & nb10,
        constant uint64_t & nb11,
        constant uint64_t & nb12,
        constant int64_t & ne0,
        constant int64_t & ne1,
        constant uint & r2,
        constant uint & r3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint tiisg[[thread_index_in_simdgroup]],
        uint sgitg[[simdgroup_index_in_threadgroup]]) {
    mul_vec_q_n_f32_impl(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,nullptr,tgpig,tiisg,sgitg);
}


#define NB_Q8_0 8

// q8_0 mat-vec product (signature continues on the next source line)
void kernel_mul_mv_q8_0_f32_impl(
        device const void * src0,
        device const float * src1,
        device float * dst,
        int64_t ne00,
        int64_t ne01,
        int64_t ne02,
        int64_t ne10,
        int64_t ne12,
// (continued) remaining parameters of kernel_mul_mv_q8_0_f32_impl
        int64_t ne0,
        int64_t ne1,
        uint r2,
        uint r3,
        threadgroup int8_t * shared_values, // unused; kept for a uniform impl signature
        uint3 tgpig,
        uint tiisg,
        uint sgitg) {
    const int nr = N_DST;
    const int nsg = N_SIMDGROUP;
    const int nw = N_SIMDWIDTH;

    const int nb = ne00/QK8_0; // blocks per row
    const int r0 = tgpig.x;
    const int r1 = tgpig.y;
    const int im = tgpig.z;

    // first output row handled by this SIMD group
    const int first_row = (r0 * nsg + sgitg) * nr;

    const uint i12 = im%ne12;
    const uint i13 = im/ne12;

    // r2/r3 broadcast src0 across src1's batch dims
    const uint offset0 = first_row * nb + (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02);

    device const block_q8_0 * x = (device const block_q8_0 *) src0 + offset0;
    device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1;

    float yl[NB_Q8_0]; // src1 cache for this thread's slice
    float sumf[nr]={0.f};

    const int ix = tiisg/4; // starting block for this thread
    const int il = tiisg%4; // which NB_Q8_0-wide slice inside the block

    device const float * yb = y + ix * QK8_0 + NB_Q8_0*il;

    // each thread in a SIMD group deals with NB_Q8_0 quants at a time
    for (int ib = ix; ib < nb; ib += nw/4) {
        for (int i = 0; i < NB_Q8_0; ++i) {
            yl[i] = yb[i];
        }

        for (int row = 0; row < nr; row++) {
            device const int8_t * qs = x[ib+row*nb].qs + NB_Q8_0*il;
            float sumq = 0.f;
            for (int iq = 0; iq < NB_Q8_0; ++iq) {
                sumq += qs[iq] * yl[iq];
            }
            // scale the int8 partial dot by the per-block scale d
            sumf[row] += sumq*x[ib+row*nb].d;
        }

        yb += NB_Q8_0 * nw;
    }

    // SIMD-group reduction; lane 0 writes each finished row
    for (int row = 0; row < nr; ++row) {
        const float tot = simd_sum(sumf[row]);
        if (tiisg == 0 && first_row + row < ne01) {
            dst[r1*ne0 + im*ne0*ne1 + first_row + row] = tot;
        }
    }
}

[[host_name("kernel_mul_mv_q8_0_f32")]]
kernel void kernel_mul_mv_q8_0_f32(
        device const void * src0,
        device const float * src1,
        device float * dst,
        constant int64_t & ne00,
        constant int64_t & ne01,
        constant int64_t & ne02,
        constant uint64_t & nb00,
        constant uint64_t & nb01,
        constant uint64_t & nb02,
        constant int64_t & ne10,
        constant int64_t & ne11,
        constant int64_t & ne12,
        constant uint64_t & nb10,
        constant uint64_t & nb11,
        constant uint64_t & nb12,
        constant int64_t & ne0,
        constant int64_t & ne1,
        constant uint & r2,
        constant uint & r3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint tiisg[[thread_index_in_simdgroup]],
        uint sgitg[[simdgroup_index_in_threadgroup]]) {
    kernel_mul_mv_q8_0_f32_impl(src0,src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,nullptr,tgpig,tiisg,sgitg);
}

#define N_MV_T_T 4

// Generic (non-quantized) mat-vec product; each threadgroup row computes up to
// N_MV_T_T output rows, with a vectorized (T04/T14) path once ne00 >= 128.
// NOTE(review): the template parameter list (T0, T04, T1, T14) appears stripped
// by the patch extraction — restore from upstream before compiling.
template
void kernel_mul_mv_impl(
        device const char * src0,
        device const char * src1,
        device float * dst,
        int64_t ne00,
        int64_t ne01,
        int64_t ne02,
        uint64_t nb00,
        uint64_t nb01,
        uint64_t nb02,
        int64_t ne10,
        int64_t ne11,
        int64_t ne12,
        uint64_t nb10,
        uint64_t nb11,
        uint64_t nb12,
        int64_t ne0,
        int64_t ne1,
        uint r2,
        uint r3,
        uint3 tgpig,
        uint tiisg) {
    const int64_t r0 = tgpig.x;
    const int64_t rb = tgpig.y*N_MV_T_T;
    const int64_t im = tgpig.z;

    const uint i12 = im%ne12;
    const uint i13 = im/ne12;

    const uint offset0 = r0*nb01 + (i12/r2)*nb02 + (i13/r3)*nb02*ne02;

    device const T0 * x = (device const T0 *) (src0 + offset0);

    if (ne00 < 128) {
        // scalar path: short rows, strided by the 32-wide SIMD group
        for (int row = 0; row < N_MV_T_T; ++row) {
            int r1 = rb + row;
            if (r1 >= ne11) {
                break;
            }

            device const T1 * y = (device const T1 *) (src1 + r1*nb11 + im*nb12);

            float sumf = 0;
            for (int i = tiisg; i < ne00; i += 32) {
                sumf += (T0) x[i] * (T1) y[i];
            }

            float all_sum = simd_sum(sumf);
            if (tiisg == 0) {
                dst[im*ne1*ne0 + r1*ne0 + r0] = all_sum;
            }
        }
    } else {
        // vectorized path: process 4 elements at a time, then lane 0 picks up
        // the ne00 % 4 remainder before writing
        device const T04 * x4 = (device const T04 *) x;
        for (int row = 0; row < N_MV_T_T; ++row) {
            int r1 = rb + row;
            if (r1 >= ne11) {
                break;
            }

            device const T1 * y = (device const T1 *) (src1 + r1*nb11 + im*nb12);
            device const T14 * y4 = (device const T14 *) y;

            float sumf = 0;
            for (int i = tiisg; i < ne00/4; i += 32) {
                for (int k = 0; k < 4; ++k) sumf += (float) (x4[i][k] * y4[i][k]);
            }

            float all_sum = simd_sum(sumf);
            if (tiisg == 0) {
                for (int i = 4*(ne00/4); i < ne00; ++i) all_sum += (float) (x[i] * y[i]);
                dst[im*ne1*ne0 + r1*ne0 +
r0] = all_sum; + } + } + } +} + +template +kernel void kernel_mul_mv( + device const char * src0, + device const char * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]]) { + kernel_mul_mv_impl( + src0, + src1, + dst, + ne00, + ne01, + ne02, + nb00, + nb01, + nb02, + ne10, + ne11, + ne12, + nb10, + nb11, + nb12, + ne0, + ne1, + r2, + r3, + tgpig, + tiisg); +} + +typedef decltype(kernel_mul_mv) mul_mv_t; + +template [[host_name("kernel_mul_mv_f32_f32")]] kernel mul_mv_t kernel_mul_mv; +template [[host_name("kernel_mul_mv_f16_f32")]] kernel mul_mv_t kernel_mul_mv; +template [[host_name("kernel_mul_mv_f16_f16")]] kernel mul_mv_t kernel_mul_mv; + +template +kernel void kernel_mul_mv_1row( + device const char * src0, + device const char * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]]) { + + const int64_t r0 = tgpig.x; + const int64_t r1 = tgpig.y; + const int64_t im = tgpig.z; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = r0*nb01 + (i12/r2)*nb02 + 
(i13/r3)*nb02*ne02; + + device const T * x = (device const T *) (src0 + offset0); + device const float * y = (device const float *) (src1 + r1*nb11 + im*nb12); + + float sumf = 0; + if (ne00 < 128) { + for (int i = tiisg; i < ne00; i += 32) { + sumf += (float) x[i] * (float) y[i]; + } + float all_sum = simd_sum(sumf); + if (tiisg == 0) { + dst[im*ne1*ne0 + r1*ne0 + r0] = all_sum; + } + } else { + device const T4 * x4 = (device const T4 *) x; + device const float4 * y4 = (device const float4 *) y; + + for (int i = tiisg; i < ne00/4; i += 32) { + for (int k = 0; k < 4; ++k) sumf += (float) (x4[i][k] * y4[i][k]); + } + + float all_sum = simd_sum(sumf); + + if (tiisg == 0) { + for (int i = 4*(ne00/4); i < ne00; ++i) all_sum += (float) (x[i] * y[i]); + dst[im*ne1*ne0 + r1*ne0 + r0] = all_sum; + } + } +} + +typedef decltype(kernel_mul_mv_1row) mul_mv_1row_t; + +template [[host_name("kernel_mul_mv_f16_f32_1row")]] kernel mul_mv_1row_t kernel_mul_mv_1row; + +// Assumes row size (ne00) is a multiple of 4 +template +kernel void kernel_mul_mv_l4( + device const char * src0, + device const char * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]]) { + + const int nrows = ne11; + const int64_t r0 = tgpig.x; + const int64_t im = tgpig.z; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = r0*nb01 + (i12/r2)*nb02 + (i13/r3)*nb02*ne02; + + device const T4 * x4 = (device const T4 *) (src0 + offset0); + + for (int r1 = 0; r1 < nrows; ++r1) { + device const float4 * y4 = 
(device const float4 *) (src1 + r1*nb11 + im*nb12); + + float sumf = 0; + for (int i = tiisg; i < ne00/4; i += 32) { + for (int k = 0; k < 4; ++k) sumf += (float) (x4[i][k] * y4[i][k]); + } + + float all_sum = simd_sum(sumf); + if (tiisg == 0) { + dst[im*ne1*ne0 + r1*ne0 + r0] = all_sum; + } + } +} + +typedef decltype(kernel_mul_mv_l4) mul_mv_l4_t; + +template [[host_name("kernel_mul_mv_f16_f32_l4")]] kernel mul_mv_l4_t kernel_mul_mv_l4; + +static float rope_yarn_ramp(const float low, const float high, const int i0) { + const float y = (i0 / 2 - low) / max(0.001f, high - low); + return 1.0f - min(1.0f, max(0.0f, y)); +} + +// YaRN algorithm based on LlamaYaRNScaledRotaryEmbedding.py from https://github.com/jquesnelle/yarn +// MIT licensed. Copyright (c) 2023 Jeffrey Quesnelle and Bowen Peng. +static void rope_yarn( + float theta_extrap, float freq_scale, float corr_dims[2], int64_t i0, float ext_factor, float mscale, + thread float * cos_theta, thread float * sin_theta) { + // Get n-d rotational scaling corrected for extrapolation + float theta_interp = freq_scale * theta_extrap; + float theta = theta_interp; + if (ext_factor != 0.0f) { + float ramp_mix = rope_yarn_ramp(corr_dims[0], corr_dims[1], i0) * ext_factor; + theta = theta_interp * (1 - ramp_mix) + theta_extrap * ramp_mix; + + // Get n-d magnitude scaling corrected for interpolation + mscale *= 1.0f + 0.1f * log(1.0f / freq_scale); + } + *cos_theta = cos(theta) * mscale; + *sin_theta = sin(theta) * mscale; +} + +// Apparently solving `n_rot = 2pi * x * base^((2 * max_pos_emb) / n_dims)` for x, we get +// `corr_fac(n_rot) = n_dims * log(max_pos_emb / (n_rot * 2pi)) / (2 * log(base))` +static float rope_yarn_corr_factor(int n_dims, int n_ctx_orig, float n_rot, float base) { + return n_dims * log(n_ctx_orig / (n_rot * 2 * M_PI_F)) / (2 * log(base)); +} + +static void rope_yarn_corr_dims( + int n_dims, int n_ctx_orig, float freq_base, float beta_fast, float beta_slow, float dims[2] +) { + // start and end 
correction dims + dims[0] = max(0.0f, floor(rope_yarn_corr_factor(n_dims, n_ctx_orig, beta_fast, freq_base))); + dims[1] = min(n_dims - 1.0f, ceil(rope_yarn_corr_factor(n_dims, n_ctx_orig, beta_slow, freq_base))); +} + +template +kernel void kernel_rope_norm( + device const void * src0, + device const int32_t * src1, + device const float * src2, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + constant int & n_past, + constant int & n_dims, + constant int & n_ctx_orig, + constant float & freq_base, + constant float & freq_scale, + constant float & ext_factor, + constant float & attn_factor, + constant float & beta_fast, + constant float & beta_slow, + uint tiitg[[thread_index_in_threadgroup]], + uint3 tptg[[threads_per_threadgroup]], + uint3 tgpig[[threadgroup_position_in_grid]]) { + const int64_t i3 = tgpig[2]; + const int64_t i2 = tgpig[1]; + const int64_t i1 = tgpig[0]; + + float corr_dims[2]; + rope_yarn_corr_dims(n_dims, n_ctx_orig, freq_base, beta_fast, beta_slow, corr_dims); + + device const int32_t * pos = src1; + + const float theta_base = (float) pos[i2]; + const float inv_ndims = -1.f/n_dims; + + float cos_theta; + float sin_theta; + + for (int64_t i0 = 2*tiitg; i0 < ne0; i0 += 2*tptg.x) { + if (i0 < n_dims) { + const int64_t ic = i0/2; + + const float theta = theta_base * pow(freq_base, inv_ndims*i0); + + const float freq_factor = src2 != src0 ? 
src2[ic] : 1.0f; + + rope_yarn(theta/freq_factor, freq_scale, corr_dims, i0, ext_factor, attn_factor, &cos_theta, &sin_theta); + + device const T * const src = (device T *)((device char *) src0 + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); + device T * dst_data = (device T *)((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + const float x0 = src[0]; + const float x1 = src[1]; + + dst_data[0] = x0*cos_theta - x1*sin_theta; + dst_data[1] = x0*sin_theta + x1*cos_theta; + } else { + device const T * const src = (device T *)((device char *) src0 + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); + device T * dst_data = (device T *)((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + dst_data[0] = src[0]; + dst_data[1] = src[1]; + } + } +} + +template +kernel void kernel_rope_neox( + device const void * src0, + device const int32_t * src1, + device const float * src2, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + constant int & n_past, + constant int & n_dims, + constant int & n_ctx_orig, + constant float & freq_base, + constant float & freq_scale, + constant float & ext_factor, + constant float & attn_factor, + constant float & beta_fast, + constant float & beta_slow, + uint tiitg[[thread_index_in_threadgroup]], + uint3 tptg[[threads_per_threadgroup]], + uint3 tgpig[[threadgroup_position_in_grid]]) { + const int64_t i3 = tgpig[2]; + const int64_t i2 = tgpig[1]; + const int64_t i1 = tgpig[0]; + + float corr_dims[2]; + rope_yarn_corr_dims(n_dims, n_ctx_orig, freq_base, beta_fast, beta_slow, corr_dims); + + device const int32_t * pos = src1; + + const float 
theta_base = (float) pos[i2]; + const float inv_ndims = -1.f/n_dims; + + float cos_theta; + float sin_theta; + + for (int64_t i0 = 2*tiitg; i0 < ne0; i0 += 2*tptg.x) { + if (i0 < n_dims) { + const int64_t ic = i0/2; + + const float theta = theta_base * pow(freq_base, inv_ndims*i0); + + const float freq_factor = src2 != src0 ? src2[ic] : 1.0f; + + rope_yarn(theta/freq_factor, freq_scale, corr_dims, i0, ext_factor, attn_factor, &cos_theta, &sin_theta); + + device const T * const src = (device T *)((device char *) src0 + i3*nb03 + i2*nb02 + i1*nb01 + ic*nb00); + device T * dst_data = (device T *)((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + ic*nb0); + + const float x0 = src[0]; + const float x1 = src[n_dims/2]; + + dst_data[0] = x0*cos_theta - x1*sin_theta; + dst_data[n_dims/2] = x0*sin_theta + x1*cos_theta; + } else { + device const T * const src = (device T *)((device char *) src0 + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); + device T * dst_data = (device T *)((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + dst_data[0] = src[0]; + dst_data[1] = src[1]; + } + } +} + +typedef decltype(kernel_rope_norm) kernel_rope_norm_t; +typedef decltype(kernel_rope_neox) kernel_rope_neox_t; + +template [[host_name("kernel_rope_norm_f32")]] kernel kernel_rope_norm_t kernel_rope_norm; +template [[host_name("kernel_rope_norm_f16")]] kernel kernel_rope_norm_t kernel_rope_norm; + +template [[host_name("kernel_rope_neox_f32")]] kernel kernel_rope_neox_t kernel_rope_neox; +template [[host_name("kernel_rope_neox_f16")]] kernel kernel_rope_neox_t kernel_rope_neox; + +typedef void (im2col_t)( + device const float * x, + device char * dst, + constant int32_t & ofs0, + constant int32_t & ofs1, + constant int32_t & IW, + constant int32_t & IH, + constant int32_t & CHW, + constant int32_t & s0, + constant int32_t & s1, + constant int32_t & p0, + constant int32_t & p1, + constant int32_t & d0, + constant int32_t & d1, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 
tgpg[[threadgroups_per_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]); + +template +kernel void kernel_im2col( + device const float * x, + device char * dst, + constant int32_t & ofs0, + constant int32_t & ofs1, + constant int32_t & IW, + constant int32_t & IH, + constant int32_t & CHW, + constant int32_t & s0, + constant int32_t & s1, + constant int32_t & p0, + constant int32_t & p1, + constant int32_t & d0, + constant int32_t & d1, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tgpg[[threadgroups_per_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int32_t iiw = tgpig[2] * s0 + tpitg[2] * d0 - p0; + const int32_t iih = tgpig[1] * s1 + tpitg[1] * d1 - p1; + + const int32_t offset_dst = + (tpitg[0] * tgpg[1] * tgpg[2] + tgpig[1] * tgpg[2] + tgpig[2]) * CHW + + (tgpig[0] * (ntg[1] * ntg[2]) + tpitg[1] * ntg[2] + tpitg[2]); + + device T * pdst = (device T *) (dst); + + if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) { + pdst[offset_dst] = 0.0f; + } else { + const int32_t offset_src = tpitg[0] * ofs0 + tgpig[0] * ofs1; + pdst[offset_dst] = x[offset_src + iih * IW + iiw]; + } +} + +template [[host_name("kernel_im2col_f32")]] kernel im2col_t kernel_im2col; +template [[host_name("kernel_im2col_f16")]] kernel im2col_t kernel_im2col; + +kernel void kernel_upscale_f32( + device const char * src0, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + constant float & sf0, + constant float & sf1, + constant float & sf2, + constant float & sf3, + uint3 
tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + + const int64_t i3 = tgpig.z; + const int64_t i2 = tgpig.y; + const int64_t i1 = tgpig.x; + + const int64_t i03 = i3/sf3; + const int64_t i02 = i2/sf2; + const int64_t i01 = i1/sf1; + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + const int64_t i00 = i0/sf0; + + device const float * src0_ptr = (device const float *) (src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00); + device float * dst_ptr = (device float *) (dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + dst_ptr[0] = src0_ptr[0]; + } +} + +kernel void kernel_pad_f32( + device const char * src0, + device char * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne0, + constant int64_t & ne1, + constant int64_t & ne2, + constant int64_t & ne3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + + const int64_t i3 = tgpig.z; + const int64_t i2 = tgpig.y; + const int64_t i1 = tgpig.x; + + const int64_t i03 = i3; + const int64_t i02 = i2; + const int64_t i01 = i1; + + device const float * src0_ptr = (device const float *) (src0 + i03*nb03 + i02*nb02 + i01*nb01); + device float * dst_ptr = (device float *) (dst + i3*nb3 + i2*nb2 + i1*nb1); + + if (i1 < ne01 && i2 < ne02 && i3 < ne03) { + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + if (i0 < ne00) { + dst_ptr[i0] = src0_ptr[i0]; + } else { + dst_ptr[i0] = 0.0f; + } + } + + return; + } + + for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) { + dst_ptr[i0] = 0.0f; + } +} + +kernel void kernel_unpad_f32( + device const char * src0, + device char * dst, + 
// (continued) remaining parameters of kernel_unpad_f32
        constant int64_t & ne00,
        constant int64_t & ne01,
        constant int64_t & ne02,
        constant int64_t & ne03,
        constant uint64_t & nb00,
        constant uint64_t & nb01,
        constant uint64_t & nb02,
        constant uint64_t & nb03,
        constant int64_t & ne0,
        constant int64_t & ne1,
        constant int64_t & ne2,
        constant int64_t & ne3,
        constant uint64_t & nb0,
        constant uint64_t & nb1,
        constant uint64_t & nb2,
        constant uint64_t & nb3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3 ntg[[threads_per_threadgroup]]) {

    const int64_t i3 = tgpig.z;
    const int64_t i2 = tgpig.y;
    const int64_t i1 = tgpig.x;

    const int64_t i03 = i3;
    const int64_t i02 = i2;
    const int64_t i01 = i1;

    device const float * src0_ptr = (device const float *) (src0 + i03*nb03 + i02*nb02 + i01*nb01);
    device float * dst_ptr = (device float *) (dst + i3*nb3 + i2*nb2 + i1*nb1);

    // copy only the region that exists in src0; rows/planes outside it are left untouched
    if (i1 < ne01 && i2 < ne02 && i3 < ne03) {
        for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) {
            if (i0 < ne00) {
                dst_ptr[i0] = src0_ptr[i0];
            }
        }

        return;
    }
}

// Fill dst with the arithmetic sequence start, start+step, ...
kernel void kernel_arange_f32(
        device char * dst,
        constant int64_t & ne0,
        constant float & start,
        constant float & step,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3 ntg[[threads_per_threadgroup]]) {

    device float * dst_ptr = (device float *) dst;

    for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) {
        dst_ptr[i0] = start + step * i0;
    }
}

// Sinusoidal timestep embedding: first half cos(t*freq_j), second half sin(t*freq_j),
// with freq_j = exp(-log(max_period) * j / (dim/2)). One threadgroup per timestep i.
kernel void kernel_timestep_embedding_f32(
        device const char * src0,
        device char * dst,
        constant uint64_t & nb1,
        constant int & dim,
        constant int & max_period,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3 ntg[[threads_per_threadgroup]]) {

    int i = tgpig.x;
    device float * embed_data = (device float *)(dst + i*nb1);

    int half_ = dim / 2;
    for (int j = tpitg.x; j < half_; j += ntg.x) {
        float timestep = ((device float *)src0)[i];
        float freq = (float)exp(-log((float)max_period) * j / half_);
        float arg = timestep * freq;
        embed_data[j ] = cos(arg);
        embed_data[j + half_] = sin(arg);
    }

    // odd dim: zero the trailing pad element
    if (dim % 2 != 0 && tpitg.x == 0) {
        embed_data[dim] = 0.f;
    }
}

// bitonic sort implementation following the CUDA kernels as reference
typedef void (argsort_t)(
        device const float * x,
        device int32_t * dst,
        constant int64_t & ncols,
        constant int64_t & ncols_pad,
        threadgroup int32_t * shared_values [[threadgroup(0)]],
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]]);

// One threadgroup sorts one row of ncols_pad (power-of-two padded) indices in
// threadgroup memory, then writes back only the first ncols.
// NOTE(review): the template parameter (the sort order) appears stripped by the
// patch extraction — restore from upstream before compiling.
template
kernel void kernel_argsort_f32_i32(
        device const float * x,
        device int32_t * dst,
        constant int64_t & ncols,
        constant int64_t & ncols_pad,
        threadgroup int32_t * shared_values [[threadgroup(0)]],
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]]) {
    // bitonic sort
    int col = tpitg[0];
    int row = tgpig[1];

    if (col >= ncols_pad) return;

    device const float * x_row = x + row * ncols;
    threadgroup int32_t * dst_row = shared_values;

    // initialize indices
    dst_row[col] = col;

    threadgroup_barrier(mem_flags::mem_threadgroup);

    for (int k = 2; k <= ncols_pad; k *= 2) {
        for (int j = k / 2; j > 0; j /= 2) {
            int ixj = col ^ j;
            if (ixj > col) {
                // padding indices (>= ncols) sort to the end regardless of order
                if ((col & k) == 0) {
                    if (dst_row[col] >= ncols ||
                        (dst_row[ixj] < ncols && (order == GGML_SORT_ORDER_ASC ?
                            x_row[dst_row[col]] > x_row[dst_row[ixj]] :
                            x_row[dst_row[col]] < x_row[dst_row[ixj]]))
                    ) {
                        SWAP(dst_row[col], dst_row[ixj]);
                    }
                } else {
                    if (dst_row[ixj] >= ncols ||
                        (dst_row[col] < ncols && (order == GGML_SORT_ORDER_ASC ?
+ x_row[dst_row[col]] < x_row[dst_row[ixj]] : + x_row[dst_row[col]] > x_row[dst_row[ixj]])) + ) { + SWAP(dst_row[col], dst_row[ixj]); + } + } + } + threadgroup_barrier(mem_flags::mem_threadgroup); + } + } + + // copy the result to dst without the padding + if (col < ncols) { + dst[row * ncols + col] = dst_row[col]; + } +} + +template [[host_name("kernel_argsort_f32_i32_asc")]] kernel argsort_t kernel_argsort_f32_i32; +template [[host_name("kernel_argsort_f32_i32_desc")]] kernel argsort_t kernel_argsort_f32_i32; + +kernel void kernel_leaky_relu_f32( + device const float * src0, + device float * dst, + constant float & slope, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] > 0.0f ? src0[tpig] : src0[tpig] * slope; +} + +typedef void (flash_attn_ext_f16_t)( + device const char * q, + device const char * k, + device const char * v, + device const char * mask, + device float * dst, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne03, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, + constant uint64_t & nb21, + constant uint64_t & nb22, + constant uint64_t & nb23, + constant uint64_t & nb31, + constant int64_t & ne1, + constant int64_t & ne2, + constant float & scale, + constant float & max_bias, + constant float & m0, + constant float & m1, + constant uint32_t & n_head_log2, + constant float & logit_softcap, + threadgroup half * shared, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]], + ushort tiisg[[thread_index_in_simdgroup]], + ushort sgitg[[simdgroup_index_in_threadgroup]]); + +// ref: https://arxiv.org/pdf/2307.08691.pdf +template // head size, queries per threadgroup, cache items per threadgroup +kernel void kernel_flash_attn_ext_f16( 
        device const  char * q,
        device const  char * k,
        device const  char * v,
        device const  char * mask,
        device       float * dst,
        constant   int64_t & ne01,
        constant   int64_t & ne02,
        constant   int64_t & ne03,
        constant  uint64_t & nb01,
        constant  uint64_t & nb02,
        constant  uint64_t & nb03,
        constant   int64_t & ne11,
        constant   int64_t & ne12,
        constant   int64_t & ne13,
        constant  uint64_t & nb11,
        constant  uint64_t & nb12,
        constant  uint64_t & nb13,
        constant  uint64_t & nb21,
        constant  uint64_t & nb22,
        constant  uint64_t & nb23,
        constant  uint64_t & nb31,
        constant   int64_t & ne1,
        constant   int64_t & ne2,
        constant     float & scale,
        constant     float & max_bias,
        constant     float & m0,
        constant     float & m1,
        constant  uint32_t & n_head_log2,
        constant     float & logit_softcap,
        threadgroup   half * shared [[threadgroup(0)]],
        uint3  tgpig[[threadgroup_position_in_grid]],
        uint3  tpitg[[thread_position_in_threadgroup]],
        uint3    ntg[[threads_per_threadgroup]],
        ushort tiisg[[thread_index_in_simdgroup]],
        ushort sgitg[[simdgroup_index_in_threadgroup]]) {
    const short nsg = ntg.y; // number of simdgroups

    // query coordinates handled by this threadgroup
    const short iq3 = tgpig[2];
    const short iq2 = tgpig[1];
    const short iq1 = tgpig[0]*Q;

    const short D4 = D/4;
    const short D8 = D/8;
    //const short Q8 = Q/8;
    const short NW = N_SIMDWIDTH;
    const short SH = (C + Q); // shared memory per simdgroup in (half)

    const short T  = D + 2*nsg*SH; // shared memory size per query in (half)
    const short TF = T/2;          // shared memory size per query in (float)
    const short T4 = T/4;          // shared memory size per query in (half4)

    threadgroup half  * sq  = (threadgroup half  *) (shared +              0*D); // holds the query data
    threadgroup half4 * sq4 = (threadgroup half4 *) (shared +              0*D); // same as above but in half4
    threadgroup float * ss  = (threadgroup float *) (shared + 2*sgitg*SH + 1*D); // scratch buffer for attention and diagonal matrix

    // store the result for all queries in local memory in 8x8 matrices (the O
    // matrix from the paper) — NOTE(review): this line is the continuation of a
    // comment split mid-line in transit; comment marker restored here.
    simdgroup_half8x8 lo[D8];

    // load heads from Q to shared memory
    for (short j = sgitg; j < Q; j += nsg) {
        device const float4 * q4 = (device const float4 *) ((device const char *) q + ((iq1 + j)*nb01 + iq2*nb02 + iq3*nb03));

        for (short i = tiisg; i < D4; i += NW) {
            if (iq1 + j < ne01) {
                sq4[j*T4 + i] = (half4) q4[i];
            } else {
                // out-of-range queries are zero-padded
                sq4[j*T4 + i] = 0.0h;
            }
        }
    }

    // zero out lo
    for (short i = 0; i < D8; ++i) {
        // NOTE(review): the "<half, 8>" template arguments of
        // make_filled_simdgroup_matrix look stripped — TODO confirm upstream.
        lo[i] = make_filled_simdgroup_matrix(0.0h);
    }

    // zero out shared memory SH
    for (short j = 0; j < Q; ++j) {
        for (short i = tiisg; i < SH; i += NW) {
            ss[j*TF + i] = 0.0f;
        }
    }

    threadgroup_barrier(mem_flags::mem_threadgroup);

    {
        // per-query running sum and running max of the online softmax
        float S[Q] = { [0 ... Q-1] = 0.0h };
        float M[Q] = { [0 ... Q-1] = -FLT_MAX/2 };

        // assume K and V are same shape
        const short ne22 = ne12;
        const short ne23 = ne13;

        // broadcast (grouped-query attention: several Q heads share a KV head)
        const short rk2 = ne02/ne12;
        const short rk3 = ne03/ne13;

        const short rv2 = ne02/ne22;
        const short rv3 = ne03/ne23;

        // k indices
        const short ik2 = iq2/rk2;
        const short ik3 = iq3/rk3;

        // v indices
        const short iv2 = iq2/rv2;
        const short iv3 = iq3/rv3;

        // load the queries from shared memory into local memory
        simdgroup_half8x8 mq[D8];

        for (short i = 0; i < D8; ++i) {
            simdgroup_load(mq[i], sq + i*8, T);
        }

        // pointer to the mask
        device const half * mp = (device const half *) (mask + iq1*nb31);

        float slope = 1.0f;

        // ALiBi
        if (max_bias > 0.0f) {
            const uint32_t h = iq2;

            const float base = h < n_head_log2 ? m0 : m1;
            const int   exph = h < n_head_log2 ?
                                h + 1 : 2*(h - n_head_log2) + 1;

            slope = pow(base, exph);
        }

        // loop over the KV cache
        // each simdgroup handles blocks of Q rows and C columns
        for (int ic0 = 0; ic0 < ne11; ic0 += C*nsg) {
            const int ic = ic0 + C*sgitg;
            if (ic >= ne11) {
                break;
            }

            // Q*K^T
            {
                for (short cc = 0; cc < C/8; ++cc) {
                    // NOTE(review): template arguments of make_filled_simdgroup_matrix
                    // look stripped — TODO confirm upstream.
                    simdgroup_float8x8 mqk = make_filled_simdgroup_matrix(0.h);

                    device const half * pk = (device const half *) ((device const char *) k + ((ic + 8*cc)*nb11 + ik2*nb12 + ik3*nb13));

                    for (short i = 0; i < D8; ++i) {
                        simdgroup_half8x8 mk;
                        simdgroup_load(mk, pk + i*8, nb11/sizeof(half), 0, true); // transpose

                        simdgroup_multiply_accumulate(mqk, mq[i], mk, mqk);
                    }

                    simdgroup_store(mqk, ss + 8*cc, TF, 0, false);
                }
            }

            // used to detect blocks full of -INF
            float smax = -INFINITY;

            // online softmax
            {
                float ms[Q];

                for (short j = 0; j < Q; ++j) {
                    const float m = M[j];

                    // scale and apply the logitcap / mask
                    float s = ss[j*TF + tiisg]*scale;

                    if (logit_softcap != 0.0f) {
                        s = logit_softcap*precise::tanh(s);
                    }

                    // mask == q is used as the "no mask" sentinel by the host side
                    if (mask != q) {
                        // mqk = mqk + mask*slope
                        s += slope*mp[ic + j*nb31/sizeof(half) + tiisg];
                    }

                    smax = simd_max(max(smax, s));
                    M[j] = simd_max(max(M[j], s));

                    // rescale factor for the previously accumulated output
                    ms[j] = exp(m - M[j]);
                    const float vs = exp(s - M[j]);

                    S[j] = S[j]*ms[j] + simd_sum(vs);

                    // the P matrix from the paper (Q rows, C columns)
                    ss[j*TF + tiisg] = vs;
                }

                // create a QxQ diagonal matrix for rescaling the output
                if (tiisg < Q) {
                    ss[tiisg*TF + C + tiisg] = ms[tiisg];
                }
            }

            // skip -INF blocks
            if (smax == -INFINITY) {
                continue;
            }

            // O = diag(ms)*O
            {
                simdgroup_float8x8 mm;
                simdgroup_load(mm, ss + C, TF, 0, false);

                for (short i = 0; i < D8; ++i) {
                    simdgroup_multiply(lo[i], mm, lo[i]);
                }
            }

            // O = O + (Q*K^T)*V
            {
                for (short cc = 0; cc < C/8; ++cc) {
                    device const half * pv = (device const half *) ((device const char *) v + ((ic + 8*cc)*nb21 + iv2*nb22 + iv3*nb23));

                    for (short i = 0; i < D8; ++i) {
                        simdgroup_half8x8 mk;
                        simdgroup_load(mk, pv + i*8, nb21/sizeof(half), 0, false);

                        simdgroup_float8x8 mv;
                        simdgroup_load(mv, ss + 8*cc, TF, 0, false);

                        simdgroup_multiply_accumulate(lo[i], mv, mk, lo[i]);
                    }
                }
            }
        }

        // these are needed for reducing the results from the simdgroups (reuse the ss buffer)
        for (short j = 0; j < Q; ++j) {
            if (tiisg == 0) {
                ss[j*TF + 0] = S[j];
                ss[j*TF + 1] = M[j];
            }
        }
    }

    // reduce the warps sequentially
    for (short sg = 1; sg < nsg; ++sg) {
        float S = { 0.0h };
        float M = { -FLT_MAX/2 };

        threadgroup_barrier(mem_flags::mem_threadgroup);

        // each simdgroup stores its output to shared memory, reusing sq
        if (sgitg == sg) {
            for (short i = 0; i < D8; ++i) {
                simdgroup_store(lo[i], sq + i*8, T, 0, false);
            }
        }

        threadgroup_barrier(mem_flags::mem_threadgroup);

        // the first simdgroup accumulates the results from the other simdgroups
        if (sgitg == 0) {
            for (short j = 0; j < Q; ++j) {
                const float S0 = ss[j*TF +         0];
                const float S1 = ss[j*TF + sg*SH + 0];

                const float M0 = ss[j*TF +         1];
                const float M1 = ss[j*TF + sg*SH + 1];

                M = max(M0, M1);

                const float ms0 = exp(M0 - M);
                const float ms1 = exp(M1 - M);

                S = S0*ms0 + S1*ms1;

                if (tiisg == 0) {
                    ss[j*TF + 0] = S;
                    ss[j*TF + 1] = M;

                    ss[j*TF + C + j        ] = ms0;
                    ss[j*TF + C + j + sg*SH] = ms1;
                }
            }

            // O_0 = diag(ms0)*O_0 + diag(ms1)*O_1
            {
                simdgroup_half8x8 t;
                simdgroup_float8x8 ms0;
                simdgroup_float8x8 ms1;

                simdgroup_load(ms0, ss + C,         TF, 0, false);
                simdgroup_load(ms1, ss + C + sg*SH, TF, 0, false);

                for (short i = 0; i < D8; ++i) {
                    simdgroup_load    (t, sq + i*8, T, 0, false);
                    simdgroup_multiply(t, ms1, t);

                    simdgroup_multiply_accumulate(lo[i], ms0, lo[i], t);
                }
            }
        }
    }

    // store result to shared memory (reuse sq)
    if (sgitg == 0) {
        for (short i = 0; i < D8; ++i) {
            simdgroup_store(lo[i], sq + i*8, T, 0, false);
        }
    }

    device
           float4 * dst4 = (device float4 *) dst;

    // final rescale with 1/S and store to global memory
    if (sgitg == 0) {
        for (short j = 0; j < Q && iq1 + j < ne01; ++j) {
            const float S = ss[j*TF + 0];

            for (short i = tiisg; i < D4; i += NW) {
                dst4[(iq3*ne2*ne1 + iq2 + (iq1 + j)*ne1)*D4 + i] = (float4) sq4[j*T4 + i]/S;
            }
        }
    }
}

// one instantiation per supported head size
template [[host_name("kernel_flash_attn_ext_f16_h64" )]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<64>;
template [[host_name("kernel_flash_attn_ext_f16_h80" )]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<80>;
template [[host_name("kernel_flash_attn_ext_f16_h96" )]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<96>;
template [[host_name("kernel_flash_attn_ext_f16_h112")]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<112>;
template [[host_name("kernel_flash_attn_ext_f16_h128")]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<128>;
//template [[host_name("kernel_flash_attn_ext_f16_h256")]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_f16<256>;

// Vector (single-query-per-threadgroup) flash-attention variant.
// NOTE(review): the template parameter list looks stripped — TODO confirm upstream.
template // head size, queries per threadgroup, cache items per threadgroup
kernel void kernel_flash_attn_ext_vec_f16(
        device const  char * q,
        device const  char * k,
        device const  char * v,
        device const  char * mask,
        device       float * dst,
        constant   int64_t & ne01,
        constant   int64_t & ne02,
        constant   int64_t & ne03,
        constant  uint64_t & nb01,
        constant  uint64_t & nb02,
        constant  uint64_t & nb03,
        constant   int64_t & ne11,
        constant   int64_t & ne12,
        constant   int64_t & ne13,
        constant  uint64_t & nb11,
        constant  uint64_t & nb12,
        constant  uint64_t & nb13,
        constant  uint64_t & nb21,
        constant  uint64_t & nb22,
        constant  uint64_t & nb23,
        constant  uint64_t & nb31,
        constant   int64_t & ne1,
        constant   int64_t & ne2,
        constant     float & scale,
        constant     float & max_bias,
        constant     float & m0,
        constant     float & m1,
        constant  uint32_t & n_head_log2,
        constant     float & logit_softcap,
        threadgroup   half * shared
                                    [[threadgroup(0)]],
        uint3  tgpig[[threadgroup_position_in_grid]],
        uint3  tpitg[[thread_position_in_threadgroup]],
        uint3    ntg[[threads_per_threadgroup]],
        ushort tiisg[[thread_index_in_simdgroup]],
        ushort sgitg[[simdgroup_index_in_threadgroup]]) {
    const short nsg = ntg.y; // number of simdgroups

    const short iq3 = tgpig[2];
    const short iq2 = tgpig[1];
    const short iq1 = tgpig[0];

    const short D4 = D/4;
    const short NW = N_SIMDWIDTH;
    const short SH = (C + Q); // shared memory per simdgroup in (half)

    const short T = D + 2*nsg*SH; // shared memory size per query in (half)

    float slope = 1.0f;

    // ALiBi
    if (max_bias > 0.0f) {
        const uint32_t h = iq2;

        const float base = h < n_head_log2 ? m0 : m1;
        // NOTE(review): local "exp" shadows metal::exp inside this scope — works,
        // but rename to exph (as in the matrix variant above) for clarity.
        const int   exp  = h < n_head_log2 ? h + 1 : 2*(h - n_head_log2) + 1;

        slope = pow(base, exp);
    }

  //threadgroup half   * sq  = (threadgroup half   *) (shared +              0*D); // holds the query data
    threadgroup half4  * sq4 = (threadgroup half4  *) (shared +              0*D); // same as above but in half4
    threadgroup float  * ss  = (threadgroup float  *) (shared + 2*sgitg*SH + 1*D); // scratch buffer for attention and diagonal matrix
    threadgroup float4 * ss4 = (threadgroup float4 *) (shared + 2*sgitg*SH + 1*D); // same as above but in half4
    threadgroup half4  * sr4 = (threadgroup half4  *) (shared +   sgitg*D  + 1*T); // scratch buffer for the results

    // store the result for all queries in local memory in 8x8 matrices (the O matrix from the paper)
    half4 lo[D4/NW];

    // load heads from Q to shared memory
    device const float4 * q4 = (device const float4 *) ((device const char *) q + (iq1*nb01 + iq2*nb02 + iq3*nb03));

    for (short i = tiisg; i < D4; i += NW) {
        if (iq1 < ne01) {
            sq4[i] = (half4) q4[i];
        } else {
            sq4[i] = 0.0h;
        }
    }

    // zero out lo
    for (short i = tiisg; i < D4; i += NW) {
        lo[i/NW] = 0.0h;
    }

    // zero out shared memory SH
    for (short i = tiisg; i < SH/4; i += NW) {
        ss4[i] = 0.0h;
    }

    threadgroup_barrier(mem_flags::mem_threadgroup);

    {
        // running sum and running max of the online softmax (scalar: one query)
        float S = { 0.0h };
        float M = { -FLT_MAX/2 };

        // assume K and V are same shape
        const short ne22 = ne12;
        const short ne23 = ne13;

        // broadcast (grouped-query attention)
        const short rk2 = ne02/ne12;
        const short rk3 = ne03/ne13;

        const short rv2 = ne02/ne22;
        const short rv3 = ne03/ne23;

        // k indices
        const short ik2 = iq2 / rk2;
        const short ik3 = iq3 / rk3;

        // v indices
        const short iv2 = iq2 / rv2;
        const short iv3 = iq3 / rv3;

        // load the queries from shared memory into local memory
        float4 mq[D4];

        for (short ii = 0; ii < D4; ii += NW) {
            short i = ii + tiisg;
            mq[i] = (float4) sq4[i];
        }

        // pointer to the mask
        device const half4 * mp4 = (device const half4 *) (mask + iq1*nb31);

        // loop over the KV cache
        // each simdgroup handles blocks of Q rows and C columns
        for (int ic0 = 0; ic0 < ne11; ic0 += C*nsg) {
            const int ic = ic0 + C*sgitg;
            if (ic >= ne11) {
                break;
            }

            // Q*K^T
            {
#pragma unroll
                for (short cc = 0; cc < C/4; ++cc) {
                    float4 mqk = { 0.0h };

                    device const half4 * pk4 = (device const half4 *) ((device const char *) k + ((ic + 4*cc)*nb11 + ik2*nb12 + ik3*nb13));

#pragma unroll
                    for (short ii = 0; ii < D4; ii += NW) {
                        const short i = ii + tiisg;

                        // gather 4 consecutive K rows (nb11/8 = row stride in half4 units)
                        float4x4 mk;
                        mk[0] = (float4) pk4[i + 0*(nb11/8)];
                        mk[1] = (float4) pk4[i + 1*(nb11/8)];
                        mk[2] = (float4) pk4[i + 2*(nb11/8)];
                        mk[3] = (float4) pk4[i + 3*(nb11/8)];

                        mqk += (float4) (mq[i] * mk);
                    }

                    // reduce the results from the threads in the simdgroup
                    mqk += simd_shuffle_down(mqk, 16);
                    mqk += simd_shuffle_down(mqk,  8);
                    mqk += simd_shuffle_down(mqk,  4);
                    mqk += simd_shuffle_down(mqk,  2);
                    mqk += simd_shuffle_down(mqk,  1);

                    // mqk = mqk*scale + mask*slope
                    if (tiisg == 0) {
                        mqk *= scale;

                        if (logit_softcap != 0.0f) {
                            mqk = logit_softcap*precise::tanh(mqk);
                        }

                        // mask == q is the host-side "no mask" sentinel
                        mqk += (mask != q) ?
                               ((float4) mp4[ic/4 + cc])*slope : (float4) 0.0f;

                        ss4[cc] = mqk;
                    }
                }
            }

            // online softmax
            {
                const short p = tiisg;

                const float m = M;
                const float s = ss[p];

                M = simd_max(max(M, s));

                const float ms = exp(m - M); // rescale factor for previous output
                const float vs = exp(s - M);

                S = S*ms + simd_sum(vs);

                // the P matrix from the paper (Q rows, C columns)
                ss[p] = vs;

                // O = diag(ms)*O
#pragma unroll
                for (short ii = 0; ii < D4; ii += NW) {
                    const short i = ii + tiisg;
                    lo[i/NW] *= ms;
                }
            }

            // O = O + (Q*K^T)*V
            {
#pragma unroll
                for (short cc = 0; cc < C/4; ++cc) {
                    device const half4 * pv4 = (device const half4 *) ((device const char *) v + ((ic + 4*cc)*nb21 + iv2*nb22 + iv3*nb23));

#pragma unroll
                    for (short ii = 0; ii < D4; ii += NW) {
                        const short i = ii + tiisg;

                        lo[i/NW] += pv4[i + 0*(nb21/8)] * ss[4*cc + 0];
                        lo[i/NW] += pv4[i + 1*(nb21/8)] * ss[4*cc + 1];
                        lo[i/NW] += pv4[i + 2*(nb21/8)] * ss[4*cc + 2];
                        lo[i/NW] += pv4[i + 3*(nb21/8)] * ss[4*cc + 3];
                    }
                }
            }

        }

        // these are needed for reducing the results from the simdgroups (reuse the ss buffer)
        if (tiisg == 0) {
            ss[0] = S;
            ss[1] = M;
        }
    }

    // store results to shared memory
    for (short ii = 0; ii < D4; ii += NW) {
        short i = ii + tiisg;
        sr4[i] = lo[ii/NW];
    }

    threadgroup_barrier(mem_flags::mem_threadgroup);

    // parallel reduce (tree reduction across simdgroups)
    for (short r = nsg/2; r > 0; r >>= 1) {
        if (sgitg < r) {
            const float S0 = ss[       0];
            const float S1 = ss[r*SH + 0];

            const float M0 = ss[       1];
            const float M1 = ss[r*SH + 1];

            const float M = max(M0, M1);

            const float ms0 = exp(M0 - M);
            const float ms1 = exp(M1 - M);

            const float S = S0*ms0 + S1*ms1;

            if (tiisg == 0) {
                ss[0] = S;
                ss[1] = M;
            }

            // O_0 = diag(ms0)*O_0 + diag(ms1)*O_1
            for (short ii = 0; ii < D4; ii += NW) {
                short i = ii + tiisg;
                sr4[i] = sr4[i]*ms0 + sr4[i + r*D4]*ms1;
            }
        }

        threadgroup_barrier(mem_flags::mem_threadgroup);
    }

    device float4 * dst4 = (device
                            float4 *) dst;

    // final rescale with 1/S and store to global memory
    if (sgitg == 0) {
        const float S = ss[0];

        for (short ii = 0; ii < D4; ii += NW) {
            short i = ii + tiisg;
            dst4[(iq3*ne2*ne1 + iq2 + (iq1)*ne1)*D4 + i] = (float4) sr4[i]/S;
        }
    }
}

template [[host_name("kernel_flash_attn_ext_vec_f16_h128")]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_vec_f16<128>;
//template [[host_name("kernel_flash_attn_ext_vec_f16_h256")]] kernel flash_attn_ext_f16_t kernel_flash_attn_ext_vec_f16<256>;

// Generic strided tensor copy with element type conversion T0 -> T1.
// NOTE(review): the template parameter list ("<typename T0, typename T1>")
// looks stripped — TODO confirm upstream.
template
kernel void kernel_cpy(
        device  const void * src0,
        device        void * dst,
        constant   int64_t & ne00,
        constant   int64_t & ne01,
        constant   int64_t & ne02,
        constant   int64_t & ne03,
        constant  uint64_t & nb00,
        constant  uint64_t & nb01,
        constant  uint64_t & nb02,
        constant  uint64_t & nb03,
        constant   int64_t & ne0,
        constant   int64_t & ne1,
        constant   int64_t & ne2,
        constant   int64_t & ne3,
        constant  uint64_t & nb0,
        constant  uint64_t & nb1,
        constant  uint64_t & nb2,
        constant  uint64_t & nb3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3   ntg[[threads_per_threadgroup]]) {
    const int64_t i03 = tgpig[2];
    const int64_t i02 = tgpig[1];
    const int64_t i01 = tgpig[0];

    // linear element index of this row's first element in the source
    const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00;

    // re-decompose n into destination coordinates (dst may have a different shape)
    const int64_t i3 = n / (ne2*ne1*ne0);
    const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0);
    const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0;
    const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0);

    device T1 * dst_data = (device T1 *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);

    for (int64_t i00 = tpitg.x; i00 < ne00; i00 += ntg.x) {
        device const T0 * src = (device T0 *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00);
        dst_data[i00] = (T1) src[0];
    }
}

// NOTE(review): "<float, float>" etc. on the decltype/instantiations below look stripped.
typedef decltype(kernel_cpy) kernel_cpy_t;

template [[host_name("kernel_cpy_f32_f32")]] kernel kernel_cpy_t kernel_cpy;
template [[host_name("kernel_cpy_f32_f16")]] kernel kernel_cpy_t kernel_cpy;
template [[host_name("kernel_cpy_f16_f16")]] kernel kernel_cpy_t kernel_cpy;
template [[host_name("kernel_cpy_f16_f32")]] kernel kernel_cpy_t kernel_cpy;

// Copy f32 -> Q8_0: quantize each QK8_0-element group to 8-bit ints with a
// shared per-group scale d = amax/127.
kernel void kernel_cpy_f32_q8_0(
        device const float * src0,
        device        void * dst,
        constant   int64_t & ne00,
        constant   int64_t & ne01,
        constant   int64_t & ne02,
        constant   int64_t & ne03,
        constant  uint64_t & nb00,
        constant  uint64_t & nb01,
        constant  uint64_t & nb02,
        constant  uint64_t & nb03,
        constant   int64_t & ne0,
        constant   int64_t & ne1,
        constant   int64_t & ne2,
        constant   int64_t & ne3,
        constant  uint64_t & nb0,
        constant  uint64_t & nb1,
        constant  uint64_t & nb2,
        constant  uint64_t & nb3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3   ntg[[threads_per_threadgroup]]) {
    const int64_t i03 = tgpig[2];
    const int64_t i02 = tgpig[1];
    const int64_t i01 = tgpig[0];

    const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00;

    const int64_t i3 = n / (ne2*ne1*ne0);
    const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0);
    const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0;
    const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0)/QK8_0; // block index along dim 0

    device block_q8_0 * dst_data = (device block_q8_0 *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);

    for (int64_t i00 = tpitg.x*QK8_0; i00 < ne00; i00 += ntg.x*QK8_0) {
        device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00);

        float amax = 0.0f; // absolute max

        for (int j = 0; j < QK8_0; j++) {
            const float v = src[j];
            amax = MAX(amax, fabs(v));
        }

        const float d  = amax / ((1 << 7) - 1);
        const float id = d ?
                         1.0f/d : 0.0f;

        dst_data[i00/QK8_0].d = d;

        for (int j = 0; j < QK8_0; ++j) {
            const float x0 = src[j]*id;

            dst_data[i00/QK8_0].qs[j] = round(x0);
        }
    }
}

// Copy f32 -> Q4_0: quantize each QK4_0-element group to 4-bit values packed
// two-per-byte, with a signed per-group scale d = max/-8 (no separate min).
kernel void kernel_cpy_f32_q4_0(
        device const float * src0,
        device        void * dst,
        constant   int64_t & ne00,
        constant   int64_t & ne01,
        constant   int64_t & ne02,
        constant   int64_t & ne03,
        constant  uint64_t & nb00,
        constant  uint64_t & nb01,
        constant  uint64_t & nb02,
        constant  uint64_t & nb03,
        constant   int64_t & ne0,
        constant   int64_t & ne1,
        constant   int64_t & ne2,
        constant   int64_t & ne3,
        constant  uint64_t & nb0,
        constant  uint64_t & nb1,
        constant  uint64_t & nb2,
        constant  uint64_t & nb3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3   ntg[[threads_per_threadgroup]]) {
    const int64_t i03 = tgpig[2];
    const int64_t i02 = tgpig[1];
    const int64_t i01 = tgpig[0];

    const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00;

    const int64_t i3 = n / (ne2*ne1*ne0);
    const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0);
    const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0;
    const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0)/QK4_0;

    device block_q4_0 * dst_data = (device block_q4_0 *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);

    for (int64_t i00 = tpitg.x*QK4_0; i00 < ne00; i00 += ntg.x*QK4_0) {
        device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00);

        float amax = 0.0f; // absolute max
        float max  = 0.0f; // signed value attaining the absolute max

        for (int j = 0; j < QK4_0; j++) {
            const float v = src[j];
            if (amax < fabs(v)) {
                amax = fabs(v);
                max  = v;
            }
        }

        const float d  = max / -8;
        const float id = d ?
                         1.0f/d : 0.0f;

        dst_data[i00/QK4_0].d = d;

        for (int j = 0; j < QK4_0/2; ++j) {
            const float x0 = src[0       + j]*id;
            const float x1 = src[QK4_0/2 + j]*id;

            // +8.5f shifts the signed range into [0,16) with round-to-nearest
            const uint8_t xi0 = MIN(15, (int8_t)(x0 + 8.5f));
            const uint8_t xi1 = MIN(15, (int8_t)(x1 + 8.5f));

            dst_data[i00/QK4_0].qs[j]  = xi0;
            dst_data[i00/QK4_0].qs[j] |= xi1 << 4;
        }
    }
}

// Copy f32 -> Q4_1: 4-bit quantization with per-group scale d and offset m (min).
kernel void kernel_cpy_f32_q4_1(
        device const float * src0,
        device        void * dst,
        constant   int64_t & ne00,
        constant   int64_t & ne01,
        constant   int64_t & ne02,
        constant   int64_t & ne03,
        constant  uint64_t & nb00,
        constant  uint64_t & nb01,
        constant  uint64_t & nb02,
        constant  uint64_t & nb03,
        constant   int64_t & ne0,
        constant   int64_t & ne1,
        constant   int64_t & ne2,
        constant   int64_t & ne3,
        constant  uint64_t & nb0,
        constant  uint64_t & nb1,
        constant  uint64_t & nb2,
        constant  uint64_t & nb3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3   ntg[[threads_per_threadgroup]]) {
    const int64_t i03 = tgpig[2];
    const int64_t i02 = tgpig[1];
    const int64_t i01 = tgpig[0];

    const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00;

    const int64_t i3 = n / (ne2*ne1*ne0);
    const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0);
    const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0;
    const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0)/QK4_1;

    device block_q4_1 * dst_data = (device block_q4_1 *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);

    for (int64_t i00 = tpitg.x*QK4_1; i00 < ne00; i00 += ntg.x*QK4_1) {
        device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00);

        float min = FLT_MAX;
        float max = -FLT_MAX;

        for (int j = 0; j < QK4_1; j++) {
            const float v = src[j];
            if (min > v) min = v;
            if (max < v) max = v;
        }

        const float d  = (max - min) / ((1 << 4) - 1);
        const float id = d ?
                         1.0f/d : 0.0f;

        dst_data[i00/QK4_1].d = d;
        dst_data[i00/QK4_1].m = min;

        for (int j = 0; j < QK4_1/2; ++j) {
            const float x0 = (src[0       + j] - min)*id;
            const float x1 = (src[QK4_1/2 + j] - min)*id;

            const uint8_t xi0 = MIN(15, (int8_t)(x0 + 0.5f));
            const uint8_t xi1 = MIN(15, (int8_t)(x1 + 0.5f));

            dst_data[i00/QK4_1].qs[j]  = xi0;
            dst_data[i00/QK4_1].qs[j] |= xi1 << 4;
        }
    }
}

// Copy f32 -> Q5_0: 5-bit quantization; low 4 bits packed in qs, the 5th bit of
// each value collected into the qh bitfield. Scale d = max/-16, no offset.
kernel void kernel_cpy_f32_q5_0(
        device const float * src0,
        device        void * dst,
        constant   int64_t & ne00,
        constant   int64_t & ne01,
        constant   int64_t & ne02,
        constant   int64_t & ne03,
        constant  uint64_t & nb00,
        constant  uint64_t & nb01,
        constant  uint64_t & nb02,
        constant  uint64_t & nb03,
        constant   int64_t & ne0,
        constant   int64_t & ne1,
        constant   int64_t & ne2,
        constant   int64_t & ne3,
        constant  uint64_t & nb0,
        constant  uint64_t & nb1,
        constant  uint64_t & nb2,
        constant  uint64_t & nb3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3   ntg[[threads_per_threadgroup]]) {
    const int64_t i03 = tgpig[2];
    const int64_t i02 = tgpig[1];
    const int64_t i01 = tgpig[0];

    const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00;

    const int64_t i3 = n / (ne2*ne1*ne0);
    const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0);
    const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0;
    const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0)/QK5_0;

    device block_q5_0 * dst_data = (device block_q5_0 *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);

    for (int64_t i00 = tpitg.x*QK5_0; i00 < ne00; i00 += ntg.x*QK5_0) {
        device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00);

        float amax = 0.0f; // absolute max
        float max  = 0.0f;

        for (int j = 0; j < QK5_0; j++) {
            const float v = src[j];
            if (amax < fabs(v)) {
                amax = fabs(v);
                max  = v;
            }
        }

        const float d  = max / -16;
        const float id = d ?
                         1.0f/d : 0.0f;

        dst_data[i00/QK5_0].d = d;

        uint32_t qh = 0;
        for (int j = 0; j < QK5_0/2; ++j) {
            const float x0 = src[0       + j]*id;
            const float x1 = src[QK5_0/2 + j]*id;

            // +16.5f shifts the signed range into [0,32) with round-to-nearest
            const uint8_t xi0 = MIN(31, (int8_t)(x0 + 16.5f));
            const uint8_t xi1 = MIN(31, (int8_t)(x1 + 16.5f));

            dst_data[i00/QK5_0].qs[j] = (xi0 & 0xf) | ((xi1 & 0xf) << 4);
            // 5th bit of each quant goes into the shared qh bitfield
            qh |= ((xi0 & 0x10u) >> 4) << (j + 0);
            qh |= ((xi1 & 0x10u) >> 4) << (j + QK5_0/2);
        }
        thread const uint8_t * qh8 = (thread const uint8_t *)&qh;
        for (int j = 0; j < 4; ++j) {
            dst_data[i00/QK5_0].qh[j] = qh8[j];
        }
    }
}

// Copy f32 -> Q5_1: 5-bit quantization with per-group scale d and offset m (min);
// 5th bits collected into qh, as in Q5_0.
kernel void kernel_cpy_f32_q5_1(
        device const float * src0,
        device        void * dst,
        constant   int64_t & ne00,
        constant   int64_t & ne01,
        constant   int64_t & ne02,
        constant   int64_t & ne03,
        constant  uint64_t & nb00,
        constant  uint64_t & nb01,
        constant  uint64_t & nb02,
        constant  uint64_t & nb03,
        constant   int64_t & ne0,
        constant   int64_t & ne1,
        constant   int64_t & ne2,
        constant   int64_t & ne3,
        constant  uint64_t & nb0,
        constant  uint64_t & nb1,
        constant  uint64_t & nb2,
        constant  uint64_t & nb3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3   ntg[[threads_per_threadgroup]]) {
    const int64_t i03 = tgpig[2];
    const int64_t i02 = tgpig[1];
    const int64_t i01 = tgpig[0];

    const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00;

    const int64_t i3 = n / (ne2*ne1*ne0);
    const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0);
    const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0;
    const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0)/QK5_1;

    device block_q5_1 * dst_data = (device block_q5_1 *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);

    for (int64_t i00 = tpitg.x*QK5_1; i00 < ne00; i00 += ntg.x*QK5_1) {
        device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00);

        float max = src[0];
        float min = src[0];

        for (int j
                   = 1; j < QK5_1; j++) {
            const float v = src[j];
            min = v < min ? v : min;
            max = v > max ? v : max;
        }

        const float d  = (max - min) / 31;
        const float id = d ? 1.0f/d : 0.0f;

        dst_data[i00/QK5_1].d = d;
        dst_data[i00/QK5_1].m = min;

        uint32_t qh = 0;
        for (int j = 0; j < QK5_1/2; ++j) {
            const float x0 = (src[0       + j] - min)*id;
            const float x1 = (src[QK5_1/2 + j] - min)*id;

            const uint8_t xi0 = (uint8_t)(x0 + 0.5f);
            const uint8_t xi1 = (uint8_t)(x1 + 0.5f);

            dst_data[i00/QK5_1].qs[j] = (xi0 & 0xf) | ((xi1 & 0xf) << 4);
            // 5th bit of each quant goes into the shared qh bitfield
            qh |= ((xi0 & 0x10u) >> 4) << (j + 0);
            qh |= ((xi1 & 0x10u) >> 4) << (j + QK5_1/2);
        }
        thread const uint8_t * qh8 = (thread const uint8_t *)&qh;
        for (int j = 0; j < 4; ++j) {
            dst_data[i00/QK5_1].qh[j] = qh8[j];
        }
    }
}

// Binary search: index of the entry of the sorted table val[0..n-1] closest to x.
static inline int best_index_int8(int n, constant float * val, float x) {
    if (x <= val[0]) return 0;
    if (x >= val[n-1]) return n-1;
    int ml = 0, mu = n-1;
    while (mu-ml > 1) {
        int mav = (ml+mu)/2;
        if (x < val[mav]) mu = mav; else ml = mav;
    }
    // pick whichever neighbour is nearer
    return x - val[mu-1] < val[mu] - x ?
           mu-1 : mu;
}

// non-linear 4-bit codebook used by the IQ4_NL quantization
constexpr constant static float kvalues_iq4nl_f[16] = {
    -127.f, -104.f, -83.f, -65.f, -49.f, -35.f, -22.f, -10.f, 1.f, 13.f, 25.f, 38.f, 53.f, 69.f, 89.f, 113.f
};

// Copy f32 -> IQ4_NL: quantize each group against the non-linear codebook above,
// then refine the scale d by a weighted least-squares fit (sumqx/sumq2).
kernel void kernel_cpy_f32_iq4_nl(
        device const float * src0,
        device        void * dst,
        constant   int64_t & ne00,
        constant   int64_t & ne01,
        constant   int64_t & ne02,
        constant   int64_t & ne03,
        constant  uint64_t & nb00,
        constant  uint64_t & nb01,
        constant  uint64_t & nb02,
        constant  uint64_t & nb03,
        constant   int64_t & ne0,
        constant   int64_t & ne1,
        constant   int64_t & ne2,
        constant   int64_t & ne3,
        constant  uint64_t & nb0,
        constant  uint64_t & nb1,
        constant  uint64_t & nb2,
        constant  uint64_t & nb3,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3   ntg[[threads_per_threadgroup]]) {
    const int64_t i03 = tgpig[2];
    const int64_t i02 = tgpig[1];
    const int64_t i01 = tgpig[0];

    const int64_t n = i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00;

    const int64_t i3 = n / (ne2*ne1*ne0);
    const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0);
    const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0;
    const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0)/QK4_NL;

    device block_iq4_nl * dst_data = (device block_iq4_nl *) ((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);

    for (int64_t i00 = tpitg.x*QK4_NL; i00 < ne00; i00 += ntg.x*QK4_NL) {
        device const float * src = (device float *)((device char *) src0 + i03*nb03 + i02*nb02 + i01*nb01 + i00*nb00);

        float amax = 0.0f; // absolute max
        float max  = 0.0f;

        // NOTE(review): loop bound is QK4_0 inside a QK4_NL kernel — presumably
        // relies on QK4_0 == QK4_NL; confirm and prefer QK4_NL for consistency.
        for (int j = 0; j < QK4_0; j++) {
            const float v = src[j];
            if (amax < fabs(v)) {
                amax = fabs(v);
                max  = v;
            }
        }

        const float d  = max / kvalues_iq4nl_f[0];
        const float id = d ?
                         1.0f/d : 0.0f;

        float sumqx = 0, sumq2 = 0;
        for (int j = 0; j < QK4_NL/2; ++j) {
            const float x0 = src[0        + j]*id;
            const float x1 = src[QK4_NL/2 + j]*id;

            const uint8_t xi0 = best_index_int8(16, kvalues_iq4nl_f, x0);
            const uint8_t xi1 = best_index_int8(16, kvalues_iq4nl_f, x1);

            dst_data[i00/QK4_NL].qs[j] = xi0 | (xi1 << 4);

            // accumulate weighted least-squares terms for the refined scale
            const float v0 = kvalues_iq4nl_f[xi0];
            const float v1 = kvalues_iq4nl_f[xi1];
            const float w0 = src[0        + j]*src[0        + j];
            const float w1 = src[QK4_NL/2 + j]*src[QK4_NL/2 + j];
            sumqx += w0*v0*src[j] + w1*v1*src[QK4_NL/2 + j];
            sumq2 += w0*v0*v0 + w1*v1*v1;

        }

        dst_data[i00/QK4_NL].d = sumq2 > 0 ? sumqx/sumq2 : d;

    }
}

// Concatenate src0 and src1 into dst along dimension `dim` (f32 element copy).
kernel void kernel_concat(
        device  const char * src0,
        device  const char * src1,
        device        char * dst,
        constant   int64_t & ne00,
        constant   int64_t & ne01,
        constant   int64_t & ne02,
        constant   int64_t & ne03,
        constant  uint64_t & nb00,
        constant  uint64_t & nb01,
        constant  uint64_t & nb02,
        constant  uint64_t & nb03,
        constant   int64_t & ne10,
        constant   int64_t & ne11,
        constant   int64_t & ne12,
        constant   int64_t & ne13,
        constant  uint64_t & nb10,
        constant  uint64_t & nb11,
        constant  uint64_t & nb12,
        constant  uint64_t & nb13,
        constant   int64_t & ne0,
        constant   int64_t & ne1,
        constant   int64_t & ne2,
        constant   int64_t & ne3,
        constant  uint64_t & nb0,
        constant  uint64_t & nb1,
        constant  uint64_t & nb2,
        constant  uint64_t & nb3,
        constant   int32_t & dim,
        uint3 tgpig[[threadgroup_position_in_grid]],
        uint3 tpitg[[thread_position_in_threadgroup]],
        uint3   ntg[[threads_per_threadgroup]]) {

    const int64_t i3 = tgpig.z;
    const int64_t i2 = tgpig.y;
    const int64_t i1 = tgpig.x;

    // offset of src1's region within dst along the concatenated dimension
    int64_t o[4] = {0, 0, 0, 0};
    o[dim] = dim == 0 ? ne00 : (dim == 1 ? ne01 : (dim == 2 ?
                                                            ne02 : ne03));

    device const float * x;

    for (int i0 = tpitg.x; i0 < ne0; i0 += ntg.x) {
        // coordinates inside src0's extent read from src0, otherwise from src1
        if (i0 < ne00 && i1 < ne01 && i2 < ne02 && i3 < ne03) {
            x = (device const float *)(src0 + (i3       )*nb03 + (i2       )*nb02 + (i1       )*nb01 + (i0       )*nb00);
        } else {
            x = (device const float *)(src1 + (i3 - o[3])*nb13 + (i2 - o[2])*nb12 + (i1 - o[1])*nb11 + (i0 - o[0])*nb10);
        }

        device float * y = (device float *)(dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);

        *y = *x;
    }
}

// Matrix-vector product: Q2_K-quantized src0 times f32 src1 -> f32 dst.
// Each simdgroup computes N_DST destination rows; r2/r3 broadcast src0 over src1.
void kernel_mul_mv_q2_K_f32_impl(
        device const  void * src0,
        device const float * src1,
        device       float * dst,
                   int64_t   ne00,
                   int64_t   ne01,
                   int64_t   ne02,
                   int64_t   ne10,
                   int64_t   ne12,
                   int64_t   ne0,
                   int64_t   ne1,
                      uint   r2,
                      uint   r3,
        threadgroup int8_t * shared_values,
                     uint3   tgpig,
                      uint   tiisg,
                      uint   sgitg) {

    const int nb = ne00/QK_K; // number of Q2_K blocks per row
    const int r0 = tgpig.x;
    const int r1 = tgpig.y;
    const int im = tgpig.z;

    const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST;
    const int ib_row = first_row * nb;

    const uint i12 = im%ne12;
    const uint i13 = im/ne12;

    const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02);

    device const block_q2_K * x = (device const block_q2_K *) src0 + ib_row + offset0;
    device const float      * y = (device const float      *) src1 + r1*ne10 + im*ne00*ne1;

    float yl[32];
    float sumf[N_DST]={0.f}, all_sum;

    const int step = sizeof(block_q2_K) * nb;

    // lane decomposition: 4 blocks in flight, 8 lanes per block
    const int ix = tiisg/8;  // 0...3
    const int it = tiisg%8;  // 0...7
    const int iq = it/4;     // 0 or 1
    const int ir = it%4;     // 0...3
    const int is = (8*ir)/16;// 0 or 1

    device const float * y4 = y + ix * QK_K + 128 * iq + 8 * ir;

    for (int ib = ix; ib < nb; ib += 4) {

        float4 sumy = {0.f, 0.f, 0.f, 0.f};
        for (int i = 0; i < 8; ++i) {
            yl[i+ 0] = y4[i+ 0]; sumy[0] += yl[i+ 0];
            yl[i+ 8] = y4[i+32]; sumy[1] += yl[i+ 8];
            yl[i+16] = y4[i+64]; sumy[2] += yl[i+16];
            yl[i+24] = y4[i+96]; sumy[3] += yl[i+24];
        }

        device const uint8_t * sc = (device const uint8_t
*)x[ib].scales + 8*iq + is; + device const uint16_t * qs = (device const uint16_t *)x[ib].qs + 16 * iq + 4 * ir; + device const half * dh = &x[ib].d; + + for (int row = 0; row < N_DST; row++) { + + float4 acc1 = {0.f, 0.f, 0.f, 0.f}; + float4 acc2 = {0.f, 0.f, 0.f, 0.f}; + for (int i = 0; i < 8; i += 2) { + acc1[0] += yl[i+ 0] * (qs[i/2] & 0x0003); + acc2[0] += yl[i+ 1] * (qs[i/2] & 0x0300); + acc1[1] += yl[i+ 8] * (qs[i/2] & 0x000c); + acc2[1] += yl[i+ 9] * (qs[i/2] & 0x0c00); + acc1[2] += yl[i+16] * (qs[i/2] & 0x0030); + acc2[2] += yl[i+17] * (qs[i/2] & 0x3000); + acc1[3] += yl[i+24] * (qs[i/2] & 0x00c0); + acc2[3] += yl[i+25] * (qs[i/2] & 0xc000); + } + float dall = dh[0]; + float dmin = dh[1] * 1.f/16.f; + sumf[row] += dall * ((acc1[0] + 1.f/256.f * acc2[0]) * (sc[0] & 0xF) * 1.f/ 1.f + + (acc1[1] + 1.f/256.f * acc2[1]) * (sc[2] & 0xF) * 1.f/ 4.f + + (acc1[2] + 1.f/256.f * acc2[2]) * (sc[4] & 0xF) * 1.f/16.f + + (acc1[3] + 1.f/256.f * acc2[3]) * (sc[6] & 0xF) * 1.f/64.f) - + dmin * (sumy[0] * (sc[0] & 0xF0) + sumy[1] * (sc[2] & 0xF0) + sumy[2] * (sc[4] & 0xF0) + sumy[3] * (sc[6] & 0xF0)); + + qs += step/2; + sc += step; + dh += step/2; + } + + y4 += 4 * QK_K; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; + } + } +} + +[[host_name("kernel_mul_mv_q2_K_f32")]] +kernel void kernel_mul_mv_q2_K_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint 
tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_q2_K_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, nullptr, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_q3_K_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const int nb = ne00/QK_K; + + const int64_t r0 = tgpig.x; + const int64_t r1 = tgpig.y; + const int64_t im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * 2; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_q3_K * x = (device const block_q3_K *) src0 + first_row*nb + offset0; + device const float * yy = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + + //const uint16_t kmask1 = 0x3030; + //const uint16_t kmask2 = 0x0f0f; + + const int tid = tiisg/4; + const int ix = tiisg%4; + const int ip = tid/4; // 0 or 1 + const int il = 2*((tid%4)/2); // 0 or 2 + const int ir = tid%2; + const int n = 8; + const int l0 = n*ir; + + // One would think that the Metal compiler would figure out that ip and il can only have + // 4 possible states, and optimize accordingly. Well, no. It needs help, and we do it + // with these two tales. 
+ // + // Possible masks for the high bit + const ushort4 mm[4] = {{0x0001, 0x0100, 0x0002, 0x0200}, // ip = 0, il = 0 + {0x0004, 0x0400, 0x0008, 0x0800}, // ip = 0, il = 2 + {0x0010, 0x1000, 0x0020, 0x2000}, // ip = 1, il = 0 + {0x0040, 0x4000, 0x0080, 0x8000}}; // ip = 1, il = 2 + + // Possible masks for the low 2 bits + const int4 qm[2] = {{0x0003, 0x0300, 0x000c, 0x0c00}, {0x0030, 0x3000, 0x00c0, 0xc000}}; + + const ushort4 hm = mm[2*ip + il/2]; + + const int shift = 2*il; + const float v1 = il == 0 ? 4.f : 64.f; + const float v2 = 4.f * v1; + + const uint16_t s_shift1 = 4*ip; + const uint16_t s_shift2 = s_shift1 + il; + + const int q_offset = 32*ip + l0; + const int y_offset = 128*ip + 32*il + l0; + + const int step = sizeof(block_q3_K) * nb / 2; + + device const float * y1 = yy + ix*QK_K + y_offset; + + uint32_t scales32, aux32; + thread uint16_t * scales16 = (thread uint16_t *)&scales32; + thread const int8_t * scales = (thread const int8_t *)&scales32; + + float sumf1[2] = {0.f}; + float sumf2[2] = {0.f}; + for (int i = ix; i < nb; i += 4) { + + for (int l = 0; l < 8; ++l) { + yl[l+ 0] = y1[l+ 0]; + yl[l+ 8] = y1[l+16]; + yl[l+16] = y1[l+32]; + yl[l+24] = y1[l+48]; + } + + device const uint16_t * q = (device const uint16_t *)(x[i].qs + q_offset); + device const uint16_t * h = (device const uint16_t *)(x[i].hmask + l0); + device const uint16_t * a = (device const uint16_t *)(x[i].scales); + device const half * dh = &x[i].d; + + for (int row = 0; row < 2; ++row) { + + const float d_all = (float)dh[0]; + + scales16[0] = a[4]; + scales16[1] = a[5]; + aux32 = ((scales32 >> s_shift2) << 4) & 0x30303030; + scales16[0] = a[il+0]; + scales16[1] = a[il+1]; + scales32 = ((scales32 >> s_shift1) & 0x0f0f0f0f) | aux32; + + float s1 = 0, s2 = 0, s3 = 0, s4 = 0, s5 = 0, s6 = 0; + for (int l = 0; l < n; l += 2) { + const int32_t qs = q[l/2]; + s1 += yl[l+0] * (qs & qm[il/2][0]); + s2 += yl[l+1] * (qs & qm[il/2][1]); + s3 += ((h[l/2] & hm[0]) ? 
0.f : yl[l+0]) + ((h[l/2] & hm[1]) ? 0.f : yl[l+1]); + s4 += yl[l+16] * (qs & qm[il/2][2]); + s5 += yl[l+17] * (qs & qm[il/2][3]); + s6 += ((h[l/2] & hm[2]) ? 0.f : yl[l+16]) + ((h[l/2] & hm[3]) ? 0.f : yl[l+17]); + } + float d1 = d_all * (s1 + 1.f/256.f * s2 - s3*v1); + float d2 = d_all * (s4 + 1.f/256.f * s5 - s6*v2); + sumf1[row] += d1 * (scales[0] - 32); + sumf2[row] += d2 * (scales[2] - 32); + + s1 = s2 = s3 = s4 = s5 = s6 = 0; + for (int l = 0; l < n; l += 2) { + const int32_t qs = q[l/2+8]; + s1 += yl[l+8] * (qs & qm[il/2][0]); + s2 += yl[l+9] * (qs & qm[il/2][1]); + s3 += ((h[l/2+8] & hm[0]) ? 0.f : yl[l+8]) + ((h[l/2+8] & hm[1]) ? 0.f : yl[l+9]); + s4 += yl[l+24] * (qs & qm[il/2][2]); + s5 += yl[l+25] * (qs & qm[il/2][3]); + s6 += ((h[l/2+8] & hm[2]) ? 0.f : yl[l+24]) + ((h[l/2+8] & hm[3]) ? 0.f : yl[l+25]); + } + d1 = d_all * (s1 + 1.f/256.f * s2 - s3*v1); + d2 = d_all * (s4 + 1.f/256.f * s5 - s6*v2); + sumf1[row] += d1 * (scales[1] - 32); + sumf2[row] += d2 * (scales[3] - 32); + + q += step; + h += step; + a += step; + dh += step; + + } + + y1 += 4 * QK_K; + + } + + for (int row = 0; row < 2; ++row) { + const float sumf = (sumf1[row] + 0.25f * sumf2[row]) / (1 << shift); + sumf1[row] = simd_sum(sumf); + } + if (tiisg == 0) { + for (int row = 0; row < 2; ++row) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = sumf1[row]; + } + } +} + +[[host_name("kernel_mul_mv_q3_K_f32")]] +kernel void kernel_mul_mv_q3_K_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + 
uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_q3_K_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, nullptr, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_q4_K_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const uint16_t kmask1 = 0x3f3f; + const uint16_t kmask2 = 0x0f0f; + const uint16_t kmask3 = 0xc0c0; + + const int ix = tiisg/8; // 0...3 + const int it = tiisg%8; // 0...7 + const int iq = it/4; // 0 or 1 + const int ir = it%4; // 0...3 + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + //const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int first_row = r0 * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_q4_K * x = (device const block_q4_K *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[16]; + float yh[16]; + float sumf[N_DST]={0.f}, all_sum; + + const int step = sizeof(block_q4_K) * nb / 2; + + device const float * y4 = y + ix * QK_K + 64 * iq + 8 * ir; + + uint16_t sc16[4]; + thread const uint8_t * sc8 = (thread const uint8_t *)sc16; + + for (int ib = ix; ib < nb; ib += 4) { + + float4 sumy = {0.f, 0.f, 0.f, 0.f}; + for (int i = 0; i < 8; ++i) { + yl[i+0] = y4[i+ 0]; sumy[0] += yl[i+0]; + yl[i+8] = y4[i+ 32]; sumy[1] += yl[i+8]; + yh[i+0] = y4[i+128]; sumy[2] += yh[i+0]; + yh[i+8] = y4[i+160]; sumy[3] += yh[i+8]; + } + + device const uint16_t * sc = (device const uint16_t *)x[ib].scales + iq; + device const uint16_t * q1 = 
(device const uint16_t *)x[ib].qs + 16 * iq + 4 * ir; + device const half * dh = &x[ib].d; + + for (int row = 0; row < N_DST; row++) { + + sc16[0] = sc[0] & kmask1; + sc16[1] = sc[2] & kmask1; + sc16[2] = ((sc[4] >> 0) & kmask2) | ((sc[0] & kmask3) >> 2); + sc16[3] = ((sc[4] >> 4) & kmask2) | ((sc[2] & kmask3) >> 2); + + device const uint16_t * q2 = q1 + 32; + + float4 acc1 = {0.f, 0.f, 0.f, 0.f}; + float4 acc2 = {0.f, 0.f, 0.f, 0.f}; + for (int i = 0; i < 8; i += 2) { + acc1[0] += yl[i+0] * (q1[i/2] & 0x000F); + acc1[1] += yl[i+1] * (q1[i/2] & 0x0F00); + acc1[2] += yl[i+8] * (q1[i/2] & 0x00F0); + acc1[3] += yl[i+9] * (q1[i/2] & 0xF000); + acc2[0] += yh[i+0] * (q2[i/2] & 0x000F); + acc2[1] += yh[i+1] * (q2[i/2] & 0x0F00); + acc2[2] += yh[i+8] * (q2[i/2] & 0x00F0); + acc2[3] += yh[i+9] * (q2[i/2] & 0xF000); + } + + float dall = dh[0]; + float dmin = dh[1]; + sumf[row] += dall * ((acc1[0] + 1.f/256.f * acc1[1]) * sc8[0] + + (acc1[2] + 1.f/256.f * acc1[3]) * sc8[1] * 1.f/16.f + + (acc2[0] + 1.f/256.f * acc2[1]) * sc8[4] + + (acc2[2] + 1.f/256.f * acc2[3]) * sc8[5] * 1.f/16.f) - + dmin * (sumy[0] * sc8[2] + sumy[1] * sc8[3] + sumy[2] * sc8[6] + sumy[3] * sc8[7]); + + q1 += step; + sc += step; + dh += step; + } + + y4 += 4 * QK_K; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; + } + } +} + +[[host_name("kernel_mul_mv_q4_K_f32")]] +kernel void kernel_mul_mv_q4_K_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, 
+ uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_q4_K_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, nullptr, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_q5_K_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const int nb = ne00/QK_K; + + const int64_t r0 = tgpig.x; + const int64_t r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * 2; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_q5_K * x = (device const block_q5_K *) src0 + first_row*nb + offset0; + device const float * yy = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float sumf[2]={0.f}; + + const int step = sizeof(block_q5_K) * nb; + + float yl[16], yh[16]; + + const uint16_t kmask1 = 0x3f3f; + const uint16_t kmask2 = 0x0f0f; + const uint16_t kmask3 = 0xc0c0; + + const int tid = tiisg/4; + const int ix = tiisg%4; + const int iq = tid/4; + const int ir = tid%4; + const int n = 8; + + const int l0 = n*ir; + const int q_offset = 32*iq + l0; + const int y_offset = 64*iq + l0; + + const uint8_t hm1 = 1u << (2*iq); + const uint8_t hm2 = hm1 << 1; + const uint8_t hm3 = hm1 << 4; + const uint8_t hm4 = hm2 << 4; + + uint16_t sc16[4]; + thread const uint8_t * sc8 = (thread const uint8_t *)sc16; + + device const float * y1 = yy + ix*QK_K + y_offset; + + for (int i = ix; i < nb; i += 4) { + + device const uint8_t * q1 = x[i].qs + q_offset; + device const uint8_t * qh = x[i].qh + l0; + device const half * dh = &x[i].d; + device const uint16_t * a = (device const uint16_t 
*)x[i].scales + iq; + + device const float * y2 = y1 + 128; + float4 sumy = {0.f, 0.f, 0.f, 0.f}; + for (int l = 0; l < 8; ++l) { + yl[l+0] = y1[l+ 0]; sumy[0] += yl[l+0]; + yl[l+8] = y1[l+32]; sumy[1] += yl[l+8]; + yh[l+0] = y2[l+ 0]; sumy[2] += yh[l+0]; + yh[l+8] = y2[l+32]; sumy[3] += yh[l+8]; + } + + for (int row = 0; row < 2; ++row) { + + device const uint8_t * q2 = q1 + 64; + + sc16[0] = a[0] & kmask1; + sc16[1] = a[2] & kmask1; + sc16[2] = ((a[4] >> 0) & kmask2) | ((a[0] & kmask3) >> 2); + sc16[3] = ((a[4] >> 4) & kmask2) | ((a[2] & kmask3) >> 2); + + float4 acc1 = {0.f}; + float4 acc2 = {0.f}; + for (int l = 0; l < n; ++l) { + uint8_t h = qh[l]; + acc1[0] += yl[l+0] * (q1[l] & 0x0F); + acc1[1] += yl[l+8] * (q1[l] & 0xF0); + acc1[2] += yh[l+0] * (q2[l] & 0x0F); + acc1[3] += yh[l+8] * (q2[l] & 0xF0); + acc2[0] += h & hm1 ? yl[l+0] : 0.f; + acc2[1] += h & hm2 ? yl[l+8] : 0.f; + acc2[2] += h & hm3 ? yh[l+0] : 0.f; + acc2[3] += h & hm4 ? yh[l+8] : 0.f; + } + const float dall = dh[0]; + const float dmin = dh[1]; + sumf[row] += dall * (sc8[0] * (acc1[0] + 16.f*acc2[0]) + + sc8[1] * (acc1[1]/16.f + 16.f*acc2[1]) + + sc8[4] * (acc1[2] + 16.f*acc2[2]) + + sc8[5] * (acc1[3]/16.f + 16.f*acc2[3])) - + dmin * (sumy[0] * sc8[2] + sumy[1] * sc8[3] + sumy[2] * sc8[6] + sumy[3] * sc8[7]); + + q1 += step; + qh += step; + dh += step/2; + a += step/2; + + } + + y1 += 4 * QK_K; + + } + + for (int row = 0; row < 2; ++row) { + const float tot = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = tot; + } + } +} + +[[host_name("kernel_mul_mv_q5_K_f32")]] +kernel void kernel_mul_mv_q5_K_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + 
constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_q5_K_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, nullptr, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_q6_K_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const uint8_t kmask1 = 0x03; + const uint8_t kmask2 = 0x0C; + const uint8_t kmask3 = 0x30; + const uint8_t kmask4 = 0xC0; + + const int nb = ne00/QK_K; + + const int64_t r0 = tgpig.x; + const int64_t r1 = tgpig.y; + const int im = tgpig.z; + + const int row = 2 * r0 + sgitg; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_q6_K * x = (device const block_q6_K *) src0 + row * nb + offset0; + device const float * yy = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float sumf = 0; + + const int tid = tiisg/2; + const int ix = tiisg%2; + const int ip = tid/8; // 0 or 1 + const int il = tid%8; + const int n = 4; + const int l0 = n*il; + const int is = 8*ip + l0/16; + + const int y_offset = 128*ip + l0; + const int q_offset_l = 64*ip + l0; + const int q_offset_h = 32*ip + l0; + + for (int i = ix; i < nb; i += 2) { + + device const uint8_t * q1 = x[i].ql + q_offset_l; + device const uint8_t * q2 = q1 + 32; + device const uint8_t * qh = x[i].qh + q_offset_h; + device const int8_t * sc = x[i].scales + is; + + device const float * y = yy + i * QK_K + y_offset; + + const float dall = x[i].d; + + float4 sums = {0.f, 0.f, 0.f, 
0.f}; + for (int l = 0; l < n; ++l) { + sums[0] += y[l+ 0] * ((int8_t)((q1[l] & 0xF) | ((qh[l] & kmask1) << 4)) - 32); + sums[1] += y[l+32] * ((int8_t)((q2[l] & 0xF) | ((qh[l] & kmask2) << 2)) - 32); + sums[2] += y[l+64] * ((int8_t)((q1[l] >> 4) | ((qh[l] & kmask3) << 0)) - 32); + sums[3] += y[l+96] * ((int8_t)((q2[l] >> 4) | ((qh[l] & kmask4) >> 2)) - 32); + } + + sumf += dall * (sums[0] * sc[0] + sums[1] * sc[2] + sums[2] * sc[4] + sums[3] * sc[6]); + + } + + const float tot = simd_sum(sumf); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + row] = tot; + } +} + +[[host_name("kernel_mul_mv_q6_K_f32")]] +kernel void kernel_mul_mv_q6_K_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_q6_K_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, nullptr, tgpig, tiisg, sgitg); +} + +// ======================= "True" 2-bit + +void kernel_mul_mv_iq2_xxs_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = 
first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq2_xxs * x = (device const block_iq2_xxs *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + threadgroup uint64_t * values = (threadgroup uint64_t *)shared_values; + threadgroup uint8_t * shared_signs = (threadgroup uint8_t *)(values + 256); + { + int nval = 4; + int pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) values[pos + i] = iq2xxs_grid[pos + i]; + nval = 2; + pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) shared_signs[pos+i] = ksigns_iq2xs[pos+i]; + threadgroup_barrier(mem_flags::mem_threadgroup); + } + + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq2_xxs * xr = x + ibl; + device const uint16_t * q2 = xr->qs + 4 * ib; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + device const uint8_t * aux8 = (device const uint8_t *)q2; + const uint32_t aux32 = q2[2] | (q2[3] << 16); + const float d = db * (0.5f + (aux32 >> 28)); + + float sum = 0; + for (int l = 0; l < 4; ++l) { + const threadgroup uint8_t * grid = (const threadgroup uint8_t *)(values + aux8[l]); + const uint8_t signs = shared_signs[(aux32 >> 7*l) & 127]; + for (int j = 0; j < 8; ++j) { + sum += yl[8*l + j] * grid[j] * (signs & kmask_iq2xs[j] ? 
-1.f : 1.f); + } + } + sumf[row] += d * sum; + + dh += nb*sizeof(block_iq2_xxs)/2; + q2 += nb*sizeof(block_iq2_xxs)/2; + } + + y4 += 32 * 32; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.25f; + } + } +} + +[[host_name("kernel_mul_mv_iq2_xxs_f32")]] +kernel void kernel_mul_mv_iq2_xxs_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq2_xxs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_iq2_xs_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq2_xs * x = (device const block_iq2_xs *) src0 + ib_row + 
offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + threadgroup uint64_t * values = (threadgroup uint64_t *)shared_values; + threadgroup uint8_t * shared_signs = (threadgroup uint8_t *)(values + 512); + { + int nval = 8; + int pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) values[pos + i] = iq2xs_grid[pos + i]; + nval = 2; + pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) shared_signs[pos+i] = ksigns_iq2xs[pos+i]; + threadgroup_barrier(mem_flags::mem_threadgroup); + } + + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq2_xs * xr = x + ibl; + device const uint16_t * q2 = xr->qs + 4 * ib; + device const uint8_t * sc = xr->scales + ib; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + const uint8_t ls1 = sc[0] & 0xf; + const uint8_t ls2 = sc[0] >> 4; + const float d1 = db * (0.5f + ls1); + const float d2 = db * (0.5f + ls2); + + float sum1 = 0, sum2 = 0; + for (int l = 0; l < 2; ++l) { + const threadgroup uint8_t * grid = (const threadgroup uint8_t *)(values + (q2[l] & 511)); + const uint8_t signs = shared_signs[(q2[l] >> 9)]; + for (int j = 0; j < 8; ++j) { + sum1 += yl[8*l + j] * grid[j] * (signs & kmask_iq2xs[j] ? -1.f : 1.f); + } + } + for (int l = 2; l < 4; ++l) { + const threadgroup uint8_t * grid = (const threadgroup uint8_t *)(values + (q2[l] & 511)); + const uint8_t signs = shared_signs[(q2[l] >> 9)]; + for (int j = 0; j < 8; ++j) { + sum2 += yl[8*l + j] * grid[j] * (signs & kmask_iq2xs[j] ? 
-1.f : 1.f); + } + } + sumf[row] += d1 * sum1 + d2 * sum2; + + dh += nb*sizeof(block_iq2_xs)/2; + q2 += nb*sizeof(block_iq2_xs)/2; + sc += nb*sizeof(block_iq2_xs); + } + + y4 += 32 * 32; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.25f; + } + } +} + +[[host_name("kernel_mul_mv_iq2_xs_f32")]] +kernel void kernel_mul_mv_iq2_xs_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq2_xs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_iq3_xxs_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq3_xxs * x = 
(device const block_iq3_xxs *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + threadgroup uint32_t * values = (threadgroup uint32_t *)shared_values; + threadgroup uint8_t * shared_signs = (threadgroup uint8_t *)(values + 256); + { + int nval = 4; + int pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) values[pos + i] = iq3xxs_grid[pos + i]; + nval = 2; + pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) shared_signs[pos+i] = ksigns_iq2xs[pos+i]; + threadgroup_barrier(mem_flags::mem_threadgroup); + } + + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq3_xxs * xr = x + ibl; + device const uint8_t * q3 = xr->qs + 8 * ib; + device const uint16_t * gas = (device const uint16_t *)(xr->qs + QK_K/4) + 2 * ib; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + const uint32_t aux32 = gas[0] | (gas[1] << 16); + const float d = db * (0.5f + (aux32 >> 28)); + + float2 sum = {0}; + for (int l = 0; l < 4; ++l) { + const threadgroup uint8_t * grid1 = (const threadgroup uint8_t *)(values + q3[2*l+0]); + const threadgroup uint8_t * grid2 = (const threadgroup uint8_t *)(values + q3[2*l+1]); + const uint8_t signs = shared_signs[(aux32 >> 7*l) & 127]; + for (int j = 0; j < 4; ++j) { + sum[0] += yl[8*l + j + 0] * grid1[j] * (signs & kmask_iq2xs[j+0] ? -1.f : 1.f); + sum[1] += yl[8*l + j + 4] * grid2[j] * (signs & kmask_iq2xs[j+4] ? 
-1.f : 1.f); + } + } + sumf[row] += d * (sum[0] + sum[1]); + + dh += nb*sizeof(block_iq3_xxs)/2; + q3 += nb*sizeof(block_iq3_xxs); + gas += nb*sizeof(block_iq3_xxs)/2; + } + + y4 += 32 * 32; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.5f; + } + } +} + +[[host_name("kernel_mul_mv_iq3_xxs_f32")]] +kernel void kernel_mul_mv_iq3_xxs_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq3_xxs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_iq3_s_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq3_s * x = 
(device const block_iq3_s *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + threadgroup uint32_t * values = (threadgroup uint32_t *)shared_values; + { + int nval = 8; + int pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) values[pos + i] = iq3s_grid[pos + i]; + threadgroup_barrier(mem_flags::mem_threadgroup); + } + + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq3_s * xr = x + ibl; + device const uint8_t * qs = xr->qs + 8 * ib; + device const uint8_t * qh = xr->qh + ib; + device const uint8_t * sc = xr->scales + (ib/2); + device const uint8_t * signs = xr->signs + 4 * ib; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + const float d = db * (1 + 2*((sc[0] >> 4*(ib%2)) & 0xf)); + + float2 sum = {0}; + for (int l = 0; l < 4; ++l) { + const threadgroup uint32_t * table1 = qh[0] & kmask_iq2xs[2*l+0] ? values + 256 : values; + const threadgroup uint32_t * table2 = qh[0] & kmask_iq2xs[2*l+1] ? 
values + 256 : values; + const threadgroup uint8_t * grid1 = (const threadgroup uint8_t *)(table1 + qs[2*l+0]); + const threadgroup uint8_t * grid2 = (const threadgroup uint8_t *)(table2 + qs[2*l+1]); + for (int j = 0; j < 4; ++j) { + sum[0] += yl[8*l + j + 0] * grid1[j] * select(1, -1, signs[l] & kmask_iq2xs[j+0]); + sum[1] += yl[8*l + j + 4] * grid2[j] * select(1, -1, signs[l] & kmask_iq2xs[j+4]); + } + } + sumf[row] += d * (sum[0] + sum[1]); + + dh += nb*sizeof(block_iq3_s)/2; + qs += nb*sizeof(block_iq3_s); + qh += nb*sizeof(block_iq3_s); + sc += nb*sizeof(block_iq3_s); + signs += nb*sizeof(block_iq3_s); + } + + y4 += 32 * 32; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; + } + } +} + +[[host_name("kernel_mul_mv_iq3_s_f32")]] +kernel void kernel_mul_mv_iq3_s_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq3_s_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + +void kernel_mul_mv_iq2_s_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t 
* shared_values,
+    uint3 tgpig,
+    uint tiisg,
+    uint sgitg) {
+
+    const int nb = ne00/QK_K;
+    const int r0 = tgpig.x;
+    const int r1 = tgpig.y;
+    const int im = tgpig.z;
+
+    const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST;
+    const int ib_row = first_row * nb;
+
+    const uint i12 = im%ne12;
+    const uint i13 = im/ne12;
+
+    const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02);
+
+    device const block_iq2_s * x = (device const block_iq2_s *) src0 + ib_row + offset0;
+    device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1;
+
+    float yl[32];
+    float sumf[N_DST]={0.f}, all_sum;
+
+    const int nb32 = nb * (QK_K / 32);
+
+    // threadgroup staging of iq2s_grid is disabled here; the grid is read
+    // directly from constant memory below instead
+    //threadgroup uint64_t * values = (threadgroup uint64_t *)shared_values;
+    //{
+    // int nval = 32;
+    // int pos = (32*sgitg + tiisg)*nval;
+    // for (int i = 0; i < nval; ++i) values[pos + i] = iq2s_grid[pos + i];
+    // threadgroup_barrier(mem_flags::mem_threadgroup);
+    //}
+
+    const int ix = tiisg;
+
+    device const float * y4 = y + 32 * ix;
+
+    for (int ib32 = ix; ib32 < nb32; ib32 += 32) {
+
+        for (int i = 0; i < 32; ++i) {
+            yl[i] = y4[i];
+        }
+
+        const int ibl = ib32 / (QK_K / 32);
+        const int ib = ib32 % (QK_K / 32);
+
+        device const block_iq2_s * xr = x + ibl;
+        device const uint8_t * qs = xr->qs + 4 * ib;
+        device const uint8_t * qh = xr->qh + ib;
+        device const uint8_t * sc = xr->scales + ib;
+        device const uint8_t * signs = qs + QK_K/8;
+        device const half * dh = &xr->d;
+
+        for (int row = 0; row < N_DST; row++) {
+
+            const float db = dh[0];
+            // two sub-scales per byte: low nibble for the first half, high for the second
+            const float d1 = db * (0.5f + (sc[0] & 0xf));
+            const float d2 = db * (0.5f + (sc[0] >> 4));
+
+            float2 sum = {0};
+            for (int l = 0; l < 2; ++l) {
+                //const threadgroup uint8_t * grid1 = (const threadgroup uint8_t *)(values + (qs[l+0] | ((qh[0] << (8-2*l)) & 0x300)));
+                //const threadgroup uint8_t * grid2 = (const threadgroup uint8_t *)(values + (qs[l+2] | ((qh[0] << (4-2*l)) & 0x300)));
+                constant uint8_t * grid1 = (constant uint8_t *)(iq2s_grid + (qs[l+0] | ((qh[0]
<< (8-2*l)) & 0x300)));
+                constant uint8_t * grid2 = (constant uint8_t *)(iq2s_grid + (qs[l+2] | ((qh[0] << (4-2*l)) & 0x300)));
+                for (int j = 0; j < 8; ++j) {
+                    sum[0] += yl[8*l + j + 0] * grid1[j] * select(1, -1, signs[l+0] & kmask_iq2xs[j]);
+                    sum[1] += yl[8*l + j + 16] * grid2[j] * select(1, -1, signs[l+2] & kmask_iq2xs[j]);
+                }
+            }
+            sumf[row] += d1 * sum[0] + d2 * sum[1];
+
+            // step to the same offsets in the next row of blocks
+            dh += nb*sizeof(block_iq2_s)/2;
+            qs += nb*sizeof(block_iq2_s);
+            qh += nb*sizeof(block_iq2_s);
+            sc += nb*sizeof(block_iq2_s);
+            signs += nb*sizeof(block_iq2_s);
+        }
+
+        y4 += 32 * 32;
+    }
+
+    // simdgroup reduction; the 0.25f folds the quarter-scale of the IQ2_S format
+    for (int row = 0; row < N_DST; ++row) {
+        all_sum = simd_sum(sumf[row]);
+        if (tiisg == 0) {
+            dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.25f;
+        }
+    }
+}
+
+// Host-visible entry point for the IQ2_S mat-vec multiply.
+[[host_name("kernel_mul_mv_iq2_s_f32")]]
+kernel void kernel_mul_mv_iq2_s_f32(
+    device const void * src0,
+    device const float * src1,
+    device float * dst,
+    constant int64_t & ne00,
+    constant int64_t & ne01,
+    constant int64_t & ne02,
+    constant uint64_t & nb00,
+    constant uint64_t & nb01,
+    constant uint64_t & nb02,
+    constant int64_t & ne10,
+    constant int64_t & ne11,
+    constant int64_t & ne12,
+    constant uint64_t & nb10,
+    constant uint64_t & nb11,
+    constant uint64_t & nb12,
+    constant int64_t & ne0,
+    constant int64_t & ne1,
+    constant uint & r2,
+    constant uint & r3,
+    threadgroup int8_t * shared_values [[threadgroup(0)]],
+    uint3 tgpig[[threadgroup_position_in_grid]],
+    uint tiisg[[thread_index_in_simdgroup]],
+    uint sgitg[[simdgroup_index_in_threadgroup]]) {
+
+    kernel_mul_mv_iq2_s_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg);
+}
+
+// Mat-vec multiply for IQ1_S quantization (1.5625 bpw); shared_value is unused.
+void kernel_mul_mv_iq1_s_f32_impl(
+    device const void * src0,
+    device const float * src1,
+    device float * dst,
+    int64_t ne00,
+    int64_t ne01,
+    int64_t ne02,
+    int64_t ne10,
+    int64_t ne12,
+    int64_t ne0,
+    int64_t ne1,
+    uint r2,
+    uint r3,
+    threadgroup int8_t * shared_value,
+    uint3 tgpig,
+    uint tiisg,
+    uint sgitg)
{
+
+    const int nb = ne00/QK_K;
+    const int r0 = tgpig.x;
+    const int r1 = tgpig.y;
+    const int im = tgpig.z;
+
+    const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST;
+    const int ib_row = first_row * nb;
+
+    const uint i12 = im%ne12;
+    const uint i13 = im/ne12;
+
+    const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02);
+    device const block_iq1_s * x = (device const block_iq1_s *) src0 + ib_row + offset0;
+    device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1;
+
+    float yl[32];
+    float sumf[N_DST]={0.f}, all_sum;
+
+    const int nb32 = nb * (QK_K / 32);
+
+    const int ix = tiisg;
+
+    device const float * y4 = y + 32 * ix;
+
+    for (int ib32 = ix; ib32 < nb32; ib32 += 32) {
+
+        // sumy accumulates the activations so the per-block delta can be applied once
+        float sumy = 0;
+        for (int i = 0; i < 32; ++i) {
+            yl[i] = y4[i];
+            sumy += yl[i];
+        }
+
+        const int ibl = ib32 / (QK_K / 32);
+        const int ib = ib32 % (QK_K / 32);
+
+        device const block_iq1_s * xr = x + ibl;
+        device const uint8_t * qs = xr->qs + 4 * ib;
+        device const uint16_t * qh = xr->qh + ib;
+        device const half * dh = &xr->d;
+
+        for (int row = 0; row < N_DST; row++) {
+
+            // grid index = 8 low bits from qs, 3 high bits from qh
+            constant uint8_t * grid1 = (constant uint8_t *)(iq1s_grid_gpu + (qs[0] | ((qh[0] << 8) & 0x700)));
+            constant uint8_t * grid2 = (constant uint8_t *)(iq1s_grid_gpu + (qs[1] | ((qh[0] << 5) & 0x700)));
+            constant uint8_t * grid3 = (constant uint8_t *)(iq1s_grid_gpu + (qs[2] | ((qh[0] << 2) & 0x700)));
+            constant uint8_t * grid4 = (constant uint8_t *)(iq1s_grid_gpu + (qs[3] | ((qh[0] >> 1) & 0x700)));
+
+            float sum = 0;
+            for (int j = 0; j < 4; ++j) {
+                sum += yl[j+ 0] * (grid1[j] & 0xf) + yl[j+ 4] * (grid1[j] >> 4)
+                     + yl[j+ 8] * (grid2[j] & 0xf) + yl[j+12] * (grid2[j] >> 4)
+                     + yl[j+16] * (grid3[j] & 0xf) + yl[j+20] * (grid3[j] >> 4)
+                     + yl[j+24] * (grid4[j] & 0xf) + yl[j+28] * (grid4[j] >> 4);
+            }
+            // sign bit (0x8000) flips the IQ1S_DELTA offset; bits 12..14 hold the scale
+            sumf[row] += (float)dh[0] * (sum + sumy * (qh[0] & 0x8000 ?
-1 - IQ1S_DELTA : -1 + IQ1S_DELTA)) * (2*((qh[0] >> 12) & 7) + 1);
+
+            dh += nb*sizeof(block_iq1_s)/2;
+            qs += nb*sizeof(block_iq1_s);
+            qh += nb*sizeof(block_iq1_s)/2;
+        }
+
+        y4 += 32 * 32;
+    }
+
+    for (int row = 0; row < N_DST; ++row) {
+        all_sum = simd_sum(sumf[row]);
+        if (tiisg == 0) {
+            dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum;
+        }
+    }
+}
+
+// Mat-vec multiply for IQ1_M quantization; shared_value is unused.
+// The per-block scale is reassembled from four nibbles spread over xr->scales.
+void kernel_mul_mv_iq1_m_f32_impl(
+    device const void * src0,
+    device const float * src1,
+    device float * dst,
+    int64_t ne00,
+    int64_t ne01,
+    int64_t ne02,
+    int64_t ne10,
+    int64_t ne12,
+    int64_t ne0,
+    int64_t ne1,
+    uint r2,
+    uint r3,
+    threadgroup int8_t * shared_value,
+    uint3 tgpig,
+    uint tiisg,
+    uint sgitg) {
+
+    const int nb = ne00/QK_K;
+    const int r0 = tgpig.x;
+    const int r1 = tgpig.y;
+    const int im = tgpig.z;
+
+    const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST;
+    const int ib_row = first_row * nb;
+
+    const uint i12 = im%ne12;
+    const uint i13 = im/ne12;
+
+    const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02);
+    device const block_iq1_m * x = (device const block_iq1_m *) src0 + ib_row + offset0;
+    device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1;
+
+    float yl[32];
+    float sumf[N_DST]={0.f}, all_sum;
+
+    const int nb32 = nb * (QK_K / 32);
+
+    const int ix = tiisg;
+
+    device const float * y4 = y + 32 * ix;
+
+    iq1m_scale_t scale;
+
+    for (int ib32 = ix; ib32 < nb32; ib32 += 32) {
+
+        // activation sums per group of 8, for the four delta terms below
+        float4 sumy = {0.f};
+        for (int i = 0; i < 8; ++i) {
+            yl[i+ 0] = y4[i+ 0]; sumy[0] += yl[i+ 0];
+            yl[i+ 8] = y4[i+ 8]; sumy[1] += yl[i+ 8];
+            yl[i+16] = y4[i+16]; sumy[2] += yl[i+16];
+            yl[i+24] = y4[i+24]; sumy[3] += yl[i+24];
+        }
+
+        const int ibl = ib32 / (QK_K / 32);
+        const int ib = ib32 % (QK_K / 32);
+
+        device const block_iq1_m * xr = x + ibl;
+        device const uint8_t * qs = xr->qs + 4 * ib;
+        device const uint8_t * qh = xr->qh + 2 * ib;
+        device const uint16_t * sc = (device const uint16_t *)xr->scales;
+
+        for (int row = 0; row < N_DST; row++) {
+
scale.u16 = (sc[0] >> 12) | ((sc[1] >> 8) & 0x00f0) | ((sc[2] >> 4) & 0x0f00) | (sc[3] & 0xf000);
+
+            constant uint8_t * grid1 = (constant uint8_t *)(iq1s_grid_gpu + (qs[0] | ((qh[0] << 8) & 0x700)));
+            constant uint8_t * grid2 = (constant uint8_t *)(iq1s_grid_gpu + (qs[1] | ((qh[0] << 4) & 0x700)));
+            constant uint8_t * grid3 = (constant uint8_t *)(iq1s_grid_gpu + (qs[2] | ((qh[1] << 8) & 0x700)));
+            constant uint8_t * grid4 = (constant uint8_t *)(iq1s_grid_gpu + (qs[3] | ((qh[1] << 4) & 0x700)));
+
+            float2 sum = {0.f};
+            for (int j = 0; j < 4; ++j) {
+                sum[0] += yl[j+ 0] * (grid1[j] & 0xf) + yl[j+ 4] * (grid1[j] >> 4)
+                        + yl[j+ 8] * (grid2[j] & 0xf) + yl[j+12] * (grid2[j] >> 4);
+                sum[1] += yl[j+16] * (grid3[j] & 0xf) + yl[j+20] * (grid3[j] >> 4)
+                        + yl[j+24] * (grid4[j] & 0xf) + yl[j+28] * (grid4[j] >> 4);
+            }
+            // per-half delta: sign bits 0x08/0x80 of qh select -1-IQ1M_DELTA vs -1+IQ1M_DELTA
+            const float delta1 = sumy[0] * (qh[0] & 0x08 ? -1 - IQ1M_DELTA : -1 + IQ1M_DELTA) + sumy[1] * (qh[0] & 0x80 ? -1 - IQ1M_DELTA : -1 + IQ1M_DELTA);
+            const float delta2 = sumy[2] * (qh[1] & 0x08 ? -1 - IQ1M_DELTA : -1 + IQ1M_DELTA) + sumy[3] * (qh[1] & 0x80 ?
-1 - IQ1M_DELTA : -1 + IQ1M_DELTA);
+
+            sumf[row] += (float)scale.f16 * ((sum[0] + delta1) * (2*((sc[ib/2] >> (6*(ib%2)+0)) & 7) + 1) +
+                                             (sum[1] + delta2) * (2*((sc[ib/2] >> (6*(ib%2)+3)) & 7) + 1));
+
+            sc += nb*sizeof(block_iq1_m)/2;
+            qs += nb*sizeof(block_iq1_m);
+            qh += nb*sizeof(block_iq1_m);
+        }
+
+        y4 += 32 * 32;
+    }
+
+    for (int row = 0; row < N_DST; ++row) {
+        all_sum = simd_sum(sumf[row]);
+        if (tiisg == 0) {
+            dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum;
+        }
+    }
+}
+
+// Mat-vec multiply for IQ4_NL (non-linear 4-bit) quantization.
+// The 16-entry kvalues_iq4nl_f codebook is staged into threadgroup memory;
+// each simdgroup computes 2 output rows, guarded against ne01 for tail rows.
+void kernel_mul_mv_iq4_nl_f32_impl(
+    device const void * src0,
+    device const float * src1,
+    device float * dst,
+    int64_t ne00,
+    int64_t ne01,
+    int64_t ne02,
+    int64_t ne10,
+    int64_t ne12,
+    int64_t ne0,
+    int64_t ne1,
+    uint r2,
+    uint r3,
+    threadgroup int8_t * shared_values_i8,
+    uint3 tgpig,
+    uint tiisg,
+    uint sgitg) {
+
+    threadgroup float * shared_values = (threadgroup float *)shared_values_i8;
+    const int nb = ne00/QK4_NL;
+    const int r0 = tgpig.x;
+    const int r1 = tgpig.y;
+    const int im = tgpig.z;
+    const int first_row = (r0 * 2 + sgitg) * 2;
+    const int ib_row = first_row * nb;
+
+    const uint i12 = im%ne12;
+    const uint i13 = im/ne12;
+
+    const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02);
+    device const block_iq4_nl * x = (device const block_iq4_nl *) src0 + ib_row + offset0;
+    device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1;
+
+    const int ix = tiisg/2; // 0...15
+    const int it = tiisg%2; // 0 or 1
+
+    shared_values[tiisg] = kvalues_iq4nl_f[tiisg%16];
+    threadgroup_barrier(mem_flags::mem_threadgroup);
+
+    float4 yl[4];
+    float sumf[2]={0.f}, all_sum;
+
+    device const float * yb = y + ix * QK4_NL + it * 8;
+
+    // aux32 holds unpacked nibbles; q8 views them byte-wise for codebook lookups
+    uint32_t aux32[2];
+    thread const uint8_t * q8 = (thread const uint8_t *)aux32;
+
+    float4 qf1, qf2;
+
+    for (int ib = ix; ib < nb; ib += 16) {
+
+        device const float4 * y4 = (device const float4 *)yb;
+        yl[0] = y4[0]; yl[1] = y4[4]; yl[2] = y4[1]; yl[3] = y4[5];
+
+        for (int row = 0; row < 2 && first_row + row <
ne01; ++row) {
+
+            device const block_iq4_nl & xb = x[row*nb + ib];
+            device const uint16_t * q4 = (device const uint16_t *)(xb.qs + 8*it);
+
+            float4 acc1 = {0.f}, acc2 = {0.f};
+
+            // split the packed nibbles into low (aux32[0]) and high (aux32[1]) halves
+            aux32[0] = q4[0] | (q4[1] << 16);
+            aux32[1] = (aux32[0] >> 4) & 0x0f0f0f0f;
+            aux32[0] &= 0x0f0f0f0f;
+            qf1 = {shared_values[q8[0]], shared_values[q8[1]], shared_values[q8[2]], shared_values[q8[3]]};
+            qf2 = {shared_values[q8[4]], shared_values[q8[5]], shared_values[q8[6]], shared_values[q8[7]]};
+            acc1 += yl[0] * qf1;
+            acc2 += yl[1] * qf2;
+
+            aux32[0] = q4[2] | (q4[3] << 16);
+            aux32[1] = (aux32[0] >> 4) & 0x0f0f0f0f;
+            aux32[0] &= 0x0f0f0f0f;
+            qf1 = {shared_values[q8[0]], shared_values[q8[1]], shared_values[q8[2]], shared_values[q8[3]]};
+            qf2 = {shared_values[q8[4]], shared_values[q8[5]], shared_values[q8[6]], shared_values[q8[7]]};
+            acc1 += yl[2] * qf1;
+            acc2 += yl[3] * qf2;
+
+            acc1 += acc2;
+
+            sumf[row] += (float)xb.d * (acc1[0] + acc1[1] + acc1[2] + acc1[3]);
+
+        }
+
+        yb += 16 * QK4_NL;
+    }
+
+    // ne01 guard handles matrices whose row count is not a multiple of 2
+    for (int row = 0; row < 2 && first_row + row < ne01; ++row) {
+        all_sum = simd_sum(sumf[row]);
+        if (tiisg == 0) {
+            dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum;
+        }
+    }
+}
+
+// Mat-vec multiply for IQ4_XS quantization (super-blocks of QK_K with 6-bit
+// sub-scales); shares the kvalues_iq4nl_f codebook staged in threadgroup memory.
+void kernel_mul_mv_iq4_xs_f32_impl(
+    device const void * src0,
+    device const float * src1,
+    device float * dst,
+    int64_t ne00,
+    int64_t ne01,
+    int64_t ne02,
+    int64_t ne10,
+    int64_t ne12,
+    int64_t ne0,
+    int64_t ne1,
+    uint r2,
+    uint r3,
+    threadgroup int8_t * shared_values_i8,
+    uint3 tgpig,
+    uint tiisg,
+    uint sgitg) {
+
+    threadgroup float * shared_values = (threadgroup float *)shared_values_i8;
+    const int nb = ne00/QK_K;
+    const int r0 = tgpig.x;
+    const int r1 = tgpig.y;
+    const int im = tgpig.z;
+    const int first_row = (r0 * 2 + sgitg) * 2;
+    const int ib_row = first_row * nb;
+
+    const uint i12 = im%ne12;
+    const uint i13 = im/ne12;
+
+    const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02);
+    device const block_iq4_xs * x = (device const block_iq4_xs *) src0 + ib_row +
offset0;
+    device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1;
+
+    const int ix = tiisg/16; // 0 or 1
+    const int it = tiisg%16; // 0...15
+    const int ib = it/2;
+    const int il = it%2;
+
+    shared_values[tiisg] = kvalues_iq4nl_f[tiisg%16];
+    threadgroup_barrier(mem_flags::mem_threadgroup);
+
+    float4 yl[4];
+    float sumf[2]={0.f}, all_sum;
+
+    device const float * yb = y + ix * QK_K + ib * 32 + il * 8;
+
+    uint32_t aux32[2];
+    thread const uint8_t * q8 = (thread const uint8_t *)aux32;
+
+    float4 qf1, qf2;
+
+    for (int ibl = ix; ibl < nb; ibl += 2) {
+
+        device const float4 * y4 = (device const float4 *)yb;
+        yl[0] = y4[0]; yl[1] = y4[4]; yl[2] = y4[1]; yl[3] = y4[5];
+
+        for (int row = 0; row < 2; ++row) {
+
+            device const block_iq4_xs & xb = x[row*nb + ibl];
+            device const uint32_t * q4 = (device const uint32_t *)(xb.qs + 16*ib + 8*il);
+
+            float4 acc1 = {0.f}, acc2 = {0.f};
+
+            aux32[0] = q4[0] & 0x0f0f0f0f;
+            aux32[1] = (q4[0] >> 4) & 0x0f0f0f0f;
+            qf1 = {shared_values[q8[0]], shared_values[q8[1]], shared_values[q8[2]], shared_values[q8[3]]};
+            qf2 = {shared_values[q8[4]], shared_values[q8[5]], shared_values[q8[6]], shared_values[q8[7]]};
+            acc1 += yl[0] * qf1;
+            acc2 += yl[1] * qf2;
+
+            aux32[0] = q4[1] & 0x0f0f0f0f;
+            aux32[1] = (q4[1] >> 4) & 0x0f0f0f0f;
+            qf1 = {shared_values[q8[0]], shared_values[q8[1]], shared_values[q8[2]], shared_values[q8[3]]};
+            qf2 = {shared_values[q8[4]], shared_values[q8[5]], shared_values[q8[6]], shared_values[q8[7]]};
+            acc1 += yl[2] * qf1;
+            acc2 += yl[3] * qf2;
+
+            acc1 += acc2;
+
+            // 6-bit sub-scale: low nibble from scales_l, 2 high bits from scales_h, biased by -32
+            const int ls = (((xb.scales_l[ib/2] >> 4*(ib%2)) & 0xf) | (((xb.scales_h >> 2*ib) & 3) << 4)) - 32;
+            sumf[row] += (float)xb.d * ls * (acc1[0] + acc1[1] + acc1[2] + acc1[3]);
+
+        }
+
+        yb += 2 * QK_K;
+    }
+
+    for (int row = 0; row < 2; ++row) {
+        all_sum = simd_sum(sumf[row]);
+        if (tiisg == 0) {
+            dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum;
+        }
+    }
+}
+
+// Host-visible entry point for the IQ1_S mat-vec multiply (no threadgroup
+// memory needed; the impl receives nullptr for its shared-values argument).
+[[host_name("kernel_mul_mv_iq1_s_f32")]]
+kernel void
kernel_mul_mv_iq1_s_f32(
+    device const void * src0,
+    device const float * src1,
+    device float * dst,
+    constant int64_t & ne00,
+    constant int64_t & ne01,
+    constant int64_t & ne02,
+    constant uint64_t & nb00,
+    constant uint64_t & nb01,
+    constant uint64_t & nb02,
+    constant int64_t & ne10,
+    constant int64_t & ne11,
+    constant int64_t & ne12,
+    constant uint64_t & nb10,
+    constant uint64_t & nb11,
+    constant uint64_t & nb12,
+    constant int64_t & ne0,
+    constant int64_t & ne1,
+    constant uint & r2,
+    constant uint & r3,
+    uint3 tgpig[[threadgroup_position_in_grid]],
+    uint tiisg[[thread_index_in_simdgroup]],
+    uint sgitg[[simdgroup_index_in_threadgroup]]) {
+
+    kernel_mul_mv_iq1_s_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, nullptr, tgpig, tiisg, sgitg);
+}
+
+// Host-visible entry point for the IQ1_M mat-vec multiply (shared values unused).
+[[host_name("kernel_mul_mv_iq1_m_f32")]]
+kernel void kernel_mul_mv_iq1_m_f32(
+    device const void * src0,
+    device const float * src1,
+    device float * dst,
+    constant int64_t & ne00,
+    constant int64_t & ne01,
+    constant int64_t & ne02,
+    constant uint64_t & nb00,
+    constant uint64_t & nb01,
+    constant uint64_t & nb02,
+    constant int64_t & ne10,
+    constant int64_t & ne11,
+    constant int64_t & ne12,
+    constant uint64_t & nb10,
+    constant uint64_t & nb11,
+    constant uint64_t & nb12,
+    constant int64_t & ne0,
+    constant int64_t & ne1,
+    constant uint & r2,
+    constant uint & r3,
+    uint3 tgpig[[threadgroup_position_in_grid]],
+    uint tiisg[[thread_index_in_simdgroup]],
+    uint sgitg[[simdgroup_index_in_threadgroup]]) {
+
+    kernel_mul_mv_iq1_m_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, nullptr, tgpig, tiisg, sgitg);
+}
+
+// Host-visible entry point for the IQ4_NL mat-vec multiply.
+[[host_name("kernel_mul_mv_iq4_nl_f32")]]
+kernel void kernel_mul_mv_iq4_nl_f32(
+    device const void * src0,
+    device const float * src1,
+    device float * dst,
+    constant int64_t & ne00,
+    constant int64_t & ne01,
+    constant int64_t & ne02,
+    constant uint64_t & nb00,
+    constant uint64_t & nb01,
+    constant uint64_t & nb02,
+    constant
int64_t & ne10,
+    constant int64_t & ne11,
+    constant int64_t & ne12,
+    constant uint64_t & nb10,
+    constant uint64_t & nb11,
+    constant uint64_t & nb12,
+    constant int64_t & ne0,
+    constant int64_t & ne1,
+    constant uint & r2,
+    constant uint & r3,
+    threadgroup int8_t * shared_values [[threadgroup(0)]],
+    uint3 tgpig[[threadgroup_position_in_grid]],
+    uint tiisg[[thread_index_in_simdgroup]],
+    uint sgitg[[simdgroup_index_in_threadgroup]]) {
+
+    kernel_mul_mv_iq4_nl_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg);
+}
+
+// Host-visible entry point for the IQ4_XS mat-vec multiply.
+[[host_name("kernel_mul_mv_iq4_xs_f32")]]
+kernel void kernel_mul_mv_iq4_xs_f32(
+    device const void * src0,
+    device const float * src1,
+    device float * dst,
+    constant int64_t & ne00,
+    constant int64_t & ne01,
+    constant int64_t & ne02,
+    constant uint64_t & nb00,
+    constant uint64_t & nb01,
+    constant uint64_t & nb02,
+    constant int64_t & ne10,
+    constant int64_t & ne11,
+    constant int64_t & ne12,
+    constant uint64_t & nb10,
+    constant uint64_t & nb11,
+    constant uint64_t & nb12,
+    constant int64_t & ne0,
+    constant int64_t & ne1,
+    constant uint & r2,
+    constant uint & r3,
+    threadgroup int8_t * shared_values [[threadgroup(0)]],
+    uint3 tgpig[[threadgroup_position_in_grid]],
+    uint tiisg[[thread_index_in_simdgroup]],
+    uint sgitg[[simdgroup_index_in_threadgroup]]) {
+
+    kernel_mul_mv_iq4_xs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg);
+}
+
+//============================= templates and their specializations =============================
+
+// NOTE: this is not dequantizing - we are simply fitting the template
+// NOTE(review): the template parameter list (e.g. <typename type4x4>) appears
+// to have been lost from these "template" lines in this hunk; the bodies use
+// type4x4 — verify against the upstream file.
+template
+void dequantize_f32(device const float4x4 * src, short il, thread type4x4 & reg) {
+    float4x4 temp = *(((device float4x4 *)src));
+    for (int i = 0; i < 16; i++){
+        reg[i/4][i%4] = temp[i/4][i%4];
+    }
+}
+
+// Copies 16 half values into the destination register tile (no real dequantization).
+template
+void dequantize_f16(device const half4x4 * src, short il, thread type4x4 & reg) {
half4x4 temp = *(((device half4x4 *)src));
+    for (int i = 0; i < 16; i++){
+        reg[i/4][i%4] = temp[i/4][i%4];
+    }
+}
+
+// Dequantizes 16 Q4_0 values into reg; il selects the low (0) or high (1) nibbles.
+template
+void dequantize_q4_0(device const block_q4_0 *xb, short il, thread type4x4 & reg) {
+    device const uint16_t * qs = ((device const uint16_t *)xb + 1);
+    const float d1 = il ? (xb->d / 16.h) : xb->d;
+    const float d2 = d1 / 256.f;
+    const float md = -8.h * xb->d;    // -8 zero-point folded into an additive term
+    const ushort mask0 = il ? 0x00F0 : 0x000F;
+    const ushort mask1 = mask0 << 8;
+
+    for (int i=0;i<8;i++) {
+        reg[i/2][2*(i%2)+0] = d1 * (qs[i] & mask0) + md;
+        reg[i/2][2*(i%2)+1] = d2 * (qs[i] & mask1) + md;
+    }
+}
+
+// Dequantizes 16 Q4_1 values (scale d plus explicit minimum m).
+template
+void dequantize_q4_1(device const block_q4_1 *xb, short il, thread type4x4 & reg) {
+    device const uint16_t * qs = ((device const uint16_t *)xb + 2);
+    const float d1 = il ? (xb->d / 16.h) : xb->d;
+    const float d2 = d1 / 256.f;
+    const float m = xb->m;
+    const ushort mask0 = il ? 0x00F0 : 0x000F;
+    const ushort mask1 = mask0 << 8;
+
+    for (int i=0;i<8;i++) {
+        reg[i/2][2*(i%2)+0] = ((qs[i] & mask0) * d1) + m;
+        reg[i/2][2*(i%2)+1] = ((qs[i] & mask1) * d2) + m;
+    }
+}
+
+// Dequantizes 16 Q5_0 values; the 5th bit of each quant comes from the qh bitfield.
+template
+void dequantize_q5_0(device const block_q5_0 *xb, short il, thread type4x4 & reg) {
+    device const uint16_t * qs = ((device const uint16_t *)xb + 3);
+    const float d = xb->d;
+    const float md = -16.h * xb->d;
+    const ushort mask = il ? 0x00F0 : 0x000F;
+
+    const uint32_t qh = *((device const uint32_t *)xb->qh);
+
+    const int x_mv = il ? 4 : 0;
+
+    const int gh_mv = il ? 12 : 0;
+    const int gh_bk = il ?
0 : 4;
+
+    for (int i = 0; i < 8; i++) {
+        // extract the 5-th bits for x0 and x1
+        const uint8_t xh_0 = ((qh >> (gh_mv + 2*i )) << gh_bk) & 0x10;
+        const uint8_t xh_1 = ((qh >> (gh_mv + 2*i+1)) << gh_bk) & 0x10;
+
+        // combine the 4-bits from qs with the 5th bit
+        const int32_t x0 = ((((qs[i] ) & mask) >> x_mv) | xh_0);
+        const int32_t x1 = ((((qs[i] >> 8) & mask) >> x_mv) | xh_1);
+
+        reg[i/2][2*(i%2)+0] = d * x0 + md;
+        reg[i/2][2*(i%2)+1] = d * x1 + md;
+    }
+}
+
+// Dequantizes 16 Q5_1 values (like Q5_0 but with an explicit minimum m).
+template
+void dequantize_q5_1(device const block_q5_1 *xb, short il, thread type4x4 & reg) {
+    device const uint16_t * qs = ((device const uint16_t *)xb + 4);
+    const float d = xb->d;
+    const float m = xb->m;
+    const ushort mask = il ? 0x00F0 : 0x000F;
+
+    const uint32_t qh = *((device const uint32_t *)xb->qh);
+
+    const int x_mv = il ? 4 : 0;
+
+    const int gh_mv = il ? 12 : 0;
+    const int gh_bk = il ? 0 : 4;
+
+    for (int i = 0; i < 8; i++) {
+        // extract the 5-th bits for x0 and x1
+        const uint8_t xh_0 = ((qh >> (gh_mv + 2*i )) << gh_bk) & 0x10;
+        const uint8_t xh_1 = ((qh >> (gh_mv + 2*i+1)) << gh_bk) & 0x10;
+
+        // combine the 4-bits from qs with the 5th bit
+        const int32_t x0 = ((((qs[i] ) & mask) >> x_mv) | xh_0);
+        const int32_t x1 = ((((qs[i] >> 8) & mask) >> x_mv) | xh_1);
+
+        reg[i/2][2*(i%2)+0] = d * x0 + m;
+        reg[i/2][2*(i%2)+1] = d * x1 + m;
+    }
+}
+
+// Dequantizes 16 Q8_0 values; il selects the first or second half of the block.
+template
+void dequantize_q8_0(device const block_q8_0 *xb, short il, thread type4x4 & reg) {
+    device const int8_t * qs = ((device const int8_t *)xb->qs);
+    const half d = xb->d;
+
+    for (int i = 0; i < 16; i++) {
+        reg[i/4][i%4] = (qs[i + 16*il] * d);
+    }
+}
+
+// Dequantizes 16 Q2_K values; il (0..15) selects the 16-element slice of the super-block.
+template
+void dequantize_q2_K(device const block_q2_K *xb, short il, thread type4x4 & reg) {
+    const float d = xb->d;
+    const float min = xb->dmin;
+    device const uint8_t * q = (device const uint8_t *)xb->qs;
+    float dl, ml;
+    uint8_t sc = xb->scales[il];
+
+    q = q + 32*(il/8) + 16*(il&1);
+    il = (il/2)%4;
+
+    // coef/mask pick the 2-bit field for this slice out of each byte
+    half coef = il>1 ? (il>2 ? 1/64.h : 1/16.h) : (il>0 ?
1/4.h : 1.h);
+    uchar mask = il>1 ? (il>2 ? 192 : 48) : (il>0 ? 12 : 3);
+    dl = d * (sc & 0xF) * coef, ml = min * (sc >> 4);
+    for (int i = 0; i < 16; ++i) {
+        reg[i/4][i%4] = dl * (q[i] & mask) - ml;
+    }
+}
+
+// Dequantizes 16 Q3_K values; 6-bit signed sub-scales are reassembled from
+// two scale bytes, and hmask supplies the high (3rd) bit of each quant.
+template
+void dequantize_q3_K(device const block_q3_K *xb, short il, thread type4x4 & reg) {
+    const half d_all = xb->d;
+    device const uint8_t * q = (device const uint8_t *)xb->qs;
+    device const uint8_t * h = (device const uint8_t *)xb->hmask;
+    device const int8_t * scales = (device const int8_t *)xb->scales;
+
+    q = q + 32 * (il/8) + 16 * (il&1);
+    h = h + 16 * (il&1);
+    uint8_t m = 1 << (il/2);
+    uint16_t kmask1 = (il/4)>1 ? ((il/4)>2 ? 192 : 48) : \
+                                 ((il/4)>0 ? 12 : 3);
+    uint16_t kmask2 = il/8 ? 0xF0 : 0x0F;
+    uint16_t scale_2 = scales[il%8], scale_1 = scales[8 + il%4];
+    int16_t dl_int = (il/4)&1 ? (scale_2&kmask2) | ((scale_1&kmask1) << 2)
+                              : (scale_2&kmask2) | ((scale_1&kmask1) << 4);
+    float dl = il<8 ? d_all * (dl_int - 32.f) : d_all * (dl_int / 16.f - 32.f);
+    const float ml = 4.f * dl;
+
+    il = (il/2) & 3;
+    const half coef = il>1 ? (il>2 ? 1/64.h : 1/16.h) : (il>0 ? 1/4.h : 1.h);
+    const uint8_t mask = il>1 ? (il>2 ? 192 : 48) : (il>0 ? 12 : 3);
+    dl *= coef;
+
+    for (int i = 0; i < 16; ++i) {
+        // when the hmask bit is clear the quant is offset by -4 (via ml)
+        reg[i/4][i%4] = dl * (q[i] & mask) - (h[i] & m ? 0 : ml);
+    }
+}
+
+// Extracts the (scale, min) 6-bit pair for K-quant super-blocks; the layout
+// differs for the first four and last four sub-blocks (j < 4 vs j >= 4).
+static inline uchar2 get_scale_min_k4_just2(int j, int k, device const uchar * q) {
+    return j < 4 ? uchar2{uchar(q[j+0+k] & 63), uchar(q[j+4+k] & 63)}
+                 : uchar2{uchar((q[j+4+k] & 0xF) | ((q[j-4+k] & 0xc0) >> 2)), uchar((q[j+4+k] >> 4) | ((q[j-0+k] & 0xc0) >> 2))};
+}
+
+// Dequantizes 16 Q4_K values using the shared K-quant scale/min extraction.
+template
+void dequantize_q4_K(device const block_q4_K *xb, short il, thread type4x4 & reg) {
+    device const uchar * q = xb->qs;
+
+    short is = (il/4) * 2;
+    q = q + (il/4) * 32 + 16 * (il&1);
+    il = il & 3;
+    const uchar2 sc = get_scale_min_k4_just2(is, il/2, xb->scales);
+    const float d = il < 2 ?
xb->d : xb->d / 16.h;
+    const float min = xb->dmin;
+    const float dl = d * sc[0];
+    const float ml = min * sc[1];
+
+    const ushort mask = il<2 ? 0x0F : 0xF0;
+    for (int i = 0; i < 16; ++i) {
+        reg[i/4][i%4] = dl * (q[i] & mask) - ml;
+    }
+}
+
+// Dequantizes 16 Q5_K values; qh supplies the 5th bit (worth 16 or 256 after shifting).
+template
+void dequantize_q5_K(device const block_q5_K *xb, short il, thread type4x4 & reg) {
+    device const uint8_t * q = xb->qs;
+    device const uint8_t * qh = xb->qh;
+
+    short is = (il/4) * 2;
+    q = q + 32 * (il/4) + 16 * (il&1);
+    qh = qh + 16 * (il&1);
+    uint8_t ul = 1 << (il/2);
+    il = il & 3;
+    const uchar2 sc = get_scale_min_k4_just2(is, il/2, xb->scales);
+    const float d = il < 2 ? xb->d : xb->d / 16.f;
+    const float min = xb->dmin;
+    const float dl = d * sc[0];
+    const float ml = min * sc[1];
+
+    const ushort mask = il<2 ? 0x0F : 0xF0;
+    const float qh_val = il<2 ? 16.f : 256.f;
+    for (int i = 0; i < 16; ++i) {
+        reg[i/4][i%4] = dl * ((q[i] & mask) + (qh[i] & ul ? qh_val : 0)) - ml;
+    }
+}
+
+// Dequantizes 16 Q6_K values; 6-bit quants are built from ql (low 4) and qh (high 2).
+template
+void dequantize_q6_K(device const block_q6_K *xb, short il, thread type4x4 & reg) {
+    const half d_all = xb->d;
+    device const uint8_t * ql = (device const uint8_t *)xb->ql;
+    device const uint8_t * qh = (device const uint8_t *)xb->qh;
+    device const int8_t * scales = (device const int8_t *)xb->scales;
+
+    ql = ql + 64*(il/8) + 32*((il/2)&1) + 16*(il&1);
+    qh = qh + 32*(il/8) + 16*(il&1);
+    float sc = scales[(il%2) + 2 * ((il/2))];
+    il = (il/2) & 3;
+
+    const uint16_t kmask1 = il>1 ? (il>2 ? 192 : 48) : (il>0 ? 12 : 3);
+    const uint16_t kmask2 = il>1 ? 0xF0 : 0x0F;
+    const float coef = il>1 ? 1.f/16.f : 1.f;
+    const float ml = d_all * sc * 32.f;  // -32 center folded into an additive term
+    const float dl = d_all * sc * coef;
+    for (int i = 0; i < 16; ++i) {
+        const half q = il&1 ?
((ql[i] & kmask2) | ((qh[i] & kmask1) << 2))
+                        : ((ql[i] & kmask2) | ((qh[i] & kmask1) << 4));
+        reg[i/4][i%4] = dl * q - ml;
+    }
+}
+
+// Dequantizes 16 IQ2_XXS values via the iq2xxs_grid codebook, with signs taken
+// from the ksigns_iq2xs table.
+template
+void dequantize_iq2_xxs(device const block_iq2_xxs * xb, short il, thread type4x4 & reg) {
+    // il is 0...15 for QK_K = 256 => index of block of 32 is il/2
+    const float d = xb->d;
+    const int ib32 = il/2;
+    il = il%2;
+    // il = 0 or 1. il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16
+    // each block of 32 needs 2 uint32_t's for the quants & scale, so 4 uint16_t's.
+    device const uint16_t * q2 = xb->qs + 4*ib32;
+    const uint32_t aux32_g = q2[0] | (q2[1] << 16);
+    const uint32_t aux32_s = q2[2] | (q2[3] << 16);
+    thread const uint8_t * aux8 = (thread const uint8_t *)&aux32_g;
+    const float dl = d * (0.5f + (aux32_s >> 28)) * 0.25f;
+    constant uint8_t * grid = (constant uint8_t *)(iq2xxs_grid + aux8[2*il+0]);
+    uint8_t signs = ksigns_iq2xs[(aux32_s >> 14*il) & 127];
+    for (int i = 0; i < 8; ++i) {
+        reg[i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f);
+    }
+    grid = (constant uint8_t *)(iq2xxs_grid + aux8[2*il+1]);
+    signs = ksigns_iq2xs[(aux32_s >> (14*il+7)) & 127];
+    for (int i = 0; i < 8; ++i) {
+        reg[2+i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f);
+    }
+}
+
+// Dequantizes 16 IQ2_XS values: 9-bit grid index and 7-bit sign index are
+// packed together in each uint16 of qs.
+template
+void dequantize_iq2_xs(device const block_iq2_xs * xb, short il, thread type4x4 & reg) {
+    // il is 0...15 for QK_K = 256 => index of block of 32 is il/2
+    const float d = xb->d;
+    const int ib32 = il/2;
+    il = il%2;
+    // il = 0 or 1. il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16
+    device const uint16_t * q2 = xb->qs + 4*ib32;
+    const float dl = d * (0.5f + ((xb->scales[ib32] >> 4*il) & 0xf)) * 0.25f;
+    constant uint8_t * grid = (constant uint8_t *)(iq2xs_grid + (q2[2*il+0] & 511));
+    uint8_t signs = ksigns_iq2xs[q2[2*il+0] >> 9];
+    for (int i = 0; i < 8; ++i) {
+        reg[i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ?
-1.f : 1.f);
+    }
+    grid = (constant uint8_t *)(iq2xs_grid + (q2[2*il+1] & 511));
+    signs = ksigns_iq2xs[q2[2*il+1] >> 9];
+    for (int i = 0; i < 8; ++i) {
+        reg[2+i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f);
+    }
+}
+
+// Dequantizes 16 IQ3_XXS values via the iq3xxs_grid codebook; grid/sign
+// metadata lives in the second half of qs (offset QK_K/4).
+template
+void dequantize_iq3_xxs(device const block_iq3_xxs * xb, short il, thread type4x4 & reg) {
+    // il is 0...15 for QK_K = 256 => index of block of 32 is il/2
+    const float d = xb->d;
+    const int ib32 = il/2;
+    il = il%2;
+    // il = 0 or 1. il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16
+    device const uint8_t * q3 = xb->qs + 8*ib32;
+    device const uint16_t * gas = (device const uint16_t *)(xb->qs + QK_K/4) + 2*ib32;
+    const uint32_t aux32 = gas[0] | (gas[1] << 16);
+    const float dl = d * (0.5f + (aux32 >> 28)) * 0.5f;
+    constant uint8_t * grid1 = (constant uint8_t *)(iq3xxs_grid + q3[4*il+0]);
+    constant uint8_t * grid2 = (constant uint8_t *)(iq3xxs_grid + q3[4*il+1]);
+    uint8_t signs = ksigns_iq2xs[(aux32 >> 14*il) & 127];
+    for (int i = 0; i < 4; ++i) {
+        reg[0][i] = dl * grid1[i] * (signs & kmask_iq2xs[i+0] ? -1.f : 1.f);
+        reg[1][i] = dl * grid2[i] * (signs & kmask_iq2xs[i+4] ? -1.f : 1.f);
+    }
+    grid1 = (constant uint8_t *)(iq3xxs_grid + q3[4*il+2]);
+    grid2 = (constant uint8_t *)(iq3xxs_grid + q3[4*il+3]);
+    signs = ksigns_iq2xs[(aux32 >> (14*il+7)) & 127];
+    for (int i = 0; i < 4; ++i) {
+        reg[2][i] = dl * grid1[i] * (signs & kmask_iq2xs[i+0] ? -1.f : 1.f);
+        reg[3][i] = dl * grid2[i] * (signs & kmask_iq2xs[i+4] ? -1.f : 1.f);
+    }
+}
+
+// Dequantizes 16 IQ3_S values; qh contributes the 9th grid-index bit and
+// per-8 signs come from the explicit signs array.
+template
+void dequantize_iq3_s(device const block_iq3_s * xb, short il, thread type4x4 & reg) {
+    // il is 0...15 for QK_K = 256 => index of block of 32 is il/2
+    const float d = xb->d;
+    const int ib32 = il/2;
+    il = il%2;
+    // il = 0 or 1.
il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16
+    device const uint8_t * qs = xb->qs + 8*ib32;
+    device const uint8_t * signs = xb->signs + 4*ib32 + 2*il;
+    const uint8_t qh = xb->qh[ib32] >> 4*il;
+    const float dl = d * (1 + 2*((xb->scales[ib32/2] >> 4*(ib32%2)) & 0xf));
+    constant uint8_t * grid1 = (constant uint8_t *)(iq3s_grid + (qs[4*il+0] | ((qh << 8) & 256)));
+    constant uint8_t * grid2 = (constant uint8_t *)(iq3s_grid + (qs[4*il+1] | ((qh << 7) & 256)));
+    for (int i = 0; i < 4; ++i) {
+        reg[0][i] = dl * grid1[i] * select(1, -1, signs[0] & kmask_iq2xs[i+0]);
+        reg[1][i] = dl * grid2[i] * select(1, -1, signs[0] & kmask_iq2xs[i+4]);
+    }
+    grid1 = (constant uint8_t *)(iq3s_grid + (qs[4*il+2] | ((qh << 6) & 256)));
+    grid2 = (constant uint8_t *)(iq3s_grid + (qs[4*il+3] | ((qh << 5) & 256)));
+    for (int i = 0; i < 4; ++i) {
+        reg[2][i] = dl * grid1[i] * select(1, -1, signs[1] & kmask_iq2xs[i+0]);
+        reg[3][i] = dl * grid2[i] * select(1, -1, signs[1] & kmask_iq2xs[i+4]);
+    }
+}
+
+// Dequantizes 16 IQ2_S values; 10-bit grid indices combine qs with two bits of qh,
+// and the signs bytes live QK_K/8 past the quants.
+template
+void dequantize_iq2_s(device const block_iq2_s * xb, short il, thread type4x4 & reg) {
+    // il is 0...15 for QK_K = 256 => index of block of 32 is il/2
+    const float d = xb->d;
+    const int ib32 = il/2;
+    il = il%2;
+    // il = 0 or 1.
il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16
+    device const uint8_t * qs = xb->qs + 4*ib32 + 2*il;
+    device const uint8_t * signs = qs + QK_K/8;
+    const uint8_t qh = xb->qh[ib32] >> 4*il;
+    const float dl = d * (0.5f + ((xb->scales[ib32] >> 4*il) & 0xf)) * 0.25f;
+    constant uint8_t * grid1 = (constant uint8_t *)(iq2s_grid + (qs[0] | ((qh << 8) & 0x300)));
+    constant uint8_t * grid2 = (constant uint8_t *)(iq2s_grid + (qs[1] | ((qh << 6) & 0x300)));
+    for (int i = 0; i < 8; ++i) {
+        reg[i/4+0][i%4] = dl * grid1[i] * select(1, -1, signs[0] & kmask_iq2xs[i]);
+        reg[i/4+2][i%4] = dl * grid2[i] * select(1, -1, signs[1] & kmask_iq2xs[i]);
+    }
+}
+
+// Dequantizes 16 IQ1_S values; each grid byte packs two quants (low/high nibble)
+// and ml applies the signed IQ1S_DELTA offset.
+template
+void dequantize_iq1_s(device const block_iq1_s * xb, short il, thread type4x4 & reg) {
+    // il is 0...15 for QK_K = 256 => index of block of 32 is il/2
+    const int ib32 = il/2;
+    il = il%2;
+    const float d = xb->d;
+    device const uint8_t * qs = xb->qs + 4*ib32 + 2*il;
+    device const uint16_t * qh = xb->qh;
+    const float dl = d * (2*((qh[ib32] >> 12) & 7) + 1);
+    const float ml = dl * (qh[ib32] & 0x8000 ?
-1 - IQ1S_DELTA : -1 + IQ1S_DELTA);
+    const uint16_t h = qh[ib32] >> 6*il;
+    constant uint8_t * grid1 = (constant uint8_t *)(iq1s_grid_gpu + (qs[0] | ((h << 8) & 0x700)));
+    constant uint8_t * grid2 = (constant uint8_t *)(iq1s_grid_gpu + (qs[1] | ((h << 5) & 0x700)));
+    for (int i = 0; i < 4; ++i) {
+        reg[0][i] = dl * (grid1[i] & 0xf) + ml;
+        reg[1][i] = dl * (grid1[i] >> 4) + ml;
+        reg[2][i] = dl * (grid2[i] & 0xf) + ml;
+        reg[3][i] = dl * (grid2[i] >> 4) + ml;
+    }
+}
+
+// Dequantizes 16 IQ1_M values; the block scale is reassembled from four nibbles
+// of the scales words into iq1m_scale_t. (Definition continues past this hunk.)
+template
+void dequantize_iq1_m(device const block_iq1_m * xb, short il, thread type4x4 & reg) {
+    // il is 0...15 for QK_K = 256 => index of block of 32 is il/2
+    const int ib32 = il/2;
+    il = il%2;
+    device const uint16_t * sc = (device const uint16_t *)xb->scales;
+
+    iq1m_scale_t scale;
+    scale.u16 = (sc[0] >> 12) | ((sc[1] >> 8) & 0x00f0) | ((sc[2] >> 4) & 0x0f00) | (sc[3] & 0xf000);
+    const float d = scale.f16;
+
+    device const uint8_t * qs = xb->qs + 4*ib32 + 2*il;
+    device const uint8_t * qh = xb->qh + 2*ib32 + il;
+
+    const float dl = d * (2*((sc[ib32/2] >> (6*(ib32%2)+3*il)) & 7) + 1);
+    const float ml1 = dl * (qh[0] & 0x08 ? -1 - IQ1M_DELTA : -1 + IQ1M_DELTA);
+    const float ml2 = dl * (qh[0] & 0x80 ?
-1 - IQ1M_DELTA : -1 + IQ1M_DELTA); + constant uint8_t * grid1 = (constant uint8_t *)(iq1s_grid_gpu + (qs[0] | ((qh[0] << 8) & 0x700))); + constant uint8_t * grid2 = (constant uint8_t *)(iq1s_grid_gpu + (qs[1] | ((qh[0] << 4) & 0x700))); + for (int i = 0; i < 4; ++i) { + reg[0][i] = dl * (grid1[i] & 0xf) + ml1; + reg[1][i] = dl * (grid1[i] >> 4) + ml1; + reg[2][i] = dl * (grid2[i] & 0xf) + ml2; + reg[3][i] = dl * (grid2[i] >> 4) + ml2; + } +} + +template +void dequantize_iq4_nl(device const block_iq4_nl * xb, short il, thread type4x4 & reg) { + device const uint16_t * q4 = (device const uint16_t *)xb->qs; + const float d = xb->d; + uint32_t aux32; + thread const uint8_t * q8 = (thread const uint8_t *)&aux32; + for (int i = 0; i < 4; ++i) { + aux32 = ((q4[2*i] | (q4[2*i+1] << 16)) >> 4*il) & 0x0f0f0f0f; + reg[i][0] = d * kvalues_iq4nl_f[q8[0]]; + reg[i][1] = d * kvalues_iq4nl_f[q8[1]]; + reg[i][2] = d * kvalues_iq4nl_f[q8[2]]; + reg[i][3] = d * kvalues_iq4nl_f[q8[3]]; + } +} + +template +void dequantize_iq4_xs(device const block_iq4_xs * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const int ib32 = il/2; + il = il%2; + // il = 0 or 1. 
il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16 + device const uint32_t * q4 = (device const uint32_t *)xb->qs + 4*ib32; + const int ls = ((xb->scales_l[ib32/2] >> 4*(ib32%2)) & 0xf) | (((xb->scales_h >> 2*ib32) & 3) << 4); + const float d = (float)xb->d * (ls - 32); + uint32_t aux32; + thread const uint8_t * q8 = (thread const uint8_t *)&aux32; + for (int i = 0; i < 4; ++i) { + aux32 = (q4[i] >> 4*il) & 0x0f0f0f0f; + reg[i][0] = d * kvalues_iq4nl_f[q8[0]]; + reg[i][1] = d * kvalues_iq4nl_f[q8[1]]; + reg[i][2] = d * kvalues_iq4nl_f[q8[2]]; + reg[i][3] = d * kvalues_iq4nl_f[q8[3]]; + } +} + +template +kernel void kernel_get_rows_q( + device const void * src0, + device const void * src1, + device float * dst, + constant int64_t & ne00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb1, + constant uint64_t & nb2, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint3 tptg [[threads_per_threadgroup]]) { + const int64_t i10 = tgpig.x; + const int64_t i11 = tgpig.y; + + const int64_t r = ((const device int32_t *) ((const device char *) src1 + i11*nb11 + i10*nb10))[0]; + + const int64_t i02 = i11; + + for (int64_t ind = tiitg; ind < ne00/16; ind += tptg.x) { + float4x4 temp; + dequantize_func(((device const block_q *) ((const device char *) src0 + r*nb01 + i02*nb02)) + ind/nl, ind%nl, temp); + *(((device float4x4 *) ((device char *) dst + i11*nb2 + i10*nb1)) + ind) = temp; + } +} + +template +kernel void kernel_get_rows_f( + device const void * src0, + device const void * src1, + device float * dst, + constant int64_t & ne00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb1, + constant uint64_t & nb2, + uint3 tgpig[[threadgroup_position_in_grid]], + uint 
tiitg[[thread_index_in_threadgroup]], + uint3 tptg [[threads_per_threadgroup]]) { + const int64_t i10 = tgpig.x; + const int64_t i11 = tgpig.y; + + const int64_t r = ((const device int32_t *) ((const device char *) src1 + i11*nb11 + i10*nb10))[0]; + + const int64_t i02 = i11; + + for (int ind = tiitg; ind < ne00; ind += tptg.x) { + (( device float *) (( device char *) dst + i11*nb2 + i10*nb1))[ind] = + ((const device T *) ((const device char *) src0 + i02*nb02 + r*nb01))[ind]; + } +} + +kernel void kernel_get_rows_i32( + device const void * src0, + device const void * src1, + device int32_t * dst, + constant int64_t & ne00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb1, + constant uint64_t & nb2, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint3 tptg [[threads_per_threadgroup]]) { + const int64_t i10 = tgpig.x; + const int64_t i11 = tgpig.y; + + const int64_t r = ((const device int32_t *) ((const device char *) src1 + i11*nb11 + i10*nb10))[0]; + + const int64_t i02 = i11; + + for (int ind = tiitg; ind < ne00; ind += tptg.x) { + (( device int32_t *) (( device char *) dst + i11*nb2 + i10*nb1))[ind] = + ((const device int32_t *) ((const device char *) src0 + i02*nb02 + r*nb01))[ind]; + } +} + + +#define BLOCK_SIZE_M 64 // 8 simdgroup matrices from matrix A +#define BLOCK_SIZE_N 32 // 4 simdgroup matrices from matrix B +#define BLOCK_SIZE_K 32 +#define THREAD_MAT_M 4 // each thread take 4 simdgroup matrices from matrix A +#define THREAD_MAT_N 2 // each thread take 2 simdgroup matrices from matrix B +#define THREAD_PER_BLOCK 128 +#define THREAD_PER_ROW 2 // 2 thread for each row in matrix A to load numbers +#define THREAD_PER_COL 4 // 4 thread for each row in matrix B to load numbers +#define SG_MAT_SIZE 64 // simdgroup matrix is of shape 8x8 +#define SG_MAT_ROW 8 + +// each block_q contains 16*nl 
weights +template +kernel void kernel_mul_mm(device const uchar * src0, + device const uchar * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne02, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup uchar * shared_memory [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + threadgroup T * sa = (threadgroup T *)(shared_memory); + threadgroup float * sb = (threadgroup float *)(shared_memory + 4096); + + const uint r0 = tgpig.y; + const uint r1 = tgpig.x; + const uint im = tgpig.z; + + // if this block is of 64x32 shape or smaller + short n_rows = (ne0 - r0 * BLOCK_SIZE_M < BLOCK_SIZE_M) ? (ne0 - r0 * BLOCK_SIZE_M) : BLOCK_SIZE_M; + short n_cols = (ne1 - r1 * BLOCK_SIZE_N < BLOCK_SIZE_N) ? (ne1 - r1 * BLOCK_SIZE_N) : BLOCK_SIZE_N; + + // a thread shouldn't load data outside of the matrix + short thread_row = ((short)tiitg/THREAD_PER_ROW) < n_rows ? ((short)tiitg/THREAD_PER_ROW) : n_rows - 1; + short thread_col = ((short)tiitg/THREAD_PER_COL) < n_cols ? 
((short)tiitg/THREAD_PER_COL) : n_cols - 1; + + simdgroup_T8x8 ma[4]; + simdgroup_float8x8 mb[2]; + simdgroup_float8x8 c_res[8]; + for (int i = 0; i < 8; i++){ + c_res[i] = make_filled_simdgroup_matrix(0.f); + } + + short il = (tiitg % THREAD_PER_ROW); + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + uint offset0 = (i12/r2)*nb02 + (i13/r3)*(nb02*ne02); + ushort offset1 = il/nl; + + device const block_q * x = (device const block_q *)(src0 + (r0 * BLOCK_SIZE_M + thread_row) * nb01 + offset0) + offset1; + device const float * y = (device const float *)(src1 + + nb12 * im + + nb11 * (r1 * BLOCK_SIZE_N + thread_col) + + nb10 * (BLOCK_SIZE_K / THREAD_PER_COL * (tiitg % THREAD_PER_COL))); + + for (int loop_k = 0; loop_k < ne00; loop_k += BLOCK_SIZE_K) { + // load data and store to threadgroup memory + T4x4 temp_a; + dequantize_func(x, il, temp_a); + threadgroup_barrier(mem_flags::mem_threadgroup); + + #pragma unroll(16) + for (int i = 0; i < 16; i++) { + *(sa + SG_MAT_SIZE * ((tiitg / THREAD_PER_ROW / 8) \ + + (tiitg % THREAD_PER_ROW) * 16 + (i / 8) * 8) \ + + (tiitg / THREAD_PER_ROW) % 8 + (i & 7) * 8) = temp_a[i/4][i%4]; + } + + *(threadgroup float2x4 *)(sb + (tiitg % THREAD_PER_COL) * 8 * 32 + 8 * (tiitg / THREAD_PER_COL)) = *((device float2x4 *)y); + + il = (il + 2 < nl) ? il + 2 : il % 2; + x = (il < 2) ? 
x + (2+nl-1)/nl : x; + y += BLOCK_SIZE_K; + + threadgroup_barrier(mem_flags::mem_threadgroup); + + // load matrices from threadgroup memory and conduct outer products + threadgroup T * lsma = (sa + THREAD_MAT_M * SG_MAT_SIZE * (sgitg % 2)); + threadgroup float * lsmb = (sb + THREAD_MAT_N * SG_MAT_SIZE * (sgitg / 2)); + + #pragma unroll(4) + for (int ik = 0; ik < BLOCK_SIZE_K / 8; ik++) { + #pragma unroll(4) + for (int i = 0; i < 4; i++) { + simdgroup_load(ma[i],lsma + SG_MAT_SIZE * i); + } + simdgroup_barrier(mem_flags::mem_none); + #pragma unroll(2) + for (int i = 0; i < 2; i++) { + simdgroup_load(mb[i],lsmb + SG_MAT_SIZE * i); + } + + lsma += BLOCK_SIZE_M / SG_MAT_ROW * SG_MAT_SIZE; + lsmb += BLOCK_SIZE_N / SG_MAT_ROW * SG_MAT_SIZE; + + #pragma unroll(8) + for (int i = 0; i < 8; i++){ + simdgroup_multiply_accumulate(c_res[i], mb[i/4], ma[i%4], c_res[i]); + } + } + } + + if ((r0 + 1) * BLOCK_SIZE_M <= ne0 && (r1 + 1) * BLOCK_SIZE_N <= ne1) { + device float * C = dst + (BLOCK_SIZE_M * r0 + 32 * (sgitg & 1)) \ + + (BLOCK_SIZE_N * r1 + 16 * (sgitg >> 1)) * ne0 + im*ne1*ne0; + for (int i = 0; i < 8; i++) { + simdgroup_store(c_res[i], C + 8 * (i%4) + 8 * ne0 * (i/4), ne0); + } + } else { + // block is smaller than 64x32, we should avoid writing data outside of the matrix + threadgroup_barrier(mem_flags::mem_threadgroup); + threadgroup float * temp_str = ((threadgroup float *)shared_memory) \ + + 32 * (sgitg&1) + (16 * (sgitg>>1)) * BLOCK_SIZE_M; + for (int i = 0; i < 8; i++) { + simdgroup_store(c_res[i], temp_str + 8 * (i%4) + 8 * BLOCK_SIZE_M * (i/4), BLOCK_SIZE_M); + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + device float * C = dst + (BLOCK_SIZE_M * r0) + (BLOCK_SIZE_N * r1) * ne0 + im*ne1*ne0; + if (sgitg == 0) { + for (int i = 0; i < n_rows; i++) { + for (int j = tiitg; j < n_cols; j += BLOCK_SIZE_N) { + *(C + i + j * ne0) = *(temp_str + i + j * BLOCK_SIZE_M); + } + } + } + } +} + +// same as kernel_mul_mm_impl, but src1 and dst are accessed via 
indices stored in rowids +template +void kernel_mul_mm_id_impl( + device const uchar * src0, + device const uchar * src1, + threadgroup ushort2 * rowids, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne02, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + int64_t ne1, + int64_t ne0ne1, + threadgroup uchar * shared_memory, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + threadgroup half * sa = (threadgroup half *)(shared_memory); + threadgroup float * sb = (threadgroup float *)(shared_memory + 4096); + + const uint r0 = tgpig.y; + const uint r1 = tgpig.x; + + if (r1 * BLOCK_SIZE_N >= ne1) return; + + // if this block is of 64x32 shape or smaller + short n_rows = (ne0 - r0 * BLOCK_SIZE_M < BLOCK_SIZE_M) ? (ne0 - r0 * BLOCK_SIZE_M) : BLOCK_SIZE_M; + short n_cols = (ne1 - r1 * BLOCK_SIZE_N < BLOCK_SIZE_N) ? (ne1 - r1 * BLOCK_SIZE_N) : BLOCK_SIZE_N; + + // a thread shouldn't load data outside of the matrix + short thread_row = ((short)tiitg/THREAD_PER_ROW) < n_rows ? ((short)tiitg/THREAD_PER_ROW) : n_rows - 1; + short thread_col = ((short)tiitg/THREAD_PER_COL) < n_cols ? 
((short)tiitg/THREAD_PER_COL) : n_cols - 1; + + simdgroup_half8x8 ma[4]; + simdgroup_float8x8 mb[2]; + simdgroup_float8x8 c_res[8]; + for (int i = 0; i < 8; i++){ + c_res[i] = make_filled_simdgroup_matrix(0.f); + } + short il = (tiitg % THREAD_PER_ROW); + + ushort offset1 = il/nl; + + threadgroup const auto & id = rowids[r1 * BLOCK_SIZE_N + thread_col]; + + device const block_q * x = (device const block_q *)(src0 + (r0 * BLOCK_SIZE_M + thread_row) * nb01) + offset1; + device const float * y = (device const float *)(src1 + + nb12 * id[1] + + nb11 * (id[0] % ne11) + + nb10 * (BLOCK_SIZE_K / THREAD_PER_COL * (tiitg % THREAD_PER_COL))); + + for (int loop_k = 0; loop_k < ne00; loop_k += BLOCK_SIZE_K) { + // load data and store to threadgroup memory + half4x4 temp_a; + dequantize_func(x, il, temp_a); + threadgroup_barrier(mem_flags::mem_threadgroup); + + for (int i = 0; i < 16; i++) { + *(sa + SG_MAT_SIZE * ((tiitg / THREAD_PER_ROW / 8) \ + + (tiitg % THREAD_PER_ROW) * 16 + (i / 8) * 8) \ + + (tiitg / THREAD_PER_ROW) % 8 + (i & 7) * 8) = temp_a[i/4][i%4]; + } + + *(threadgroup float2x4 *)(sb + (tiitg % THREAD_PER_COL) * 8 * 32 + 8 * (tiitg / THREAD_PER_COL)) = *((device float2x4 *)y); + + il = (il + 2 < nl) ? il + 2 : il % 2; + x = (il < 2) ? 
x + (2+nl-1)/nl : x; + y += BLOCK_SIZE_K; + + threadgroup_barrier(mem_flags::mem_threadgroup); + + // load matrices from threadgroup memory and conduct outer products + threadgroup half * lsma = (sa + THREAD_MAT_M * SG_MAT_SIZE * (sgitg % 2)); + threadgroup float * lsmb = (sb + THREAD_MAT_N * SG_MAT_SIZE * (sgitg / 2)); + + for (int ik = 0; ik < BLOCK_SIZE_K / 8; ik++) { + for (int i = 0; i < 4; i++) { + simdgroup_load(ma[i], lsma + SG_MAT_SIZE * i); + } + simdgroup_barrier(mem_flags::mem_none); + for (int i = 0; i < 2; i++) { + simdgroup_load(mb[i], lsmb + SG_MAT_SIZE * i); + } + + lsma += BLOCK_SIZE_M / SG_MAT_ROW * SG_MAT_SIZE; + lsmb += BLOCK_SIZE_N / SG_MAT_ROW * SG_MAT_SIZE; + + for (int i = 0; i < 8; i++){ + simdgroup_multiply_accumulate(c_res[i], mb[i/4], ma[i%4], c_res[i]); + } + } + } + + { + threadgroup_barrier(mem_flags::mem_threadgroup); + threadgroup float * temp_str = ((threadgroup float *)shared_memory) \ + + 32 * (sgitg&1) + (16 * (sgitg>>1)) * BLOCK_SIZE_M; + for (int i = 0; i < 8; i++) { + simdgroup_store(c_res[i], temp_str + 8 * (i%4) + 8 * BLOCK_SIZE_M * (i/4), BLOCK_SIZE_M); + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + device float * C = dst + (BLOCK_SIZE_M * r0); + if (sgitg == 0) { + for (int j = tiitg; j < n_cols; j += BLOCK_SIZE_N) { + threadgroup const auto & jid = rowids[r1 * BLOCK_SIZE_N + j]; + int joff = jid[0] * ne0 + jid[1] * ne0ne1; + for (int i = 0; i < n_rows; i++) { + *(C + i + joff) = *(temp_str + i + j * BLOCK_SIZE_M); + } + } + } + } +} + +template +kernel void kernel_mul_mm_id( + device const uchar * src0s, + device const uchar * src1, + device float * dst, + device const uchar * ids, + constant int64_t & nei0, + constant int64_t & nei1, + constant uint64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne02, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + 
constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + threadgroup uchar * shared_memory [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + const int32_t i02 = tgpig.z; + tgpig.z = 0; + + device const uchar * src0 = src0s + i02*nb02; + + // row indices + threadgroup ushort2 * rowids = (threadgroup ushort2 *)(shared_memory + 8192); + + // TODO: parallelize this loop + int64_t _ne1 = 0; + for (ushort ii1 = 0; ii1 < nei1; ii1++) { + for (ushort ii0 = 0; ii0 < nei0; ii0++) { + int32_t id = ((device int32_t *) (ids + ii1*nbi1))[ii0]; + if (id == i02) { + //if (tiitg == 0) { + rowids[_ne1] = ushort2(ii0, ii1); + //} + _ne1++; + } + } + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + kernel_mul_mm_id_impl( + src0, + src1, + rowids, + dst, + ne00, + ne02, + nb01, + nb02, + ne11, + ne12, + nb10, + nb11, + nb12, + ne0, + _ne1, + ne0*ne1, + shared_memory, + tgpig, + tiitg, + sgitg); +} + +#define QK_NL 16 + +// +// get rows +// + +typedef decltype(kernel_get_rows_f) get_rows_f_t; + +template [[host_name("kernel_get_rows_f32")]] kernel get_rows_f_t kernel_get_rows_f; +template [[host_name("kernel_get_rows_f16")]] kernel get_rows_f_t kernel_get_rows_f; + +typedef decltype(kernel_get_rows_q) get_rows_q_t; + +template [[host_name("kernel_get_rows_q4_0")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q4_1")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q5_0")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q5_1")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q8_0")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q2_K")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q3_K")]] kernel get_rows_q_t kernel_get_rows_q; 
+template [[host_name("kernel_get_rows_q4_K")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q5_K")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_q6_K")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq2_xxs")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq2_xs")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq3_xxs")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq3_s")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq2_s")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq1_s")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq1_m")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq4_nl")]] kernel get_rows_q_t kernel_get_rows_q; +template [[host_name("kernel_get_rows_iq4_xs")]] kernel get_rows_q_t kernel_get_rows_q; + +// +// matrix-matrix multiplication +// + +typedef decltype(kernel_mul_mm) mat_mm_t; + +template [[host_name("kernel_mul_mm_f32_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_f16_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q4_0_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q4_1_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q5_0_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q5_1_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q8_0_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q2_K_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q3_K_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q4_K_f32")]] kernel mat_mm_t kernel_mul_mm; +template 
[[host_name("kernel_mul_mm_q5_K_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_q6_K_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq2_xxs_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq2_xs_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq3_xxs_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq3_s_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq2_s_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq1_s_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq1_m_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq4_nl_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq4_xs_f32")]] kernel mat_mm_t kernel_mul_mm; + +// +// indirect matrix-matrix multiplication +// + +typedef decltype(kernel_mul_mm_id) mat_mm_id_t; + +template [[host_name("kernel_mul_mm_id_f32_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_f16_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q4_0_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q4_1_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q5_0_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q5_1_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q8_0_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q2_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q3_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q4_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_q5_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template 
[[host_name("kernel_mul_mm_id_q6_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq2_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq2_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq3_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq3_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq2_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq1_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq1_m_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq4_nl_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq4_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; + +// +// matrix-vector multiplication +// + +typedef void (kernel_mul_mv_impl_t)( + device const char * src0, + device const char * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + uint64_t nb00, + uint64_t nb01, + uint64_t nb02, + int64_t ne10, + int64_t ne11, + int64_t ne12, + uint64_t nb10, + uint64_t nb11, + uint64_t nb12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + uint3 tgpig, + uint tiisg); + +typedef void (kernel_mul_mv2_impl_t)( + device const void * src0, + device const float * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + int64_t ne10, + int64_t ne12, + int64_t ne0, + int64_t ne1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiisg, + uint sgitg); + +template +void mmv_fn( + device const char * src0, + device const char * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + uint64_t nb00, + uint64_t nb01, + uint64_t nb02, + int64_t ne10, + int64_t ne11, + int64_t ne12, + int64_t ne13, + uint64_t nb10, + uint64_t nb11, + uint64_t nb12, 
+ int64_t ne0, + int64_t ne1, + uint64_t nb1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiitg, + uint tiisg, + uint sgitg) { + impl_fn(src0,src1,dst,ne00,ne01,ne02,nb00,nb01,nb02,ne10,ne11,ne12,nb10,nb11,nb12,ne0,ne1,r2,r3,tgpig,tiisg); +} + +template +void mmv_fn( + device const char * src0, + device const char * src1, + device float * dst, + int64_t ne00, + int64_t ne01, + int64_t ne02, + uint64_t nb00, + uint64_t nb01, + uint64_t nb02, + int64_t ne10, + int64_t ne11, + int64_t ne12, + int64_t ne13, + uint64_t nb10, + uint64_t nb11, + uint64_t nb12, + int64_t ne0, + int64_t ne1, + uint64_t nb1, + uint r2, + uint r3, + threadgroup int8_t * shared_values, + uint3 tgpig, + uint tiitg, + uint tiisg, + uint sgitg) { + impl_fn(src0,(const device float *)src1,dst,ne00,ne01,ne02,ne10,ne12,ne0,ne1,r2,r3,shared_values,tgpig,tiisg,sgitg); +} + +typedef decltype(mmv_fn>) mul_mv_impl_fn_t; + +template +kernel void kernel_mul_mv_id( + device const char * src0s, + device const char * src1, + device float * dst, + device const char * ids, + constant int64_t & nei0, + constant int64_t & nei1, + constant uint64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + const int iid1 = tgpig.z/nei0; + const int idx = tgpig.z%nei0; + + tgpig.z = 0; + + const int32_t i02 = ((device const int32_t *) (ids + iid1*nbi1))[idx]; + 
+ const int64_t i11 = idx % ne11; + const int64_t i12 = iid1; + + const int64_t i1 = idx; + const int64_t i2 = i12; + + device const char * src0_cur = src0s + i02*nb02; + device const char * src1_cur = src1 + i11*nb11 + i12*nb12; + device float * dst_cur = dst + i1*ne0 + i2*ne1*ne0; + + impl_fn( + /* src0 */ src0_cur, + /* src1 */ src1_cur, + /* dst */ dst_cur, + /* ne00 */ ne00, + /* ne01 */ ne01, + /* ne02 */ 1,//ne02, + /* nb00 */ nb00, + /* nb01 */ nb01, + /* nb02 */ nb02, + /* ne10 */ ne10, + /* ne11 */ 1,//ne11, + /* ne12 */ 1,//ne12, + /* ne13 */ 1,//ne13, + /* nb10 */ nb10, + /* nb11 */ nb11, + /* nb12 */ nb12, + /* ne0 */ ne0, + /* ne1 */ 1,//ne1, + /* nb1 */ nb1, + /* r2 */ 1, + /* r3 */ 1, + shared_values, + tgpig, + tiitg, + tiisg, + sgitg); +} + +typedef decltype(kernel_mul_mv_id>>) kernel_mul_mv_id_t; + +template [[host_name("kernel_mul_mv_id_f32_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>>; +template [[host_name("kernel_mul_mv_id_f16_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>>; +template [[host_name("kernel_mul_mv_id_q8_0_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_q4_0_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>>; +template [[host_name("kernel_mul_mv_id_q4_1_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>>; +template [[host_name("kernel_mul_mv_id_q5_0_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>>; +template [[host_name("kernel_mul_mv_id_q5_1_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>>; +template [[host_name("kernel_mul_mv_id_q2_K_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_q3_K_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_q4_K_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_q5_K_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_q6_K_f32")]] kernel kernel_mul_mv_id_t 
kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq1_s_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq1_m_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq2_xxs_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq2_xs_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq3_xxs_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq3_s_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq2_s_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq4_nl_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; +template [[host_name("kernel_mul_mv_id_iq4_xs_f32")]] kernel kernel_mul_mv_id_t kernel_mul_mv_id>; diff --git a/ml/backend/ggml/ggml-metal_darwin_arm64.m b/ml/backend/ggml/ggml-metal_darwin_arm64.m new file mode 100644 index 000000000..41d66f92f --- /dev/null +++ b/ml/backend/ggml/ggml-metal_darwin_arm64.m @@ -0,0 +1,3669 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +#import "ggml-metal.h" + +#import "ggml-impl.h" +#import "ggml-backend-impl.h" + +#import + +#import + +#undef MIN +#undef MAX +#define MIN(a, b) ((a) < (b) ? (a) : (b)) +#define MAX(a, b) ((a) > (b) ? (a) : (b)) + +// max memory buffers that can be mapped to the device +#define GGML_METAL_MAX_BUFFERS 64 + +// max number of MTLCommandBuffer used to submit a graph for processing +#define GGML_METAL_MAX_COMMAND_BUFFERS 8 + +#ifdef GGML_METAL_NDEBUG +#define GGML_METAL_LOG(...) +#define GGML_METAL_LOG_INFO(...) +#define GGML_METAL_LOG_WARN(...) +#define GGML_METAL_LOG_ERROR(...) +#else +#define GGML_METAL_LOG(...) ggml_metal_log(GGML_LOG_LEVEL_NONE, __VA_ARGS__) +#define GGML_METAL_LOG_INFO(...) ggml_metal_log(GGML_LOG_LEVEL_INFO, __VA_ARGS__) +#define GGML_METAL_LOG_WARN(...) ggml_metal_log(GGML_LOG_LEVEL_WARN, __VA_ARGS__) +#define GGML_METAL_LOG_ERROR(...) ggml_metal_log(GGML_LOG_LEVEL_ERROR, __VA_ARGS__) +#define GGML_METAL_LOG_DEBUG(...) 
ggml_metal_log(GGML_LOG_LEVEL_DEBUG, __VA_ARGS__) +#endif + +#define UNUSED(x) (void)(x) + +struct ggml_metal_kernel { + id pipeline; +}; + +enum ggml_metal_kernel_type { + GGML_METAL_KERNEL_TYPE_ADD, + GGML_METAL_KERNEL_TYPE_ADD_ROW, + GGML_METAL_KERNEL_TYPE_SUB, + GGML_METAL_KERNEL_TYPE_SUB_ROW, + GGML_METAL_KERNEL_TYPE_MUL, + GGML_METAL_KERNEL_TYPE_MUL_ROW, + GGML_METAL_KERNEL_TYPE_DIV, + GGML_METAL_KERNEL_TYPE_DIV_ROW, + GGML_METAL_KERNEL_TYPE_REPEAT_F32, + GGML_METAL_KERNEL_TYPE_REPEAT_F16, + GGML_METAL_KERNEL_TYPE_REPEAT_I32, + GGML_METAL_KERNEL_TYPE_REPEAT_I16, + GGML_METAL_KERNEL_TYPE_SCALE, + GGML_METAL_KERNEL_TYPE_SCALE_4, + GGML_METAL_KERNEL_TYPE_CLAMP, + GGML_METAL_KERNEL_TYPE_TANH, + GGML_METAL_KERNEL_TYPE_RELU, + GGML_METAL_KERNEL_TYPE_SIGMOID, + GGML_METAL_KERNEL_TYPE_GELU, + GGML_METAL_KERNEL_TYPE_GELU_4, + GGML_METAL_KERNEL_TYPE_GELU_QUICK, + GGML_METAL_KERNEL_TYPE_GELU_QUICK_4, + GGML_METAL_KERNEL_TYPE_SILU, + GGML_METAL_KERNEL_TYPE_SILU_4, + GGML_METAL_KERNEL_TYPE_SOFT_MAX_F16, + GGML_METAL_KERNEL_TYPE_SOFT_MAX_F16_4, + GGML_METAL_KERNEL_TYPE_SOFT_MAX_F32, + GGML_METAL_KERNEL_TYPE_SOFT_MAX_F32_4, + GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF, + GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8, + GGML_METAL_KERNEL_TYPE_GET_ROWS_F32, + GGML_METAL_KERNEL_TYPE_GET_ROWS_F16, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_0, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_1, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_0, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_1, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q8_0, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q2_K, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q3_K, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_K, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_K, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q6_K, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_S, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_S, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_M, + 
GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_XS, + GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, + GGML_METAL_KERNEL_TYPE_RMS_NORM, + GGML_METAL_KERNEL_TYPE_GROUP_NORM, + GGML_METAL_KERNEL_TYPE_NORM, + GGML_METAL_KERNEL_TYPE_SSM_CONV_F32, + GGML_METAL_KERNEL_TYPE_SSM_SCAN_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_F32_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F16, + GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_1ROW, + GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_L4, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q6_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_M_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_XS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, + //GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, + //GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_1ROW, + //GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_L4, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q8_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q2_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q3_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_K_F32, + 
GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q6_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_M_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_XS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q8_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q2_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q3_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q6_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_M_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_XS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q8_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q2_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q3_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q6_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, + 
GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_M_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_XS_F32, + GGML_METAL_KERNEL_TYPE_ROPE_NORM_F32, + GGML_METAL_KERNEL_TYPE_ROPE_NORM_F16, + GGML_METAL_KERNEL_TYPE_ROPE_NEOX_F32, + GGML_METAL_KERNEL_TYPE_ROPE_NEOX_F16, + GGML_METAL_KERNEL_TYPE_IM2COL_F16, + GGML_METAL_KERNEL_TYPE_IM2COL_F32, + GGML_METAL_KERNEL_TYPE_UPSCALE_F32, + GGML_METAL_KERNEL_TYPE_PAD_F32, + GGML_METAL_KERNEL_TYPE_UNPAD_F32, + GGML_METAL_KERNEL_TYPE_ARANGE_F32, + GGML_METAL_KERNEL_TYPE_TIMESTEP_EMBEDDING_F32, + GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, + GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC, + GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32, + GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H64, + GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H80, + GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H96, + GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H112, + GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H128, + //GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H256, // https://github.com/ggerganov/llama.cpp/issues/7261 + GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_F16_H128, + //GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_F16_H256, // https://github.com/ggerganov/llama.cpp/issues/7261 + GGML_METAL_KERNEL_TYPE_CPY_F32_F32, + GGML_METAL_KERNEL_TYPE_CPY_F32_F16, + GGML_METAL_KERNEL_TYPE_CPY_F16_F16, + GGML_METAL_KERNEL_TYPE_CPY_F16_F32, + GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0, + GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_0, + GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_1, + GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_0, + GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_1, + GGML_METAL_KERNEL_TYPE_CPY_F32_IQ4_NL, + GGML_METAL_KERNEL_TYPE_CONCAT, + GGML_METAL_KERNEL_TYPE_SQR, + GGML_METAL_KERNEL_TYPE_SQRT, + GGML_METAL_KERNEL_TYPE_SIN, + GGML_METAL_KERNEL_TYPE_COS, + GGML_METAL_KERNEL_TYPE_SUM_ROWS, + + 
GGML_METAL_KERNEL_TYPE_COUNT +}; + +struct ggml_backend_metal_context { + id device; + id queue; + + MTLComputePassDescriptor * edesc; + + dispatch_queue_t d_queue; + + struct ggml_metal_kernel kernels[GGML_METAL_KERNEL_TYPE_COUNT]; + + bool support_simdgroup_reduction; + bool support_simdgroup_mm; + + // capture state + bool capture_next_compute; + bool capture_started; + + id capture_scope; + + // command buffer state + int n_cb; // number of extra threads used to submit the command buffers + int n_nodes_0; // number of nodes submitted by the main thread + int n_nodes_1; // remaining number of nodes submitted by the n_cb threads + int n_nodes_per_cb; + + struct ggml_cgraph * gf; + + // the callback given to the thread pool + // TODO: ideally, this should be created once, utilizing the command buffer state above + // for some reason, doing it like this leads to a crash + void (^encode_async)(size_t ith); + + // n_cb command buffers + 1 used by the main thread + id command_buffers[GGML_METAL_MAX_COMMAND_BUFFERS + 1]; + + // abort ggml_metal_graph_compute if callback returns true + ggml_abort_callback abort_callback; + void * abort_callback_data; +}; + +// MSL code +// TODO: move the contents here when ready +// for now it is easier to work in a separate file +// static NSString * const msl_library_source = @"see metal.metal"; + +// Here to assist with NSBundle Path Hack +@interface GGMLMetalClass : NSObject +@end +@implementation GGMLMetalClass +@end + +static void ggml_metal_default_log_callback(enum ggml_log_level level, const char * msg, void * user_data) { + fprintf(stderr, "%s", msg); + + UNUSED(level); + UNUSED(user_data); +} + +ggml_log_callback ggml_metal_log_callback = ggml_metal_default_log_callback; +void * ggml_metal_log_user_data = NULL; + +GGML_ATTRIBUTE_FORMAT(2, 3) +static void ggml_metal_log(enum ggml_log_level level, const char * format, ...){ + if (ggml_metal_log_callback != NULL) { + va_list args; + va_start(args, format); + char buffer[128]; + 
int len = vsnprintf(buffer, 128, format, args); + if (len < 128) { + ggml_metal_log_callback(level, buffer, ggml_metal_log_user_data); + } else { + char* buffer2 = malloc(len+1); + va_end(args); + va_start(args, format); + vsnprintf(buffer2, len+1, format, args); + buffer2[len] = 0; + ggml_metal_log_callback(level, buffer2, ggml_metal_log_user_data); + free(buffer2); + } + va_end(args); + } +} + +static void * ggml_metal_host_malloc(size_t n) { + void * data = NULL; + +#if TARGET_OS_OSX + kern_return_t err = vm_allocate((vm_map_t) mach_task_self(), (void *) &data, n, VM_FLAGS_ANYWHERE); + if (err != KERN_SUCCESS) { + GGML_METAL_LOG_ERROR("%s: error: vm_allocate failed\n", __func__); + return NULL; + } +#else + const int result = posix_memalign((void **) &data, sysconf(_SC_PAGESIZE), n); + if (result != 0) { + GGML_METAL_LOG_ERROR("%s: error: posix_memalign failed\n", __func__); + return NULL; + } +#endif + + return data; +} + +static struct ggml_backend_metal_context * ggml_metal_init(void) { + GGML_METAL_LOG_INFO("%s: allocating\n", __func__); + +#if TARGET_OS_OSX && !GGML_METAL_NDEBUG + // Show all the Metal device instances in the system + NSArray * devices = MTLCopyAllDevices(); + for (id device in devices) { + GGML_METAL_LOG_INFO("%s: found device: %s\n", __func__, [[device name] UTF8String]); + } + [devices release]; // since it was created by a *Copy* C method +#endif + + // Pick and show default Metal device + id device = MTLCreateSystemDefaultDevice(); + GGML_METAL_LOG_INFO("%s: picking default device: %s\n", __func__, [[device name] UTF8String]); + + // Configure context + struct ggml_backend_metal_context * ctx = calloc(1, sizeof(struct ggml_backend_metal_context)); + ctx->device = device; + ctx->queue = [ctx->device newCommandQueue]; + ctx->edesc = MTLComputePassDescriptor.computePassDescriptor; + ctx->edesc.dispatchType = MTLDispatchTypeSerial; + ctx->d_queue = dispatch_queue_create("ggml-metal", DISPATCH_QUEUE_CONCURRENT); + + id metal_library; + + // 
load library + // + // - first check if the library is embedded + // - then check if the library is in the bundle + // - if not found, load the source and compile it + // - if that fails, return NULL + { + NSBundle * bundle = nil; +#ifdef SWIFT_PACKAGE + bundle = SWIFTPM_MODULE_BUNDLE; +#else + bundle = [NSBundle bundleForClass:[GGMLMetalClass class]]; +#endif + + NSError * error = nil; + +#if GGML_METAL_EMBED_LIBRARY + const bool try_metallib = false; +#else + const bool try_metallib = true; +#endif + + NSString * path_lib = [bundle pathForResource:@"default" ofType:@"metallib"]; + if (try_metallib && path_lib != nil) { + // pre-compiled library found + NSURL * libURL = [NSURL fileURLWithPath:path_lib]; + GGML_METAL_LOG_INFO("%s: loading '%s'\n", __func__, [path_lib UTF8String]); + + metal_library = [ctx->device newLibraryWithURL:libURL error:&error]; + if (error) { + GGML_METAL_LOG_ERROR("%s: error: %s\n", __func__, [[error description] UTF8String]); + return NULL; + } + } else { +#if GGML_METAL_EMBED_LIBRARY + GGML_METAL_LOG_INFO("%s: using embedded metal library\n", __func__); + + extern const char ggml_metallib_start[]; + extern const char ggml_metallib_end[]; + + NSString * src = [[NSString alloc] initWithBytes:ggml_metallib_start length:(ggml_metallib_end-ggml_metallib_start) encoding:NSUTF8StringEncoding]; +#else + GGML_METAL_LOG_INFO("%s: default.metallib not found, loading from source\n", __func__); + + NSString * path_source; + NSString * path_resource = [[NSProcessInfo processInfo].environment objectForKey:@"GGML_METAL_PATH_RESOURCES"]; + + GGML_METAL_LOG_INFO("%s: GGML_METAL_PATH_RESOURCES = %s\n", __func__, path_resource ? 
[path_resource UTF8String] : "nil"); + + if (path_resource) { + path_source = [path_resource stringByAppendingPathComponent:@"ggml-metal.metal"]; + } else { + path_source = [bundle pathForResource:@"ggml-metal" ofType:@"metal"]; + } + + if (path_source == nil) { + GGML_METAL_LOG_WARN("%s: error: could not use bundle path to find ggml-metal.metal, falling back to trying cwd\n", __func__); + path_source = @"ggml-metal.metal"; + } + + GGML_METAL_LOG_INFO("%s: loading '%s'\n", __func__, [path_source UTF8String]); + + NSString * src = [NSString stringWithContentsOfFile:path_source encoding:NSUTF8StringEncoding error:&error]; + if (error) { + GGML_METAL_LOG_ERROR("%s: error: %s\n", __func__, [[error description] UTF8String]); + return NULL; + } +#endif // GGML_METAL_EMBED_LIBRARY + + @autoreleasepool { + // dictionary of preprocessor macros + NSMutableDictionary * prep = [NSMutableDictionary dictionary]; + + MTLCompileOptions* options = [MTLCompileOptions new]; + options.preprocessorMacros = prep; + + //[options setFastMathEnabled:false]; + + metal_library = [ctx->device newLibraryWithSource:src options:options error:&error]; + if (error) { + GGML_METAL_LOG_ERROR("%s: error: %s\n", __func__, [[error description] UTF8String]); + return NULL; + } + } + } + } + + // print MTL GPU family: + GGML_METAL_LOG_INFO("%s: GPU name: %s\n", __func__, [[ctx->device name] UTF8String]); + + const NSInteger MTLGPUFamilyMetal3 = 5001; + + // determine max supported GPU family + // https://developer.apple.com/metal/Metal-Shading-Language-Specification.pdf + // https://developer.apple.com/metal/Metal-Feature-Set-Tables.pdf + { + for (int i = MTLGPUFamilyApple1 + 20; i >= MTLGPUFamilyApple1; --i) { + if ([ctx->device supportsFamily:i]) { + GGML_METAL_LOG_INFO("%s: GPU family: MTLGPUFamilyApple%d (%d)\n", __func__, i - (int) MTLGPUFamilyApple1 + 1, i); + break; + } + } + + for (int i = MTLGPUFamilyCommon1 + 5; i >= MTLGPUFamilyCommon1; --i) { + if ([ctx->device supportsFamily:i]) { + 
GGML_METAL_LOG_INFO("%s: GPU family: MTLGPUFamilyCommon%d (%d)\n", __func__, i - (int) MTLGPUFamilyCommon1 + 1, i); + break; + } + } + + for (int i = MTLGPUFamilyMetal3 + 5; i >= MTLGPUFamilyMetal3; --i) { + if ([ctx->device supportsFamily:i]) { + GGML_METAL_LOG_INFO("%s: GPU family: MTLGPUFamilyMetal%d (%d)\n", __func__, i - (int) MTLGPUFamilyMetal3 + 3, i); + break; + } + } + } + + ctx->support_simdgroup_reduction = [ctx->device supportsFamily:MTLGPUFamilyApple7]; + ctx->support_simdgroup_reduction |= [ctx->device supportsFamily:MTLGPUFamilyMetal3]; + + ctx->support_simdgroup_mm = [ctx->device supportsFamily:MTLGPUFamilyApple7]; + + GGML_METAL_LOG_INFO("%s: simdgroup reduction support = %s\n", __func__, ctx->support_simdgroup_reduction ? "true" : "false"); + GGML_METAL_LOG_INFO("%s: simdgroup matrix mul. support = %s\n", __func__, ctx->support_simdgroup_mm ? "true" : "false"); + GGML_METAL_LOG_INFO("%s: hasUnifiedMemory = %s\n", __func__, ctx->device.hasUnifiedMemory ? "true" : "false"); + + ctx->capture_next_compute = false; + ctx->capture_started = false; + ctx->capture_scope = nil; + + ctx->gf = nil; + ctx->encode_async = nil; + for (int i = 0; i < GGML_METAL_MAX_COMMAND_BUFFERS; ++i) { + ctx->command_buffers[i] = nil; + } + +#if TARGET_OS_OSX || (TARGET_OS_IOS && __clang_major__ >= 15) + if (@available(macOS 10.12, iOS 16.0, *)) { + GGML_METAL_LOG_INFO("%s: recommendedMaxWorkingSetSize = %8.2f MB\n", __func__, ctx->device.recommendedMaxWorkingSetSize / 1e6); + } +#elif TARGET_OS_OSX + if (ctx->device.maxTransferRate != 0) { + GGML_METAL_LOG_INFO("%s: maxTransferRate = %8.2f MB/s\n", __func__, ctx->device.maxTransferRate / 1e6); + } else { + GGML_METAL_LOG_INFO("%s: maxTransferRate = built-in GPU\n", __func__); + } +#endif + + // load kernels + { + NSError * error = nil; + + for (int i = 0; i < GGML_METAL_KERNEL_TYPE_COUNT; ++i) { + ctx->kernels[i].pipeline = nil; + } + + /* + GGML_METAL_LOG_INFO("%s: loaded %-40s %16p | th_max = %4d | th_width = %4d\n", 
__func__, "kernel_"#name, (void *) kernel->pipeline, \ + (int) kernel->pipeline.maxTotalThreadsPerThreadgroup, \ + (int) kernel->pipeline.threadExecutionWidth); \ + */ +#define GGML_METAL_ADD_KERNEL(e, name, supported) \ + if (supported) { \ + struct ggml_metal_kernel * kernel = &ctx->kernels[e]; \ + id metal_function = [metal_library newFunctionWithName:@"kernel_"#name]; \ + kernel->pipeline = [ctx->device newComputePipelineStateWithFunction:metal_function error:&error]; \ + [metal_function release]; \ + if (error) { \ + GGML_METAL_LOG_ERROR("%s: error: load pipeline error: %s\n", __func__, [[error description] UTF8String]); \ + [metal_library release]; \ + return NULL; \ + } \ + } else { \ + GGML_METAL_LOG_WARN("%s: skipping %-40s (not supported)\n", __func__, "kernel_"#name); \ + } + + // simd_sum and simd_max requires MTLGPUFamilyApple7 + + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ADD, add, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ADD_ROW, add_row, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SUB, sub, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SUB_ROW, sub_row, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL, mul, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_ROW, mul_row, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIV, div, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIV_ROW, div_row, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_REPEAT_F32, repeat_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_REPEAT_F16, repeat_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_REPEAT_I32, repeat_i32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_REPEAT_I16, repeat_i16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SCALE, scale, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SCALE_4, scale_4, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CLAMP, clamp, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_TANH, tanh, true); + 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_RELU, relu, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SIGMOID, sigmoid, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU, gelu, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU_4, gelu_4, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU_QUICK, gelu_quick, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU_QUICK_4, gelu_quick_4, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SILU, silu, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SILU_4, silu_4, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SOFT_MAX_F16, soft_max_f16, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SOFT_MAX_F16_4, soft_max_f16_4, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SOFT_MAX_F32, soft_max_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SOFT_MAX_F32_4, soft_max_f32_4, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF, diag_mask_inf, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8, diag_mask_inf_8, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_F32, get_rows_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_F16, get_rows_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_0, get_rows_q4_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_1, get_rows_q4_1, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_0, get_rows_q5_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_1, get_rows_q5_1, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q8_0, get_rows_q8_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q2_K, get_rows_q2_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q3_K, get_rows_q3_K, true); + 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_K, get_rows_q4_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_K, get_rows_q5_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q6_K, get_rows_q6_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, get_rows_iq2_xxs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, get_rows_iq2_xs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, get_rows_iq3_xxs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_S, get_rows_iq3_s, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_S, get_rows_iq2_s, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, get_rows_iq1_s, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_M, get_rows_iq1_m, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, get_rows_iq4_nl, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_XS, get_rows_iq4_xs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, get_rows_i32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_RMS_NORM, rms_norm, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GROUP_NORM, group_norm, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_NORM, norm, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SSM_CONV_F32, ssm_conv_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SSM_SCAN_F32, ssm_scan_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F32_F32, mul_mv_f32_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F16, mul_mv_f16_f16, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32, mul_mv_f16_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_1ROW, mul_mv_f16_f32_1row, 
ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_L4, mul_mv_f16_f32_l4, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_0_F32, mul_mv_q4_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_1_F32, mul_mv_q4_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32, mul_mv_q5_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32, mul_mv_q5_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32, mul_mv_q8_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32, mul_mv_q2_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32, mul_mv_q3_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32, mul_mv_q4_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_K_F32, mul_mv_q5_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q6_K_F32, mul_mv_q6_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, mul_mv_iq2_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, mul_mv_iq2_xs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, mul_mv_iq3_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_S_F32, mul_mv_iq3_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_S_F32, mul_mv_iq2_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, mul_mv_iq1_s_f32, 
ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_M_F32, mul_mv_iq1_m_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, mul_mv_iq4_nl_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_XS_F32, mul_mv_iq4_xs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, mul_mv_id_f32_f32, ctx->support_simdgroup_reduction); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, mul_mv_id_f16_f16, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, mul_mv_id_f16_f32, ctx->support_simdgroup_reduction); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_1ROW, mul_mv_id_f16_f32_1row, ctx->support_simdgroup_reduction); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_L4, mul_mv_id_f16_f32_l4, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_0_F32, mul_mv_id_q4_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_1_F32, mul_mv_id_q4_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_0_F32, mul_mv_id_q5_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_1_F32, mul_mv_id_q5_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q8_0_F32, mul_mv_id_q8_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q2_K_F32, mul_mv_id_q2_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q3_K_F32, mul_mv_id_q3_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_K_F32, mul_mv_id_q4_K_f32, ctx->support_simdgroup_reduction); + 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_K_F32, mul_mv_id_q5_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q6_K_F32, mul_mv_id_q6_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, mul_mv_id_iq2_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, mul_mv_id_iq2_xs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, mul_mv_id_iq3_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_S_F32, mul_mv_id_iq3_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_S_F32, mul_mv_id_iq2_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, mul_mv_id_iq1_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_M_F32, mul_mv_id_iq1_m_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, mul_mv_id_iq4_nl_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_XS_F32, mul_mv_id_iq4_xs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, mul_mm_f32_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, mul_mm_f16_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, mul_mm_q4_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_1_F32, mul_mm_q4_1_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_0_F32, mul_mm_q5_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_1_F32, mul_mm_q5_1_f32, 
ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q8_0_F32, mul_mm_q8_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q2_K_F32, mul_mm_q2_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q3_K_F32, mul_mm_q3_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_K_F32, mul_mm_q4_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_K_F32, mul_mm_q5_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q6_K_F32, mul_mm_q6_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, mul_mm_iq2_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, mul_mm_iq2_xs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, mul_mm_iq3_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_S_F32, mul_mm_iq3_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_S_F32, mul_mm_iq2_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, mul_mm_iq1_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_M_F32, mul_mm_iq1_m_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, mul_mm_iq4_nl_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_XS_F32, mul_mm_iq4_xs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, mul_mm_id_f32_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, mul_mm_id_f16_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, 
mul_mm_id_q4_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_1_F32, mul_mm_id_q4_1_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_0_F32, mul_mm_id_q5_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_1_F32, mul_mm_id_q5_1_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q8_0_F32, mul_mm_id_q8_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q2_K_F32, mul_mm_id_q2_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q3_K_F32, mul_mm_id_q3_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_K_F32, mul_mm_id_q4_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_K_F32, mul_mm_id_q5_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q6_K_F32, mul_mm_id_q6_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, mul_mm_id_iq2_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, mul_mm_id_iq2_xs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, mul_mm_id_iq3_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_S_F32, mul_mm_id_iq3_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_S_F32, mul_mm_id_iq2_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, mul_mm_id_iq1_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_M_F32, mul_mm_id_iq1_m_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, 
mul_mm_id_iq4_nl_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_XS_F32, mul_mm_id_iq4_xs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_NORM_F32, rope_norm_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_NORM_F16, rope_norm_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_NEOX_F32, rope_neox_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_NEOX_F16, rope_neox_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_F16, im2col_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_F32, im2col_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_UPSCALE_F32, upscale_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_PAD_F32, pad_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_UNPAD_F32, unpad_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_TIMESTEP_EMBEDDING_F32, timestep_embedding_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARANGE_F32, arange_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, argsort_f32_i32_asc, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC, argsort_f32_i32_desc, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32, leaky_relu_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H64, flash_attn_ext_f16_h64, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H80, flash_attn_ext_f16_h80, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H96, flash_attn_ext_f16_h96, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H112, flash_attn_ext_f16_h112, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H128, flash_attn_ext_f16_h128, ctx->support_simdgroup_mm); + 
//GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H256, flash_attn_ext_f16_h256, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_F16_H128, flash_attn_ext_vec_f16_h128, ctx->support_simdgroup_reduction); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_F16_H256, flash_attn_ext_vec_f16_h256, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_F16, cpy_f32_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_F32, cpy_f32_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F16_F16, cpy_f16_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F16_F32, cpy_f16_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0, cpy_f32_q8_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_0, cpy_f32_q4_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_1, cpy_f32_q4_1, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_0, cpy_f32_q5_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_1, cpy_f32_q5_1, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_IQ4_NL, cpy_f32_iq4_nl, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CONCAT, concat, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SQR, sqr, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SQRT, sqrt, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SIN, sin, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_COS, cos, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SUM_ROWS, sum_rows, true); + } + + [metal_library release]; + + return ctx; +} + +static void ggml_metal_free(struct ggml_backend_metal_context * ctx) { + GGML_METAL_LOG_INFO("%s: deallocating\n", __func__); + + for (int i = 0; i < GGML_METAL_KERNEL_TYPE_COUNT; ++i) { + [ctx->kernels[i].pipeline release]; + } + + [ctx->queue release]; + [ctx->device release]; + + dispatch_release(ctx->d_queue); + + 
free(ctx); +} + +// temporarily defined here for compatibility between ggml-backend and the old API + +struct ggml_backend_metal_buffer { + void * data; + size_t size; + + id metal; +}; + +struct ggml_backend_metal_buffer_context { + void * all_data; + size_t all_size; + bool owned; + + // multiple buffers are used only to avoid the maximum buffer size limitation when using mmap + int n_buffers; + struct ggml_backend_metal_buffer buffers[GGML_METAL_MAX_BUFFERS]; +}; + +// finds the Metal buffer that contains the tensor data on the GPU device +// the assumption is that there is 1-to-1 mapping between the host and device memory buffers, so we can find the +// Metal buffer based on the host memory pointer +// +static id ggml_metal_get_buffer(struct ggml_tensor * t, size_t * offs) { + //GGML_METAL_LOG_INFO("%s: data tensor '%16s', offs_data = %8ld, offs_eval = %8ld, offs_cach = %8ld\n", __func__, t->name, offs_data, offs_eval, offs_cach); + + const int64_t tsize = ggml_nbytes(t); + + ggml_backend_buffer_t buffer = t->view_src ? 
t->view_src->buffer : t->buffer; + + struct ggml_backend_metal_buffer_context * buf_ctx = (struct ggml_backend_metal_buffer_context *) buffer->context; + + // find the view that contains the tensor fully + for (int i = 0; i < buf_ctx->n_buffers; ++i) { + const int64_t ioffs = (int64_t) t->data - (int64_t) buf_ctx->buffers[i].data; + + //GGML_METAL_LOG_INFO("ioffs = %10ld, tsize = %10ld, sum = %10ld, buf_ctx->buffers[%d].size = %10ld\n", ioffs, tsize, ioffs + tsize, i, buf_ctx->buffers[i].size); + if (ioffs >= 0 && ioffs + tsize <= (int64_t) buf_ctx->buffers[i].size) { + *offs = (size_t) ioffs; + + //GGML_METAL_LOG_INFO("%s: tensor '%16s', offs = %8ld\n", __func__, t->name, *offs); + + return buf_ctx->buffers[i].metal; + } + } + + GGML_METAL_LOG_ERROR("%s: error: tensor '%s' buffer is nil\n", __func__, t->name); + + return nil; +} + +static bool ggml_metal_supports_op(const struct ggml_backend_metal_context * ctx, const struct ggml_tensor * op) { + for (size_t i = 0, n = 3; i < n; ++i) { + if (op->src[i] != NULL && op->src[i]->type == GGML_TYPE_BF16) { + return false; + } + } + + switch (op->op) { + case GGML_OP_UNARY: + switch (ggml_get_unary_op(op)) { + case GGML_UNARY_OP_TANH: + case GGML_UNARY_OP_RELU: + case GGML_UNARY_OP_SIGMOID: + case GGML_UNARY_OP_GELU: + case GGML_UNARY_OP_GELU_QUICK: + case GGML_UNARY_OP_SILU: + return ggml_is_contiguous(op->src[0]); + default: + return false; + } + case GGML_OP_NONE: + case GGML_OP_RESHAPE: + case GGML_OP_VIEW: + case GGML_OP_TRANSPOSE: + case GGML_OP_PERMUTE: + case GGML_OP_CONCAT: + case GGML_OP_ADD: + case GGML_OP_SUB: + case GGML_OP_ACC: + case GGML_OP_MUL: + case GGML_OP_DIV: + case GGML_OP_REPEAT: + case GGML_OP_SCALE: + case GGML_OP_CLAMP: + return true; + case GGML_OP_SQR: + case GGML_OP_SQRT: + case GGML_OP_SIN: + case GGML_OP_COS: + return ggml_is_contiguous(op->src[0]); + case GGML_OP_SUM_ROWS: + case GGML_OP_SOFT_MAX: + case GGML_OP_RMS_NORM: + case GGML_OP_GROUP_NORM: + return 
ctx->support_simdgroup_reduction; + case GGML_OP_NORM: + case GGML_OP_ROPE: + return true; + case GGML_OP_IM2COL: + return op->src[0]->type == GGML_TYPE_F16; + case GGML_OP_POOL_1D: + case GGML_OP_POOL_2D: + return false; + case GGML_OP_UPSCALE: + case GGML_OP_PAD: + case GGML_OP_UNPAD: + case GGML_OP_ARANGE: + case GGML_OP_TIMESTEP_EMBEDDING: + case GGML_OP_ARGSORT: + case GGML_OP_LEAKY_RELU: + return true; + case GGML_OP_FLASH_ATTN_EXT: + if (op->src[1]->type != GGML_TYPE_F16) { + return false; + } + if (op->src[2]->type != GGML_TYPE_F16) { + return false; + } + if (op->src[0]->ne[0] == 256) { + return false; + } + return ctx->support_simdgroup_mm; // TODO: over-restricted for vec-kernels + case GGML_OP_SSM_CONV: + case GGML_OP_SSM_SCAN: + return true; + case GGML_OP_MUL_MAT: + case GGML_OP_MUL_MAT_ID: + return ctx->support_simdgroup_reduction && + (op->src[0]->type != GGML_TYPE_F32 || op->src[1]->type == GGML_TYPE_F32); + case GGML_OP_CPY: + case GGML_OP_DUP: + case GGML_OP_CONT: + { + switch (op->src[0]->type) { + case GGML_TYPE_F32: + switch (op->type) { + case GGML_TYPE_F32: + case GGML_TYPE_F16: + case GGML_TYPE_Q8_0: + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: + case GGML_TYPE_Q5_0: + case GGML_TYPE_Q5_1: + case GGML_TYPE_IQ4_NL: + return true; + default: + return false; + } + case GGML_TYPE_F16: + switch (op->type) { + case GGML_TYPE_F32: + case GGML_TYPE_F16: + return true; + default: + return false; + } + default: + return false; + }; + } + case GGML_OP_DIAG_MASK_INF: + case GGML_OP_GET_ROWS: + { + return op->ne[3] == 1; + } + default: + return false; + } +} + +static void ggml_metal_encode_node( + struct ggml_backend_metal_context * ctx, + int idx, + id encoder) { + struct ggml_cgraph * gf = ctx->gf; + + struct ggml_tensor * node = ggml_graph_node(gf, idx); + + //GGML_METAL_LOG_INFO("%s: encoding node %3d, op = %8s\n", __func__, idx, ggml_op_name(node->op)); + + struct ggml_tensor * src0 = node->src[0]; + struct ggml_tensor * src1 = node->src[1]; + 
struct ggml_tensor * src2 = node->src[2]; + struct ggml_tensor * dst = node; + + if (ggml_is_empty(dst)) { + return; + } + + switch (dst->op) { + case GGML_OP_NONE: + case GGML_OP_RESHAPE: + case GGML_OP_VIEW: + case GGML_OP_TRANSPOSE: + case GGML_OP_PERMUTE: + { + // noop -> next node + } return; + default: + { + } break; + } + + if (!ggml_metal_supports_op(ctx, dst)) { + GGML_METAL_LOG_ERROR("%s: error: unsupported op '%s'\n", __func__, ggml_op_desc(dst)); + GGML_ABORT("unsupported op"); + } + + const int64_t ne00 = src0 ? src0->ne[0] : 0; + const int64_t ne01 = src0 ? src0->ne[1] : 0; + const int64_t ne02 = src0 ? src0->ne[2] : 0; + const int64_t ne03 = src0 ? src0->ne[3] : 0; + + const uint64_t nb00 = src0 ? src0->nb[0] : 0; + const uint64_t nb01 = src0 ? src0->nb[1] : 0; + const uint64_t nb02 = src0 ? src0->nb[2] : 0; + const uint64_t nb03 = src0 ? src0->nb[3] : 0; + + const int64_t ne10 = src1 ? src1->ne[0] : 0; + const int64_t ne11 = src1 ? src1->ne[1] : 0; + const int64_t ne12 = src1 ? src1->ne[2] : 0; + const int64_t ne13 = src1 ? src1->ne[3] : 0; + + const uint64_t nb10 = src1 ? src1->nb[0] : 0; + const uint64_t nb11 = src1 ? src1->nb[1] : 0; + const uint64_t nb12 = src1 ? src1->nb[2] : 0; + const uint64_t nb13 = src1 ? src1->nb[3] : 0; + + const int64_t ne20 = src2 ? src2->ne[0] : 0; + const int64_t ne21 = src2 ? src2->ne[1] : 0; + const int64_t ne22 = src2 ? src2->ne[2] : 0; GGML_UNUSED(ne22); + const int64_t ne23 = src2 ? src2->ne[3] : 0; GGML_UNUSED(ne23); + + const uint64_t nb20 = src2 ? src2->nb[0] : 0; GGML_UNUSED(nb20); + const uint64_t nb21 = src2 ? src2->nb[1] : 0; + const uint64_t nb22 = src2 ? src2->nb[2] : 0; + const uint64_t nb23 = src2 ? src2->nb[3] : 0; + + const int64_t ne0 = dst ? dst->ne[0] : 0; + const int64_t ne1 = dst ? dst->ne[1] : 0; + const int64_t ne2 = dst ? dst->ne[2] : 0; + const int64_t ne3 = dst ? dst->ne[3] : 0; + + const uint64_t nb0 = dst ? dst->nb[0] : 0; + const uint64_t nb1 = dst ? 
dst->nb[1] : 0; + const uint64_t nb2 = dst ? dst->nb[2] : 0; + const uint64_t nb3 = dst ? dst->nb[3] : 0; + + const enum ggml_type src0t = src0 ? src0->type : GGML_TYPE_COUNT; + const enum ggml_type src1t = src1 ? src1->type : GGML_TYPE_COUNT; + const enum ggml_type dstt = dst ? dst->type : GGML_TYPE_COUNT; + + size_t offs_src0 = 0; + size_t offs_src1 = 0; + size_t offs_src2 = 0; + size_t offs_dst = 0; + + id id_src0 = src0 ? ggml_metal_get_buffer(src0, &offs_src0) : nil; + id id_src1 = src1 ? ggml_metal_get_buffer(src1, &offs_src1) : nil; + id id_src2 = src2 ? ggml_metal_get_buffer(src2, &offs_src2) : nil; + id id_dst = dst ? ggml_metal_get_buffer(dst, &offs_dst) : nil; + + //GGML_METAL_LOG_INFO("%s: op - %s\n", __func__, ggml_op_name(dst->op)); + //if (src0) { + // GGML_METAL_LOG_INFO("%s: src0 - %4s [%5lld, %5lld, %5lld], %d, %s\n", __func__, ggml_type_name(src0t), ne00, ne01, ne02, + // ggml_is_contiguous(src0), src0->name); + //} + //if (src1) { + // GGML_METAL_LOG_INFO("%s: src1 - %4s [%5lld, %5lld, %5lld], %d, %s\n", __func__, ggml_type_name(src1t), ne10, ne11, ne12, + // ggml_is_contiguous(src1), src1->name); + //} + //if (dst) { + // GGML_METAL_LOG_INFO("%s: dst - %4s [%5lld, %5lld, %5lld], 1, %s\n", __func__, ggml_type_name(dstt), ne0, ne1, ne2, + // dst->name); + //} + + switch (dst->op) { + case GGML_OP_CONCAT: + { + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CONCAT].pipeline; + + const int32_t dim = ((const int32_t *) dst->op_params)[0]; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; + 
[encoder setBytes:&nb01 length:sizeof(nb01) atIndex:8]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:9]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:10]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:12]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; + [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:19]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:20]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:24]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:25]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:26]; + [encoder setBytes:&dim length:sizeof(dim) atIndex:27]; + + const int nth = MIN(1024, ne0); + + [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_ADD: + case GGML_OP_SUB: + case GGML_OP_MUL: + case GGML_OP_DIV: + { + GGML_ASSERT(src0t == GGML_TYPE_F32); + GGML_ASSERT(src1t == GGML_TYPE_F32); + + const size_t offs = 0; + + bool bcast_row = false; + + int64_t nb = ne00; // used by the "row" kernels + + id pipeline = nil; + + if (ggml_nelements(src1) == ne10 && ggml_is_contiguous(src1) && ne00 % 4 == 0 && ne10 % 4 == 0) { + GGML_ASSERT(ggml_is_contiguous(src0)); + + // src1 is a row + GGML_ASSERT(ne11 == 1); + + nb = ne00 / 4; + switch (dst->op) { + case GGML_OP_ADD: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD_ROW].pipeline; break; + case GGML_OP_SUB: pipeline = 
ctx->kernels[GGML_METAL_KERNEL_TYPE_SUB_ROW].pipeline; break; + case GGML_OP_MUL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_ROW].pipeline; break; + case GGML_OP_DIV: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIV_ROW].pipeline; break; + default: GGML_ABORT("fatal error"); + } + + bcast_row = true; + } else { + switch (dst->op) { + case GGML_OP_ADD: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD].pipeline; break; + case GGML_OP_SUB: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SUB].pipeline; break; + case GGML_OP_MUL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL].pipeline; break; + case GGML_OP_DIV: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIV].pipeline; break; + default: GGML_ABORT("fatal error"); + } + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:8]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:9]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:10]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:12]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; + [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:19]; + [encoder setBytes:&ne1 
length:sizeof(ne1) atIndex:20]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:24]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:25]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:26]; + [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; + [encoder setBytes:&nb length:sizeof(nb) atIndex:28]; + + if (bcast_row) { + const int64_t n = ggml_nelements(dst)/4; + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } else { + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne0); + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } + } break; + case GGML_OP_REPEAT: + { + id pipeline; + + switch (src0t) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_REPEAT_F32].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_REPEAT_F16].pipeline; break; + case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_REPEAT_I32].pipeline; break; + case GGML_TYPE_I16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_REPEAT_I16].pipeline; break; + default: GGML_ABORT("fatal error"); + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; + [encoder 
setBytes:&ne0 length:sizeof(ne0) atIndex:10]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; + + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne0); + + [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_ACC: + { + GGML_ASSERT(src0t == GGML_TYPE_F32); + GGML_ASSERT(src1t == GGML_TYPE_F32); + GGML_ASSERT(dstt == GGML_TYPE_F32); + + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(ggml_is_contiguous(src1)); + + const size_t pnb1 = ((const int32_t *) dst->op_params)[0]; + const size_t pnb2 = ((const int32_t *) dst->op_params)[1]; + const size_t pnb3 = ((const int32_t *) dst->op_params)[2]; + const size_t offs = ((const int32_t *) dst->op_params)[3]; + + const bool inplace = (bool) ((const int32_t *) dst->op_params)[4]; + + if (!inplace) { + // run a separete kernel to cpy src->dst + // not sure how to avoid this + // TODO: make a simpler cpy_bytes kernel + + const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:8]; + 
[encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof( int64_t) atIndex:10]; + [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:17]; + + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne00); + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } + + const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; + [encoder setBytes:&pnb1 length:sizeof(pnb1) atIndex:8]; + [encoder setBytes:&pnb2 length:sizeof(pnb2) atIndex:9]; + [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:10]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:12]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; + [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; + [encoder 
setBytes:&ne0 length:sizeof(ne0) atIndex:19]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:20]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; + [encoder setBytes:&pnb1 length:sizeof(pnb1) atIndex:24]; + [encoder setBytes:&pnb2 length:sizeof(pnb2) atIndex:25]; + [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:26]; + [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; + + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne00); + + [encoder dispatchThreadgroups:MTLSizeMake(ne11, ne12, ne13) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_SCALE: + { + GGML_ASSERT(ggml_is_contiguous(src0)); + + float scale; + memcpy(&scale, dst->op_params, sizeof(scale)); + + int64_t n = ggml_nelements(dst); + + id pipeline = nil; + + if (n % 4 == 0) { + n /= 4; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SCALE_4].pipeline; + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SCALE].pipeline; + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&scale length:sizeof(scale) atIndex:2]; + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_CLAMP: + { + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CLAMP].pipeline; + + float min; + float max; + memcpy(&min, ((const int32_t *) dst->op_params) + 0, sizeof(float)); + memcpy(&max, ((const int32_t *) dst->op_params) + 1, sizeof(float)); + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&min length:sizeof(min) atIndex:2]; + [encoder setBytes:&max length:sizeof(max) atIndex:3]; + + const int64_t n = ggml_nelements(dst); + 
+ [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_UNARY: + switch (ggml_get_unary_op(node)) { + // we are not taking into account the strides, so for now require contiguous tensors + GGML_ASSERT(ggml_is_contiguous(src0)); + + case GGML_UNARY_OP_TANH: + { + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_TANH].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_UNARY_OP_RELU: + { + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_RELU].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_UNARY_OP_SIGMOID: + { + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SIGMOID].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_UNARY_OP_GELU: + { + int64_t n = ggml_nelements(dst); + + id pipeline = nil; + + if (n % 4 == 0) { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU_4].pipeline; + n /= 4; + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU].pipeline; + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + [encoder 
dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_UNARY_OP_GELU_QUICK: + { + int64_t n = ggml_nelements(dst); + + id pipeline = nil; + + if (n % 4 == 0) { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU_QUICK_4].pipeline; + n /= 4; + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU_QUICK].pipeline; + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_UNARY_OP_SILU: + { + int64_t n = ggml_nelements(dst); + + id pipeline = nil; + + if (n % 4 == 0) { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SILU_4].pipeline; + n /= 4; + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SILU].pipeline; + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + default: + { + GGML_METAL_LOG_WARN("%s: node %3d, op = %8s not implemented\n", __func__, idx, ggml_op_name(dst->op)); + GGML_ABORT("fatal error"); + } + } break; + case GGML_OP_SQR: + { + GGML_ASSERT(ggml_is_contiguous(src0)); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SQR].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_SQRT: + { + GGML_ASSERT(ggml_is_contiguous(src0)); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SQRT].pipeline; + + [encoder setComputePipelineState:pipeline]; + 
[encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_SIN: + { + GGML_ASSERT(ggml_is_contiguous(src0)); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SIN].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_COS: + { + GGML_ASSERT(ggml_is_contiguous(src0)); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_COS].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_SUM_ROWS: + { + GGML_ASSERT(src0->nb[0] == ggml_type_size(src0->type)); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SUM_ROWS].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; + [encoder setBytes:&ne10 length:sizeof(ne10) 
atIndex:10]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:11]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:12]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:13]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:14]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:15]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:16]; + [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:17]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:18]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:19]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:20]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:21]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:22]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:23]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:24]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:25]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_SOFT_MAX: + { + GGML_ASSERT(!src1 || src1->type == GGML_TYPE_F16 || src1->type == GGML_TYPE_F32); + + int nth = 32; // SIMD width + + id pipeline = nil; + + const bool use_f16 = (src1 && src1->type == GGML_TYPE_F16); + + if (ne00%4 == 0) { + while (nth < ne00/4 && nth*ne01*ne02*ne03 < 256) { + nth *= 2; + } + if (use_f16) { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX_F16_4].pipeline; + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX_F32_4].pipeline; + } + } else { + while (nth < ne00 && nth*ne01*ne02*ne03 < 256) { + nth *= 2; + } + if (use_f16) { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX_F16].pipeline; + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX_F32].pipeline; + } + } + + float scale; + float max_bias; + + memcpy(&scale, ((const int32_t *) dst->op_params) + 0, sizeof(scale)); + memcpy(&max_bias, ((const int32_t *) dst->op_params) + 1, sizeof(max_bias)); + + const int64_t nrows_x = 
ggml_nrows(src0); + const int64_t nrows_y = src0->ne[1]; + + const uint32_t n_head = nrows_x/nrows_y; + const uint32_t n_head_log2 = 1u << (uint32_t) floorf(log2f((float) n_head)); + + const float m0 = powf(2.0f, -(max_bias ) / n_head_log2); + const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_head_log2); + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + if (id_src1) { + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + } else { + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:1]; + } + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&scale length:sizeof(scale) atIndex:6]; + [encoder setBytes:&max_bias length:sizeof(max_bias) atIndex:7]; + [encoder setBytes:&m0 length:sizeof(m0) atIndex:8]; + [encoder setBytes:&m1 length:sizeof(m1) atIndex:9]; + [encoder setBytes:&n_head_log2 length:sizeof(n_head_log2) atIndex:10]; + [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01*ne02*ne03, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_DIAG_MASK_INF: + { + const int n_past = ((const int32_t *)(dst->op_params))[0]; + + id pipeline = nil; + + if (ne00%8 == 0) { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8].pipeline; + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF].pipeline; + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&n_past length:sizeof(int) atIndex:4]; + + if (ne00%8 == 0) { + [encoder 
dispatchThreadgroups:MTLSizeMake(ne00*ne01*ne02/8, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } + else { + [encoder dispatchThreadgroups:MTLSizeMake(ne00, ne01, ne02) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } + } break; + case GGML_OP_SSM_CONV: + { + GGML_ASSERT(src0t == GGML_TYPE_F32); + GGML_ASSERT(src1t == GGML_TYPE_F32); + + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(ggml_is_contiguous(src1)); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SSM_CONV_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:9]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:10]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:11]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:12]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:13]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:14]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:15]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:16]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:17]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:18]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne1, ne02) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_SSM_SCAN: + { + struct ggml_tensor * src3 = node->src[3]; + struct ggml_tensor * src4 = node->src[4]; + struct ggml_tensor * src5 = node->src[5]; + + GGML_ASSERT(src3); + GGML_ASSERT(src4); + GGML_ASSERT(src5); + + 
size_t offs_src3 = 0; + size_t offs_src4 = 0; + size_t offs_src5 = 0; + + id id_src3 = src3 ? ggml_metal_get_buffer(src3, &offs_src3) : nil; + id id_src4 = src4 ? ggml_metal_get_buffer(src4, &offs_src4) : nil; + id id_src5 = src5 ? ggml_metal_get_buffer(src5, &offs_src5) : nil; + + const int64_t ne30 = src3->ne[0]; GGML_UNUSED(ne30); + const int64_t ne31 = src3->ne[1]; GGML_UNUSED(ne31); + + const uint64_t nb30 = src3->nb[0]; + const uint64_t nb31 = src3->nb[1]; + + const int64_t ne40 = src4->ne[0]; GGML_UNUSED(ne40); + const int64_t ne41 = src4->ne[1]; GGML_UNUSED(ne41); + const int64_t ne42 = src4->ne[2]; GGML_UNUSED(ne42); + + const uint64_t nb40 = src4->nb[0]; + const uint64_t nb41 = src4->nb[1]; + const uint64_t nb42 = src4->nb[2]; + + const int64_t ne50 = src5->ne[0]; GGML_UNUSED(ne50); + const int64_t ne51 = src5->ne[1]; GGML_UNUSED(ne51); + const int64_t ne52 = src5->ne[2]; GGML_UNUSED(ne52); + + const uint64_t nb50 = src5->nb[0]; + const uint64_t nb51 = src5->nb[1]; + const uint64_t nb52 = src5->nb[2]; + + const int64_t d_state = ne00; + const int64_t d_inner = ne01; + const int64_t n_seq_tokens = ne11; + const int64_t n_seqs = ne02; + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SSM_SCAN_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_src2 offset:offs_src2 atIndex:2]; + [encoder setBuffer:id_src3 offset:offs_src3 atIndex:3]; + [encoder setBuffer:id_src4 offset:offs_src4 atIndex:4]; + [encoder setBuffer:id_src5 offset:offs_src5 atIndex:5]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:6]; + + [encoder setBytes:&d_state length:sizeof(d_state) atIndex:7]; + [encoder setBytes:&d_inner length:sizeof(d_inner) atIndex:8]; + [encoder setBytes:&n_seq_tokens length:sizeof(n_seq_tokens) atIndex:9]; + [encoder setBytes:&n_seqs length:sizeof(n_seqs) atIndex:10]; + + [encoder setBytes:&nb00 
length:sizeof(nb00) atIndex:11]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:12]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:13]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:14]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:15]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:16]; + [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:17]; + [encoder setBytes:&nb20 length:sizeof(nb20) atIndex:18]; + [encoder setBytes:&nb21 length:sizeof(nb21) atIndex:19]; + [encoder setBytes:&nb22 length:sizeof(nb22) atIndex:20]; + [encoder setBytes:&nb30 length:sizeof(nb30) atIndex:21]; + [encoder setBytes:&nb31 length:sizeof(nb31) atIndex:22]; + [encoder setBytes:&nb40 length:sizeof(nb40) atIndex:23]; + [encoder setBytes:&nb41 length:sizeof(nb41) atIndex:24]; + [encoder setBytes:&nb42 length:sizeof(nb42) atIndex:25]; + [encoder setBytes:&nb50 length:sizeof(nb50) atIndex:26]; + [encoder setBytes:&nb51 length:sizeof(nb51) atIndex:27]; + [encoder setBytes:&nb52 length:sizeof(nb52) atIndex:28]; + + [encoder dispatchThreadgroups:MTLSizeMake(d_inner, n_seqs, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_MUL_MAT: + { + GGML_ASSERT(ne00 == ne10); + + GGML_ASSERT(ne12 % ne02 == 0); + GGML_ASSERT(ne13 % ne03 == 0); + + const uint r2 = ne12/ne02; + const uint r3 = ne13/ne03; + + // find the break-even point where the matrix-matrix kernel becomes more efficient compared + // to the matrix-vector kernel + int ne11_mm_min = 1; + + // the numbers below are measured on M2 Ultra for 7B and 13B models + // these numbers do not translate to other devices or model sizes + // TODO: need to find a better approach + switch (src0t) { + case GGML_TYPE_F16: ne11_mm_min = 2; break; + case GGML_TYPE_Q8_0: ne11_mm_min = 7; break; + case GGML_TYPE_Q2_K: ne11_mm_min = 15; break; + case GGML_TYPE_Q3_K: ne11_mm_min = 7; break; + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: ne11_mm_min = 15; break; + case GGML_TYPE_Q4_K: ne11_mm_min = 11; 
break; + case GGML_TYPE_Q5_0: // not tested yet + case GGML_TYPE_Q5_1: ne11_mm_min = 13; break; // not tested yet + case GGML_TYPE_Q5_K: ne11_mm_min = 7; break; + case GGML_TYPE_Q6_K: ne11_mm_min = 7; break; + default: ne11_mm_min = 1; break; + } + + // for now the matrix-matrix multiplication kernel only works on A14+/M1+ SoCs + // AMD GPU and older A-chips will reuse matrix-vector multiplication kernel + if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && + !ggml_is_transposed(src0) && + !ggml_is_transposed(src1) && + src1t == GGML_TYPE_F32 && + ne00 % 32 == 0 && ne00 >= 64 && + (ne11 > ne11_mm_min || (ggml_is_quantized(src0t) && ne12 > 1))) { + //printf("matrix: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + + // some Metal matrix data types require aligned pointers + // ref: https://developer.apple.com/metal/Metal-Shading-Language-Specification.pdf (Table 2.5) + switch (src0->type) { + case GGML_TYPE_F32: GGML_ASSERT(nb01 % 16 == 0); break; + case GGML_TYPE_F16: GGML_ASSERT(nb01 % 8 == 0); break; + default: break; + } + + id pipeline = nil; + + switch (src0->type) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32 ].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32 ].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32 ].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_1_F32 ].pipeline; break; + case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_0_F32 ].pipeline; break; + case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_1_F32 ].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q8_0_F32 ].pipeline; break; + case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q2_K_F32 ].pipeline; break; + case GGML_TYPE_Q3_K: 
pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q3_K_F32 ].pipeline; break; + case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_K_F32 ].pipeline; break; + case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_K_F32 ].pipeline; break; + case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q6_K_F32 ].pipeline; break; + case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32].pipeline; break; + case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32 ].pipeline; break; + case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32].pipeline; break; + case GGML_TYPE_IQ3_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_S_F32 ].pipeline; break; + case GGML_TYPE_IQ2_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_S_F32 ].pipeline; break; + case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32 ].pipeline; break; + case GGML_TYPE_IQ1_M: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_M_F32 ].pipeline; break; + case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32 ].pipeline; break; + case GGML_TYPE_IQ4_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_XS_F32 ].pipeline; break; + default: GGML_ABORT("MUL MAT-MAT not implemented"); + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:5]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:6]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:7]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:8]; + 
[encoder setBytes:&nb11 length:sizeof(nb11) atIndex:9]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:10]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:11]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:12]; + [encoder setBytes:&r2 length:sizeof(r2) atIndex:13]; + [encoder setBytes:&r3 length:sizeof(r3) atIndex:14]; + [encoder setThreadgroupMemoryLength:8192 atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake( (ne11 + 31)/32, (ne01 + 63)/64, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; + } else { + int nth0 = 32; + int nth1 = 1; + int nrows = 1; + //printf("vector: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + + id pipeline = nil; + + // use custom matrix x vector kernel + switch (src0t) { + case GGML_TYPE_F32: + { + GGML_ASSERT(src1t == GGML_TYPE_F32); + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F32_F32].pipeline; + nrows = 4; + } break; + case GGML_TYPE_F16: + { + nth0 = 32; + nth1 = 1; + if (src1t == GGML_TYPE_F32) { + if (ne11 * ne12 < 4) { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_1ROW].pipeline; + } else if (ne00 >= 128 && ne01 >= 8 && ne00%4 == 0) { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_L4].pipeline; + nrows = ne11; + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32].pipeline; + nrows = 4; + } + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F16].pipeline; + nrows = 4; + } + } break; + case GGML_TYPE_Q4_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_0_F32].pipeline; + } break; + case GGML_TYPE_Q4_1: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_1_F32].pipeline; + } break; + case GGML_TYPE_Q5_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32].pipeline; + } break; + case GGML_TYPE_Q5_1: + { + nth0 = 8; + nth1 = 8; + pipeline = 
ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32].pipeline; + } break; + case GGML_TYPE_Q8_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32].pipeline; + } break; + case GGML_TYPE_Q2_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32].pipeline; + } break; + case GGML_TYPE_Q3_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32].pipeline; + } break; + case GGML_TYPE_Q4_K: + { + nth0 = 4; //1; + nth1 = 8; //32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32].pipeline; + } break; + case GGML_TYPE_Q5_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_K_F32].pipeline; + } break; + case GGML_TYPE_Q6_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q6_K_F32].pipeline; + } break; + case GGML_TYPE_IQ2_XXS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32].pipeline; + } break; + case GGML_TYPE_IQ2_XS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32].pipeline; + } break; + case GGML_TYPE_IQ3_XXS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32].pipeline; + } break; + case GGML_TYPE_IQ3_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_S_F32].pipeline; + } break; + case GGML_TYPE_IQ2_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_S_F32].pipeline; + } break; + case GGML_TYPE_IQ1_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32].pipeline; + } break; + case GGML_TYPE_IQ1_M: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_M_F32].pipeline; + } break; + case GGML_TYPE_IQ4_NL: + { + nth0 = 4; + nth1 = 16; + pipeline = 
ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32].pipeline; + } break; + case GGML_TYPE_IQ4_XS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_XS_F32].pipeline; + } break; + default: + { + GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); + GGML_ABORT("not implemented"); + } + }; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:9]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:10]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:11]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:12]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:13]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:14]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:15]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:16]; + [encoder setBytes:&r2 length:sizeof(r2) atIndex:17]; + [encoder setBytes:&r3 length:sizeof(r3) atIndex:18]; + + if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_Q4_1 || src0t == GGML_TYPE_Q5_0 || + src0t == GGML_TYPE_Q5_1 || src0t == GGML_TYPE_Q8_0 || src0t == GGML_TYPE_Q2_K || + src0t == GGML_TYPE_IQ1_S || src0t == GGML_TYPE_IQ1_M || src0t == GGML_TYPE_IQ2_S) { + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_IQ2_XXS || src0t == GGML_TYPE_IQ2_XS) { + const int mem_size = src0t == GGML_TYPE_IQ2_XXS ? 
256*8+128 : 512*8+128; + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_IQ3_XXS || src0t == GGML_TYPE_IQ3_S) { + const int mem_size = src0t == GGML_TYPE_IQ3_XXS ? 256*4+128 : 512*4; + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_IQ4_NL || src0t == GGML_TYPE_IQ4_XS) { + const int mem_size = 32*sizeof(float); + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_Q4_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_Q3_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_Q5_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_Q6_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 1)/2, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } else { + const int64_t ny = (ne11 + nrows - 1)/nrows; + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ny, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + } + } break; + case GGML_OP_MUL_MAT_ID: + { + const int n_as = src0->ne[2]; + + // src2 = ids + const enum ggml_type src2t = src2->type; GGML_UNUSED(src2t); + + GGML_ASSERT(src2t == GGML_TYPE_I32); + + GGML_ASSERT(!ggml_is_transposed(src0)); + GGML_ASSERT(!ggml_is_transposed(src1)); + + GGML_ASSERT(src1t == 
GGML_TYPE_F32); + + // find the break-even point where the matrix-matrix kernel becomes more efficient compared + // to the matrix-vector kernel + // ne20 = n_used_experts + // ne21 = n_rows + const int dst_rows = ne20*ne21; + const int dst_rows_min = n_as; + const int dst_rows_max = (ctx->device.maxThreadgroupMemoryLength - 32 - 8192)/4; + + // max size of the rowids array in the kernel shared buffer + GGML_ASSERT(dst_rows <= dst_rows_max); + + // for now the matrix-matrix multiplication kernel only works on A14+/M1+ SoCs + // AMD GPU and older A-chips will reuse matrix-vector multiplication kernel + // !!! + // TODO: for now, always use mat-vec kernels until we figure out how to improve the + // indirect matrix multiplication + // !!! + if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && + ne00 % 32 == 0 && ne00 >= 64 && + dst_rows > dst_rows_min) { + + // some Metal matrix data types require aligned pointers + // ref: https://developer.apple.com/metal/Metal-Shading-Language-Specification.pdf (Table 2.5) + switch (src0->type) { + case GGML_TYPE_F32: GGML_ASSERT(nb01 % 16 == 0); break; + case GGML_TYPE_F16: GGML_ASSERT(nb01 % 8 == 0); break; + default: break; + } + + id pipeline = nil; + + switch (src0->type) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32 ].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32 ].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32 ].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_1_F32 ].pipeline; break; + case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_0_F32 ].pipeline; break; + case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_1_F32 ].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q8_0_F32 ].pipeline; break; + case 
GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q2_K_F32 ].pipeline; break; + case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q3_K_F32 ].pipeline; break; + case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_K_F32 ].pipeline; break; + case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_K_F32 ].pipeline; break; + case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q6_K_F32 ].pipeline; break; + case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32].pipeline; break; + case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32 ].pipeline; break; + case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32].pipeline; break; + case GGML_TYPE_IQ3_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_S_F32 ].pipeline; break; + case GGML_TYPE_IQ2_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_S_F32 ].pipeline; break; + case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32 ].pipeline; break; + case GGML_TYPE_IQ1_M: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_M_F32 ].pipeline; break; + case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32 ].pipeline; break; + case GGML_TYPE_IQ4_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_XS_F32 ].pipeline; break; + default: GGML_ABORT("MUL_MAT_ID not implemented"); + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBuffer:id_src2 offset:offs_src2 atIndex:3]; + [encoder setBytes:&ne20 length:sizeof(ne20) atIndex:4]; + [encoder setBytes:&ne21 length:sizeof(ne21) atIndex:5]; + [encoder 
setBytes:&nb21 length:sizeof(nb21) atIndex:6]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:7]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:8]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:9]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:10]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:11]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:12]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:13]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:14]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:15]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:16]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:17]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:18]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:19]; + + [encoder setThreadgroupMemoryLength:GGML_PAD(8192 + dst_rows*4/*sizeof(ushort2)*/, 16) atIndex:0]; + + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 31)/32, (ne01 + 63)/64, n_as) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; + } else { + int nth0 = 32; + int nth1 = 1; + int nrows = 1; + //printf("vector: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + + id pipeline = nil; + + // use custom matrix x vector kernel + switch (src0t) { + case GGML_TYPE_F32: + { + GGML_ASSERT(src1t == GGML_TYPE_F32); + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32].pipeline; + } break; + case GGML_TYPE_F16: + { + GGML_ASSERT(src1t == GGML_TYPE_F32); + nth0 = 32; + nth1 = 1; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32].pipeline; + } break; + case GGML_TYPE_Q4_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_0_F32].pipeline; + } break; + case GGML_TYPE_Q4_1: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_1_F32].pipeline; + } break; + case GGML_TYPE_Q5_0: + { + nth0 = 8; + nth1 = 8; + pipeline = 
ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_0_F32].pipeline; + } break; + case GGML_TYPE_Q5_1: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_1_F32].pipeline; + } break; + case GGML_TYPE_Q8_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q8_0_F32].pipeline; + } break; + case GGML_TYPE_Q2_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q2_K_F32].pipeline; + } break; + case GGML_TYPE_Q3_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q3_K_F32].pipeline; + } break; + case GGML_TYPE_Q4_K: + { + nth0 = 4; //1; + nth1 = 8; //32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_K_F32].pipeline; + } break; + case GGML_TYPE_Q5_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_K_F32].pipeline; + } break; + case GGML_TYPE_Q6_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q6_K_F32].pipeline; + } break; + case GGML_TYPE_IQ2_XXS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32].pipeline; + } break; + case GGML_TYPE_IQ2_XS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32].pipeline; + } break; + case GGML_TYPE_IQ3_XXS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32].pipeline; + } break; + case GGML_TYPE_IQ3_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_S_F32].pipeline; + } break; + case GGML_TYPE_IQ2_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_S_F32].pipeline; + } break; + case GGML_TYPE_IQ1_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32].pipeline; + } break; + case GGML_TYPE_IQ1_M: + { + nth0 = 4; + nth1 
= 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_M_F32].pipeline; + } break; + case GGML_TYPE_IQ4_NL: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32].pipeline; + } break; + case GGML_TYPE_IQ4_XS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_XS_F32].pipeline; + } break; + default: + { + GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); + GGML_ABORT("not implemented"); + } + }; + + if (ggml_is_quantized(src0t)) { + GGML_ASSERT(ne00 >= nth0*nth1); + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBuffer:id_src2 offset:offs_src2 atIndex:3]; + [encoder setBytes:&ne20 length:sizeof(ne20) atIndex:4]; + [encoder setBytes:&ne21 length:sizeof(ne21) atIndex:5]; + [encoder setBytes:&nb21 length:sizeof(nb21) atIndex:6]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:7]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:8]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:9]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:10]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:11]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:12]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:13]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:14]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:15]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:16]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:17]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:18]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:19]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:20]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:21]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:22]; + + const int64_t _ne1 = 1; + 
const int tgz = dst_rows; + + if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_Q4_1 || src0t == GGML_TYPE_Q5_0 || + src0t == GGML_TYPE_Q5_1 || src0t == GGML_TYPE_Q8_0 || src0t == GGML_TYPE_Q2_K || + src0t == GGML_TYPE_IQ1_S || src0t == GGML_TYPE_IQ1_M || src0t == GGML_TYPE_IQ2_S) { + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, _ne1, tgz) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_IQ2_XXS || src0t == GGML_TYPE_IQ2_XS) { + const int mem_size = src0t == GGML_TYPE_IQ2_XXS ? 256*8+128 : 512*8+128; + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, _ne1, tgz) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_IQ3_XXS || src0t == GGML_TYPE_IQ3_S) { + const int mem_size = src0t == GGML_TYPE_IQ3_XXS ? 256*4+128 : 512*4; + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, _ne1, tgz) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_IQ4_NL || src0t == GGML_TYPE_IQ4_XS) { + const int mem_size = 32*sizeof(float); + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, _ne1, tgz) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_Q4_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, _ne1, tgz) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_Q3_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, _ne1, tgz) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_Q5_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, _ne1, tgz) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_Q6_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 1)/2, _ne1, tgz) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } 
else { + const int64_t ny = (_ne1 + nrows - 1)/nrows; // = _ne1 + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ny, tgz) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + } + } break; + case GGML_OP_GET_ROWS: + { + id pipeline = nil; + + switch (src0->type) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_F32 ].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_F16 ].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_0 ].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_1 ].pipeline; break; + case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_0 ].pipeline; break; + case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_1 ].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q8_0 ].pipeline; break; + case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q2_K ].pipeline; break; + case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q3_K ].pipeline; break; + case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_K ].pipeline; break; + case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_K ].pipeline; break; + case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q6_K ].pipeline; break; + case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS].pipeline; break; + case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS ].pipeline; break; + case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS].pipeline; break; + case GGML_TYPE_IQ3_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_S ].pipeline; break; + case GGML_TYPE_IQ2_S: pipeline = 
ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_S ].pipeline; break; + case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S ].pipeline; break; + case GGML_TYPE_IQ1_M: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_M ].pipeline; break; + case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL ].pipeline; break; + case GGML_TYPE_IQ4_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_XS ].pipeline; break; + case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_I32 ].pipeline; break; + default: GGML_ABORT("not implemented"); + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:4]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:5]; + [encoder setBytes:&ne10 length:sizeof( int64_t) atIndex:6]; + [encoder setBytes:&nb10 length:sizeof( int64_t) atIndex:7]; + [encoder setBytes:&nb11 length:sizeof( int64_t) atIndex:8]; + [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:10]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne10, ne11, 1) threadsPerThreadgroup:MTLSizeMake(32, 1, 1)]; + } break; + case GGML_OP_RMS_NORM: + { + GGML_ASSERT(ne00 % 4 == 0); + GGML_ASSERT(ggml_is_contiguous_1(src0)); + + float eps; + memcpy(&eps, dst->op_params, sizeof(float)); + + int nth = 32; // SIMD width + + while (nth < ne00/4 && nth < 1024) { + nth *= 2; + } + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_RMS_NORM].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 
length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; + [encoder setBytes:&eps length:sizeof( float) atIndex:4]; + [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; + + const int64_t nrows = ggml_nrows(src0); + + [encoder dispatchThreadgroups:MTLSizeMake(nrows, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_GROUP_NORM: + { + GGML_ASSERT(ne00 % 4 == 0); + GGML_ASSERT(ggml_is_contiguous(src0)); + + float eps; + memcpy(&eps, dst->op_params + 1, sizeof(float)); + + const int32_t n_groups = ((const int32_t *) dst->op_params)[0]; + + int nth = 32; // SIMD width + + //while (nth < ne00/4 && nth < 1024) { + // nth *= 2; + //} + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GROUP_NORM].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; + [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:5]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:6]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:7]; + [encoder setBytes:&n_groups length:sizeof( int32_t) atIndex:8]; + [encoder setBytes:&eps length:sizeof( float) atIndex:9]; + [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; + + [encoder dispatchThreadgroups:MTLSizeMake(n_groups, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_NORM: + { + GGML_ASSERT(ggml_is_contiguous_1(src0)); + + float eps; + memcpy(&eps, dst->op_params, sizeof(float)); + + const int nth = MIN(256, ne00); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_NORM].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder 
setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; + [encoder setBytes:&eps length:sizeof( float) atIndex:4]; + [encoder setThreadgroupMemoryLength:GGML_PAD(nth*sizeof(float), 16) atIndex:0]; + + const int64_t nrows = ggml_nrows(src0); + + [encoder dispatchThreadgroups:MTLSizeMake(nrows, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_ROPE: + { + GGML_ASSERT(ne10 == ne02); + + const int nth = MIN(1024, ne00); + + const int n_past = ((const int32_t *) dst->op_params)[0]; + const int n_dims = ((const int32_t *) dst->op_params)[1]; + const int mode = ((const int32_t *) dst->op_params)[2]; + // skip 3, n_ctx, used in GLM RoPE, unimplemented in metal + const int n_ctx_orig = ((const int32_t *) dst->op_params)[4]; + + float freq_base; + float freq_scale; + float ext_factor; + float attn_factor; + float beta_fast; + float beta_slow; + + memcpy(&freq_base, (const int32_t *) dst->op_params + 5, sizeof(float)); + memcpy(&freq_scale, (const int32_t *) dst->op_params + 6, sizeof(float)); + memcpy(&ext_factor, (const int32_t *) dst->op_params + 7, sizeof(float)); + memcpy(&attn_factor, (const int32_t *) dst->op_params + 8, sizeof(float)); + memcpy(&beta_fast, (const int32_t *) dst->op_params + 9, sizeof(float)); + memcpy(&beta_slow, (const int32_t *) dst->op_params + 10, sizeof(float)); + + const bool is_neox = mode & GGML_ROPE_TYPE_NEOX; + + id pipeline = nil; + + if (!is_neox) { + switch (src0->type) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_NORM_F32].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_NORM_F16].pipeline; break; + default: GGML_ABORT("fatal error"); + }; + } else { + switch (src0->type) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_NEOX_F32].pipeline; break; + case GGML_TYPE_F16: pipeline = 
ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_NEOX_F16].pipeline; break; + default: GGML_ABORT("fatal error"); + }; + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + if (id_src2 != nil) { + [encoder setBuffer:id_src2 offset:offs_src2 atIndex:2]; + } else { + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:2]; + } + [encoder setBuffer:id_dst offset:offs_dst atIndex:3]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:4]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:5]; + [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:6]; + [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:7]; + [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:8]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:10]; + [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:11]; + [encoder setBytes:&ne0 length:sizeof( int64_t) atIndex:12]; + [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:13]; + [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:14]; + [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:15]; + [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:16]; + [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:17]; + [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:18]; + [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:19]; + [encoder setBytes:&n_past length:sizeof( int) atIndex:20]; + [encoder setBytes:&n_dims length:sizeof( int) atIndex:21]; + [encoder setBytes:&n_ctx_orig length:sizeof( int) atIndex:22]; + [encoder setBytes:&freq_base length:sizeof( float) atIndex:23]; + [encoder setBytes:&freq_scale length:sizeof( float) atIndex:24]; + [encoder setBytes:&ext_factor length:sizeof( float) atIndex:25]; + [encoder setBytes:&attn_factor length:sizeof( float) atIndex:26]; + [encoder setBytes:&beta_fast length:sizeof( float) 
atIndex:27]; + [encoder setBytes:&beta_slow length:sizeof( float) atIndex:28]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_IM2COL: + { + GGML_ASSERT(src0->type == GGML_TYPE_F16); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F16 || dst->type == GGML_TYPE_F32); + + const int32_t s0 = ((const int32_t *)(dst->op_params))[0]; + const int32_t s1 = ((const int32_t *)(dst->op_params))[1]; + const int32_t p0 = ((const int32_t *)(dst->op_params))[2]; + const int32_t p1 = ((const int32_t *)(dst->op_params))[3]; + const int32_t d0 = ((const int32_t *)(dst->op_params))[4]; + const int32_t d1 = ((const int32_t *)(dst->op_params))[5]; + + const bool is_2D = ((const int32_t *)(dst->op_params))[6] == 1; + + const int32_t N = src1->ne[is_2D ? 3 : 2]; + const int32_t IC = src1->ne[is_2D ? 2 : 1]; + const int32_t IH = is_2D ? src1->ne[1] : 1; + const int32_t IW = src1->ne[0]; + + const int32_t KH = is_2D ? src0->ne[1] : 1; + const int32_t KW = src0->ne[0]; + + const int32_t OH = is_2D ? dst->ne[2] : 1; + const int32_t OW = dst->ne[1]; + + const int32_t CHW = IC * KH * KW; + + const int32_t ofs0 = src1->nb[is_2D ? 3 : 2] / 4; + const int32_t ofs1 = src1->nb[is_2D ? 
2 : 1] / 4; + + id pipeline = nil; + + switch (dst->type) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_IM2COL_F32].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_IM2COL_F16].pipeline; break; + default: GGML_ABORT("fatal error"); + }; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ofs0 length:sizeof( int32_t) atIndex:2]; + [encoder setBytes:&ofs1 length:sizeof( int32_t) atIndex:3]; + [encoder setBytes:&IW length:sizeof( int32_t) atIndex:4]; + [encoder setBytes:&IH length:sizeof( int32_t) atIndex:5]; + [encoder setBytes:&CHW length:sizeof( int32_t) atIndex:6]; + [encoder setBytes:&s0 length:sizeof( int32_t) atIndex:7]; + [encoder setBytes:&s1 length:sizeof( int32_t) atIndex:8]; + [encoder setBytes:&p0 length:sizeof( int32_t) atIndex:9]; + [encoder setBytes:&p1 length:sizeof( int32_t) atIndex:10]; + [encoder setBytes:&d0 length:sizeof( int32_t) atIndex:11]; + [encoder setBytes:&d1 length:sizeof( int32_t) atIndex:12]; + + [encoder dispatchThreadgroups:MTLSizeMake(IC, OH, OW) threadsPerThreadgroup:MTLSizeMake(N, KH, KW)]; + } break; + case GGML_OP_UPSCALE: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + const float sf0 = (float)ne0/src0->ne[0]; + const float sf1 = (float)ne1/src0->ne[1]; + const float sf2 = (float)ne2/src0->ne[2]; + const float sf3 = (float)ne3/src0->ne[3]; + + const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_UPSCALE_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; + [encoder setBytes:&nb00 
length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:10]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; + [encoder setBytes:&sf0 length:sizeof(sf0) atIndex:18]; + [encoder setBytes:&sf1 length:sizeof(sf1) atIndex:19]; + [encoder setBytes:&sf2 length:sizeof(sf2) atIndex:20]; + [encoder setBytes:&sf3 length:sizeof(sf3) atIndex:21]; + + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne0); + + [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_PAD: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_PAD_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:10]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:11]; + [encoder setBytes:&ne2 
length:sizeof(ne2) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; + + const int nth = MIN(1024, ne0); + + [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_UNPAD: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_UNPAD_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:10]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; + + const int nth = MIN(1024, ne0); + + [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_ARANGE: + { + GGML_ASSERT(dst->type == GGML_TYPE_F32); + + float start; + float step; + + memcpy(&start, ((const int32_t *) 
dst->op_params) + 0, sizeof(float)); + memcpy(&step, ((const int32_t *) dst->op_params) + 2, sizeof(float)); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARANGE_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:0]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:1]; + [encoder setBytes:&start length:sizeof(start) atIndex:2]; + [encoder setBytes:&step length:sizeof(step) atIndex:3]; + + const int nth = MIN(1024, ne0); + + [encoder dispatchThreadgroups:MTLSizeMake(1, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_TIMESTEP_EMBEDDING: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + const int dim = dst->op_params[0]; + const int max_period = dst->op_params[1]; + + const int half = dim / 2; + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_TIMESTEP_EMBEDDING_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:2]; + [encoder setBytes:&dim length:sizeof(dim) atIndex:3]; + [encoder setBytes:&max_period length:sizeof(max_period) atIndex:4]; + + const int nth = MIN(1024, half); + + [encoder dispatchThreadgroups:MTLSizeMake(ne00, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_ARGSORT: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_I32); + + const int nrows = ggml_nrows(src0); + + enum ggml_sort_order order = (enum ggml_sort_order) dst->op_params[0]; + + // bitonic sort requires the number of elements to be power of 2 + int64_t ne00_padded = 1; + while (ne00_padded < ne00) { + ne00_padded *= 2; + } + + // Metal kernels require the buffer size to be multiple of 16 bytes + // https://developer.apple.com/documentation/metal/mtlcomputecommandencoder/1443142-setthreadgroupmemorylength + const int mem_size = 
GGML_PAD(ne00_padded*sizeof(int32_t), 16); + + id pipeline = nil; + + switch (order) { + case GGML_SORT_ORDER_ASC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC].pipeline; break; + case GGML_SORT_ORDER_DESC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC].pipeline; break; + default: GGML_ABORT("fatal error"); + }; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&ne00_padded length:sizeof( int64_t) atIndex:3]; + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; + + [encoder dispatchThreadgroups:MTLSizeMake(1, nrows, 1) threadsPerThreadgroup:MTLSizeMake(ne00_padded, 1, 1)]; + } break; + case GGML_OP_LEAKY_RELU: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + float slope; + memcpy(&slope, dst->op_params, sizeof(float)); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&slope length:sizeof(slope) atIndex:2]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_FLASH_ATTN_EXT: + { + GGML_ASSERT(ne00 % 4 == 0); + GGML_ASSERT(ne11 % 32 == 0); + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + GGML_ASSERT(ggml_are_same_shape (src1, src2)); + + struct ggml_tensor * src3 = node->src[3]; + + size_t offs_src3 = 0; + + id id_src3 = src3 ? 
ggml_metal_get_buffer(src3, &offs_src3) : nil; + + GGML_ASSERT(!src3 || src3->type == GGML_TYPE_F16); + GGML_ASSERT(!src3 || src3->ne[1] >= GGML_PAD(src0->ne[1], 8) && + "the Flash-Attention Metal kernel requires the mask to be padded to 8 and at least n_queries big"); + + const int64_t ne30 = src3 ? src3->ne[0] : 0; GGML_UNUSED(ne30); + //const int64_t ne31 = src3 ? src3->ne[1] : 0; + const int64_t ne32 = src3 ? src3->ne[2] : 0; GGML_UNUSED(ne32); + const int64_t ne33 = src3 ? src3->ne[3] : 0; GGML_UNUSED(ne33); + + const uint64_t nb30 = src3 ? src3->nb[0] : 0; GGML_UNUSED(nb30); + const uint64_t nb31 = src3 ? src3->nb[1] : 0; + const uint64_t nb32 = src3 ? src3->nb[2] : 0; GGML_UNUSED(nb32); + const uint64_t nb33 = src3 ? src3->nb[3] : 0; GGML_UNUSED(nb33); + + const enum ggml_type src2t = src2 ? src2->type : GGML_TYPE_COUNT; GGML_UNUSED(src2t); + + float scale; + float max_bias; + float logit_softcap; + memcpy(&scale, ((const int32_t *) dst->op_params) + 0, sizeof(scale)); + memcpy(&max_bias, ((const int32_t *) dst->op_params) + 1, sizeof(max_bias)); + memcpy(&logit_softcap, ((const int32_t *) dst->op_params) + 2, sizeof(logit_softcap)); + + if (logit_softcap != 0.0f) { + scale /= logit_softcap; + } + + const uint32_t n_head = src0->ne[2]; + const uint32_t n_head_log2 = 1u << (uint32_t) floorf(log2f((float) n_head)); + + const float m0 = powf(2.0f, -(max_bias ) / n_head_log2); + const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_head_log2); + + id pipeline = nil; + + bool use_vec_kernel = false; + + if (ne01 >= 4 || (ne00%128 != 0)) { + switch (ne00) { + case 64: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H64 ].pipeline; break; + case 80: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H80 ].pipeline; break; + case 96: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H96 ].pipeline; break; + case 112: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H112].pipeline; break; + case 128: 
pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H128].pipeline; break; + //case 256: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_F16_H256].pipeline; break; + default: + { + GGML_METAL_LOG_ERROR("unsupported size: %lld\n", ne00); + GGML_METAL_LOG_ERROR("add template specialization for this size\n"); + GGML_ABORT("add template specialization for this size"); + } + } + } else { + use_vec_kernel = true; + + switch (ne00) { + case 128: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_F16_H128].pipeline; break; + //case 256: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_FLASH_ATTN_EXT_VEC_F16_H256].pipeline; break; + default: + { + GGML_METAL_LOG_ERROR("unsupported size: %lld\n", ne00); + GGML_METAL_LOG_ERROR("add template specialization for this size\n"); + GGML_ABORT("add template specialization for this size"); + } + } + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_src2 offset:offs_src2 atIndex:2]; + if (id_src3) { + [encoder setBuffer:id_src3 offset:offs_src3 atIndex:3]; + } else { + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:3]; + } + [encoder setBuffer:id_dst offset:offs_dst atIndex:4]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:5]; + [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:6]; + [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:7]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:8]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:10]; + [encoder setBytes:&ne11 length:sizeof( int64_t) atIndex:11]; + [encoder setBytes:&ne12 length:sizeof( int64_t) atIndex:12]; + [encoder setBytes:&ne13 length:sizeof( int64_t) atIndex:13]; + [encoder setBytes:&nb11 length:sizeof(uint64_t) atIndex:14]; + [encoder setBytes:&nb12 length:sizeof(uint64_t) atIndex:15]; 
+ [encoder setBytes:&nb13 length:sizeof(uint64_t) atIndex:16]; + [encoder setBytes:&nb21 length:sizeof(uint64_t) atIndex:17]; + [encoder setBytes:&nb22 length:sizeof(uint64_t) atIndex:18]; + [encoder setBytes:&nb23 length:sizeof(uint64_t) atIndex:19]; + [encoder setBytes:&nb31 length:sizeof(uint64_t) atIndex:20]; + [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:21]; + [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:22]; + [encoder setBytes:&scale length:sizeof( float) atIndex:23]; + [encoder setBytes:&max_bias length:sizeof( float) atIndex:24]; + [encoder setBytes:&m0 length:sizeof(m0) atIndex:25]; + [encoder setBytes:&m1 length:sizeof(m1) atIndex:26]; + [encoder setBytes:&n_head_log2 length:sizeof(n_head_log2) atIndex:27]; + [encoder setBytes:&logit_softcap length:sizeof(logit_softcap) atIndex:28]; + + if (!use_vec_kernel) { + // half8x8 kernel + const int64_t nqptg = 8; // queries per threadgroup !! sync with kernel template arguments !! + const int64_t ncpsg = 32; // cache values per simdgroup !! sync with kernel template arguments !! + + GGML_ASSERT(nqptg <= 32); + GGML_ASSERT(nqptg % 8 == 0); + GGML_ASSERT(ncpsg % 32 == 0); + + int64_t nsgmax = 2; + + while (true) { + const size_t smem = nqptg*(ne00 + 2*nsgmax*(ncpsg + nqptg))*(sizeof(float)/2); + if (smem > ctx->device.maxThreadgroupMemoryLength) { + break; + } + nsgmax *= 2; + } + nsgmax /= 2; + + // simdgroups per threadgroup (a.k.a. warps) + const int64_t nsg = ne01 <= nqptg ? 
MAX(4, MIN(nsgmax, MIN(ne11/ncpsg, (int64_t) pipeline.maxTotalThreadsPerThreadgroup/32))) : 4; + + const size_t smem = nqptg*(ne00 + 2*nsg*(ncpsg + nqptg))*(sizeof(float)/2); + + //printf("smem: %zu, max: %zu\n", smem, ctx->device.maxThreadgroupMemoryLength); + GGML_ASSERT(smem <= ctx->device.maxThreadgroupMemoryLength); + + [encoder setThreadgroupMemoryLength:GGML_PAD(smem, 16) atIndex:0]; + + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + nqptg - 1)/nqptg, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(32, nsg, 1)]; + } else { + // half1x4 kernel + const int64_t nqptg = 1; // queries per threadgroup !! sync with kernel template arguments !! + const int64_t ncpsg = 32; // cache values per simdgroup !! sync with kernel template arguments !! + + GGML_ASSERT(nqptg <= 32); + GGML_ASSERT(nqptg % 1 == 0); + GGML_ASSERT(ncpsg % 32 == 0); + + // simdgroups per threadgroup (a.k.a. warps) + const int64_t nsgt = MAX(2, MIN(ne11/ncpsg, (int64_t) pipeline.maxTotalThreadsPerThreadgroup/32)); + + int64_t nsg = 1; + while (nsg <= nsgt) { + nsg *= 2; + } + nsg /= 2; + + const size_t smem = (nqptg*(ne00 + 2*nsg*(ncpsg + nqptg)) + nsg*ne00)*(sizeof(float)/2); + + //printf("smem: %zu, max: %zu\n", smem, ctx->device.maxThreadgroupMemoryLength); + GGML_ASSERT(smem <= ctx->device.maxThreadgroupMemoryLength); + [encoder setThreadgroupMemoryLength:GGML_PAD(smem, 16) atIndex:0]; + + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + nqptg - 1)/nqptg, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(32, nsg, 1)]; + } + } break; + case GGML_OP_DUP: + case GGML_OP_CPY: + case GGML_OP_CONT: + { + GGML_ASSERT(ne00 % ggml_blck_size(src0->type) == 0); + + int nth = MIN(1024, ne00/ggml_blck_size(src0->type)); + + id pipeline = nil; + + switch (src0t) { + case GGML_TYPE_F32: + { + GGML_ASSERT(ne0 % ggml_blck_size(dst->type) == 0); + + switch (dstt) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F32].pipeline; break; + case GGML_TYPE_F16: pipeline = 
ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F16].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_0].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_1].pipeline; break; + case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_0].pipeline; break; + case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_1].pipeline; break; + case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_IQ4_NL].pipeline; break; + default: GGML_ABORT("not implemented"); + }; + } break; + case GGML_TYPE_F16: + { + switch (dstt) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F16_F32].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F16_F16].pipeline; break; + default: GGML_ABORT("not implemented"); + }; + } break; + default: GGML_ABORT("not implemented"); + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof( int64_t) atIndex:10]; + [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:13]; + [encoder 
setBytes:&nb0 length:sizeof(uint64_t) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:17]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + default: + { + GGML_METAL_LOG_ERROR("%s: error: node %3d, op = %8s not implemented\n", __func__, idx, ggml_op_name(dst->op)); + GGML_ABORT("fatal error"); + } + } +} + +static enum ggml_status ggml_metal_graph_compute( + struct ggml_backend_metal_context * ctx, + struct ggml_cgraph * gf) { + // number of nodes encoded by the main thread (empirically determined) + const int n_main = 128; + + // number of threads in addition to the main thread + const int n_cb = ctx->n_cb; + + // submit the ggml compute graph to the GPU by creating command buffers and encoding the ops in them + // the first n_nodes_0 are encoded and submitted for processing directly by the calling thread + // while these nodes are processing, we start n_cb threads to enqueue the rest of the nodes + // each thread creates it's own command buffer and enqueues the ops in parallel + // + // tests on M1 Pro and M2 Ultra using LLaMA models, show that optimal values for n_cb are 1 or 2 + + @autoreleasepool { + ctx->gf = gf; + + ctx->n_nodes_0 = MIN(n_main, gf->n_nodes); + ctx->n_nodes_1 = gf->n_nodes - ctx->n_nodes_0; + + ctx->n_nodes_per_cb = (ctx->n_nodes_1 + ctx->n_cb - 1) / ctx->n_cb; + + const bool should_capture = ctx->capture_next_compute; + if (should_capture) { + ctx->capture_next_compute = false; + + if (!ctx->capture_started) { + // create capture scope + ctx->capture_scope = [[MTLCaptureManager sharedCaptureManager] newCaptureScopeWithDevice:ctx->device]; + + MTLCaptureDescriptor * descriptor = [MTLCaptureDescriptor new]; + descriptor.captureObject = ctx->capture_scope; + descriptor.destination = MTLCaptureDestinationGPUTraceDocument; + 
descriptor.outputURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"/tmp/perf-metal.gputrace"]]; + + NSError * error = nil; + if (![[MTLCaptureManager sharedCaptureManager] startCaptureWithDescriptor:descriptor error:&error]) { + GGML_METAL_LOG_ERROR("%s: error: unable to start capture '%s'\n", __func__, [[error localizedDescription] UTF8String]); + GGML_ABORT("capture failed"); + } else { + [ctx->capture_scope beginScope]; + ctx->capture_started = true; + } + } + } + + // TODO: how to avoid this allocation? I tried initializing it in ggml_backend_metal_set_n_cb but it crashes. + ctx->encode_async = ^(size_t iter) { + const int cb_idx = iter; + const int n_cb_l = ctx->n_cb; + + const int n_nodes_0 = ctx->n_nodes_0; + const int n_nodes_1 = ctx->n_nodes_1; + + const int n_nodes_per_cb = ctx->n_nodes_per_cb; + + id command_buffer = ctx->command_buffers[cb_idx]; + id encoder = [command_buffer computeCommandEncoderWithDescriptor: ctx->edesc]; + + int node_start = 0; + int node_end = n_nodes_0; + + if (cb_idx < n_cb_l) { + node_start = n_nodes_0 + ( (cb_idx + 0) * n_nodes_per_cb); + node_end = n_nodes_0 + (MIN((cb_idx == n_cb_l - 1) ? 
n_nodes_1 : (cb_idx + 1) * n_nodes_per_cb, n_nodes_1)); + } + + for (int idx = node_start; idx < node_end; ++idx) { + if (should_capture) { + [encoder pushDebugGroup:[NSString stringWithCString:ggml_op_desc(ggml_graph_node(gf, idx)) encoding:NSUTF8StringEncoding]]; + } + + ggml_metal_encode_node(ctx, idx, encoder); + + if (should_capture) { + [encoder popDebugGroup]; + } + } + + [encoder endEncoding]; + + if (cb_idx < 2 || ctx->abort_callback == NULL) { + [command_buffer commit]; + } + }; + + // the main thread commits the first few commands immediately + // command_buffer[n_cb] + { + id command_buffer = [ctx->queue commandBufferWithUnretainedReferences]; + ctx->command_buffers[n_cb] = command_buffer; + + [command_buffer enqueue]; + ctx->encode_async(n_cb); + } + + // prepare the rest of the command buffers asynchronously + // command_buffer[0.. n_cb) + for (int cb_idx = 0; cb_idx < n_cb; ++cb_idx) { + id command_buffer = [ctx->queue commandBufferWithUnretainedReferences]; + ctx->command_buffers[cb_idx] = command_buffer; + + // always enqueue the first two command buffers + // enqueue all of the command buffers if we don't need to abort + if (cb_idx < 2 || ctx->abort_callback == NULL) { + [command_buffer enqueue]; + } + } + + dispatch_apply(n_cb, ctx->d_queue, ctx->encode_async); + + // wait for completion and check status of each command buffer + // needed to detect if the device ran out-of-memory for example (#1881) + { + id command_buffer = ctx->command_buffers[n_cb]; + [command_buffer waitUntilCompleted]; + + MTLCommandBufferStatus status = [command_buffer status]; + if (status != MTLCommandBufferStatusCompleted) { + GGML_METAL_LOG_INFO("%s: command buffer %d failed with status %lu\n", __func__, n_cb, status); + if (status == MTLCommandBufferStatusError) { + GGML_METAL_LOG_INFO("error: %s\n", [[command_buffer error].localizedDescription UTF8String]); + } + + return GGML_STATUS_FAILED; + } + } + + for (int i = 0; i < n_cb; ++i) { + id command_buffer = 
ctx->command_buffers[i]; + [command_buffer waitUntilCompleted]; + + MTLCommandBufferStatus status = [command_buffer status]; + if (status != MTLCommandBufferStatusCompleted) { + GGML_METAL_LOG_INFO("%s: command buffer %d failed with status %lu\n", __func__, i, status); + if (status == MTLCommandBufferStatusError) { + GGML_METAL_LOG_INFO("error: %s\n", [[command_buffer error].localizedDescription UTF8String]); + } + + return GGML_STATUS_FAILED; + } + + id next_buffer = (i + 1 < n_cb ? ctx->command_buffers[i + 1] : nil); + if (!next_buffer) { + continue; + } + + const bool next_queued = ([next_buffer status] != MTLCommandBufferStatusNotEnqueued); + if (next_queued) { + continue; + } + + if (ctx->abort_callback && ctx->abort_callback(ctx->abort_callback_data)) { + GGML_METAL_LOG_INFO("%s: command buffer %d aborted", __func__, i); + return GGML_STATUS_ABORTED; + } + + [next_buffer commit]; + } + + if (!should_capture && ctx->capture_started) { + [ctx->capture_scope endScope]; + [[MTLCaptureManager sharedCaptureManager] stopCapture]; + } + } + + return GGML_STATUS_SUCCESS; +} + +//////////////////////////////////////////////////////////////////////////////// + +// backend interface + +// default buffer +static id g_backend_device = nil; +static int g_backend_device_ref_count = 0; + +static id ggml_backend_metal_get_device(void) { + if (g_backend_device == nil) { + g_backend_device = MTLCreateSystemDefaultDevice(); + } + + g_backend_device_ref_count++; + + return g_backend_device; +} + +static void ggml_backend_metal_free_device(void) { + assert(g_backend_device_ref_count > 0); + + g_backend_device_ref_count--; + + if (g_backend_device_ref_count == 0) { + [g_backend_device release]; + g_backend_device = nil; + } +} + +GGML_CALL static const char * ggml_backend_metal_buffer_get_name(ggml_backend_buffer_t buffer) { + return "Metal"; + + UNUSED(buffer); +} + +GGML_CALL static void ggml_backend_metal_buffer_free_buffer(ggml_backend_buffer_t buffer) { + struct 
ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; + + for (int i = 0; i < ctx->n_buffers; i++) { + [ctx->buffers[i].metal release]; + } + ggml_backend_metal_free_device(); + + if (ctx->owned) { +#if TARGET_OS_OSX + vm_deallocate((vm_map_t)mach_task_self(), (vm_address_t)ctx->all_data, ctx->all_size); +#else + free(ctx->all_data); +#endif + } + + free(ctx); +} + +GGML_CALL static void * ggml_backend_metal_buffer_get_base(ggml_backend_buffer_t buffer) { + struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; + + return ctx->all_data; +} + +GGML_CALL static void ggml_backend_metal_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + memcpy((char *)tensor->data + offset, data, size); + + UNUSED(buffer); +} + +GGML_CALL static void ggml_backend_metal_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { + memcpy(data, (const char *)tensor->data + offset, size); + + UNUSED(buffer); +} + +GGML_CALL static bool ggml_backend_metal_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { + if (ggml_backend_buffer_is_host(src->buffer)) { + memcpy(dst->data, src->data, ggml_nbytes(src)); + return true; + } + return false; + + UNUSED(buffer); +} + +GGML_CALL static void ggml_backend_metal_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; + + memset(ctx->all_data, value, ctx->all_size); +} + +static struct ggml_backend_buffer_i ggml_backend_metal_buffer_i = { + /* .get_name = */ ggml_backend_metal_buffer_get_name, + /* .free_buffer = */ ggml_backend_metal_buffer_free_buffer, + /* .get_base = */ ggml_backend_metal_buffer_get_base, + /* .init_tensor = */ NULL, 
+ /* .memset_tensor = */ NULL, + /* .set_tensor = */ ggml_backend_metal_buffer_set_tensor, + /* .get_tensor = */ ggml_backend_metal_buffer_get_tensor, + /* .cpy_tensor = */ ggml_backend_metal_buffer_cpy_tensor, + /* .clear = */ ggml_backend_metal_buffer_clear, + /* .reset = */ NULL, +}; + +// default buffer type + +GGML_CALL static const char * ggml_backend_metal_buffer_type_get_name(ggml_backend_buffer_type_t buft) { + return "Metal"; + + UNUSED(buft); +} + +static void ggml_backend_metal_log_allocated_size(id device, size_t size_aligned) { +#ifndef GGML_METAL_NDEBUG +#if TARGET_OS_OSX || (TARGET_OS_IOS && __clang_major__ >= 15) + if (@available(macOS 10.12, iOS 16.0, *)) { + GGML_METAL_LOG_DEBUG("%s: allocated buffer, size = %8.2f MiB, (%8.2f / %8.2f)\n", + __func__, + size_aligned / 1024.0 / 1024.0, + device.currentAllocatedSize / 1024.0 / 1024.0, + device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0); + + if (device.currentAllocatedSize > device.recommendedMaxWorkingSetSize) { + GGML_METAL_LOG_WARN("%s: warning: current allocated size is greater than the recommended max working set size\n", __func__); + } + } else { + GGML_METAL_LOG_INFO("%s: allocated buffer, size = %8.2f MiB, (%8.2f)\n", + __func__, + size_aligned / 1024.0 / 1024.0, + device.currentAllocatedSize / 1024.0 / 1024.0); + } +#endif +#endif + UNUSED(device); + UNUSED(size_aligned); +} + +GGML_CALL static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + struct ggml_backend_metal_buffer_context * ctx = malloc(sizeof(struct ggml_backend_metal_buffer_context)); + + const size_t size_page = sysconf(_SC_PAGESIZE); + + size_t size_aligned = size; + if ((size_aligned % size_page) != 0) { + size_aligned += (size_page - (size_aligned % size_page)); + } + + id device = ggml_backend_metal_get_device(); + + ctx->all_data = ggml_metal_host_malloc(size_aligned); + ctx->all_size = size_aligned; + ctx->owned = true; + ctx->n_buffers = 1; + + if 
(ctx->all_data != NULL) { + ctx->buffers[0].data = ctx->all_data; + ctx->buffers[0].size = size; + ctx->buffers[0].metal = nil; + + if (size_aligned > 0) { + ctx->buffers[0].metal = [device newBufferWithBytesNoCopy:ctx->all_data + length:size_aligned + options:MTLResourceStorageModeShared + deallocator:nil]; + } + } + + if (size_aligned > 0 && (ctx->all_data == NULL || ctx->buffers[0].metal == nil)) { + GGML_METAL_LOG_ERROR("%s: error: failed to allocate buffer, size = %8.2f MiB\n", __func__, size_aligned / 1024.0 / 1024.0); + free(ctx); + ggml_backend_metal_free_device(); + return NULL; + } + + //ggml_backend_metal_log_allocated_size(device, size_aligned); + + return ggml_backend_buffer_init(buft, ggml_backend_metal_buffer_i, ctx, size); +} + +GGML_CALL static size_t ggml_backend_metal_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { + return 32; + UNUSED(buft); +} + +GGML_CALL static size_t ggml_backend_metal_buffer_type_get_max_size(ggml_backend_buffer_type_t buft) { + id device = ggml_backend_metal_get_device(); + size_t max_size = device.maxBufferLength; + ggml_backend_metal_free_device(); + + return max_size; + + UNUSED(buft); +} + +GGML_CALL static bool ggml_backend_metal_buffer_type_is_host(ggml_backend_buffer_type_t buft) { + return true; + + UNUSED(buft); +} + +GGML_CALL ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void) { + static struct ggml_backend_buffer_type ggml_backend_buffer_type_metal = { + /* .iface = */ { + /* .get_name = */ ggml_backend_metal_buffer_type_get_name, + /* .alloc_buffer = */ ggml_backend_metal_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_metal_buffer_type_get_alignment, + /* .get_max_size = */ ggml_backend_metal_buffer_type_get_max_size, + /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes + /* .is_host = */ ggml_backend_metal_buffer_type_is_host, + }, + /* .context = */ NULL, + }; + + return &ggml_backend_buffer_type_metal; +} + +// buffer from ptr + +GGML_CALL 
ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t size, size_t max_size) { + struct ggml_backend_metal_buffer_context * ctx = malloc(sizeof(struct ggml_backend_metal_buffer_context)); + + ctx->all_data = data; + ctx->all_size = size; + ctx->owned = false; + ctx->n_buffers = 0; + + const size_t size_page = sysconf(_SC_PAGESIZE); + + // page-align the data ptr + { + const uintptr_t offs = (uintptr_t) data % size_page; + data = (void *) ((char *) data - offs); + size += offs; + } + + size_t size_aligned = size; + if ((size_aligned % size_page) != 0) { + size_aligned += (size_page - (size_aligned % size_page)); + } + + id device = ggml_backend_metal_get_device(); + + // the buffer fits into the max buffer size allowed by the device + if (size_aligned <= device.maxBufferLength) { + ctx->buffers[ctx->n_buffers].data = data; + ctx->buffers[ctx->n_buffers].size = size; + ctx->buffers[ctx->n_buffers].metal = nil; + + if (size_aligned > 0) { + ctx->buffers[ctx->n_buffers].metal = [device newBufferWithBytesNoCopy:data length:size_aligned options:MTLResourceStorageModeShared deallocator:nil]; + + if (ctx->buffers[ctx->n_buffers].metal == nil) { + GGML_METAL_LOG_ERROR("%s: error: failed to allocate buffer, size = %8.2f MiB\n", __func__, size_aligned / 1024.0 / 1024.0); + return false; + } + } + + ggml_backend_metal_log_allocated_size(device, size_aligned); + + ++ctx->n_buffers; + } else { + // this overlap between the views will guarantee that the tensor with the maximum size will fully fit into + // one of the views + const size_t size_ovlp = ((max_size + size_page - 1) / size_page + 1) * size_page; // round-up 2 pages just in case + const size_t size_step = device.maxBufferLength - size_ovlp; + const size_t size_view = device.maxBufferLength; + + for (size_t i = 0; i < size; i += size_step) { + const size_t size_step_aligned = (i + size_view <= size) ? 
size_view : (size_aligned - i); + + ctx->buffers[ctx->n_buffers].data = (void *) ((uint8_t *) data + i); + ctx->buffers[ctx->n_buffers].size = size_step_aligned; + ctx->buffers[ctx->n_buffers].metal = nil; + + if (size_step_aligned > 0) { + ctx->buffers[ctx->n_buffers].metal = [device newBufferWithBytesNoCopy:(void *) ((uint8_t *) data + i) length:size_step_aligned options:MTLResourceStorageModeShared deallocator:nil]; + + if (ctx->buffers[ctx->n_buffers].metal == nil) { + GGML_METAL_LOG_ERROR("%s: error: failed to allocate buffer, size = %8.2f MiB\n", __func__, size_step_aligned / 1024.0 / 1024.0); + return false; + } + } + + ggml_backend_metal_log_allocated_size(device, size_step_aligned); + + if (i + size_step < size) { + GGML_METAL_LOG_INFO("\n"); + } + + ++ctx->n_buffers; + } + } + + return ggml_backend_buffer_init(ggml_backend_metal_buffer_type(), ggml_backend_metal_buffer_i, ctx, size); +} + +// backend + +GGML_CALL static const char * ggml_backend_metal_name(ggml_backend_t backend) { + return "Metal"; + + UNUSED(backend); +} + +GGML_CALL static void ggml_backend_metal_free(ggml_backend_t backend) { + struct ggml_backend_metal_context * ctx = (struct ggml_backend_metal_context *)backend->context; + ggml_metal_free(ctx); + free(backend); +} + +GGML_CALL static ggml_backend_buffer_type_t ggml_backend_metal_get_default_buffer_type(ggml_backend_t backend) { + return ggml_backend_metal_buffer_type(); + + UNUSED(backend); +} + +GGML_CALL static enum ggml_status ggml_backend_metal_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { + struct ggml_backend_metal_context * metal_ctx = (struct ggml_backend_metal_context *)backend->context; + + return ggml_metal_graph_compute(metal_ctx, cgraph); +} + +GGML_CALL static bool ggml_backend_metal_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { + struct ggml_backend_metal_context * metal_ctx = (struct ggml_backend_metal_context *)backend->context; + + return 
ggml_metal_supports_op(metal_ctx, op); +} + +GGML_CALL static bool ggml_backend_metal_supports_buft(ggml_backend_t backend, ggml_backend_buffer_type_t buft) { + return buft->iface.get_name == ggml_backend_metal_buffer_type_get_name; + + UNUSED(backend); +} + +static void ggml_backend_metal_set_n_cb(ggml_backend_t backend, int n_cb) { + GGML_ASSERT(ggml_backend_is_metal(backend)); + + struct ggml_backend_metal_context * ctx = (struct ggml_backend_metal_context *)backend->context; + + if (ctx->n_cb != n_cb) { + ctx->n_cb = MIN(n_cb, GGML_METAL_MAX_COMMAND_BUFFERS); + + if (ctx->n_cb > 2) { + GGML_METAL_LOG_WARN("%s: n_cb = %d, using n_cb > 2 is not recommended and can degrade the performance in some cases\n", __func__, n_cb); + } + } + + // TODO: setting encode_async here causes crash during the next ggml_metal_graph_compute call. why? + //ctx->encode_async = ^(size_t iter) { + // ... + //}; +} + +static struct ggml_backend_i ggml_backend_metal_i = { + /* .get_name = */ ggml_backend_metal_name, + /* .free = */ ggml_backend_metal_free, + /* .get_default_buffer_type = */ ggml_backend_metal_get_default_buffer_type, + /* .set_tensor_async = */ NULL, + /* .get_tensor_async = */ NULL, + /* .cpy_tensor_async = */ NULL, + /* .synchronize = */ NULL, + /* .graph_plan_create = */ NULL, + /* .graph_plan_free = */ NULL, + /* .graph_plan_update = */ NULL, + /* .graph_plan_compute = */ NULL, + /* .graph_compute = */ ggml_backend_metal_graph_compute, + /* .supports_op = */ ggml_backend_metal_supports_op, + /* .supports_buft = */ ggml_backend_metal_supports_buft, + /* .offload_op = */ NULL, + /* .event_new = */ NULL, + /* .event_free = */ NULL, + /* .event_record = */ NULL, + /* .event_wait = */ NULL, + /* .event_synchronize = */ NULL, +}; + +void ggml_backend_metal_log_set_callback(ggml_log_callback log_callback, void * user_data) { + ggml_metal_log_callback = log_callback; + ggml_metal_log_user_data = user_data; +} + +static ggml_guid_t ggml_backend_metal_guid(void) { + static 
ggml_guid guid = { 0x81, 0xa1, 0x8b, 0x1e, 0x71, 0xec, 0x79, 0xed, 0x2b, 0x85, 0xdc, 0x8a, 0x61, 0x98, 0x30, 0xe6 }; + return &guid; +} + +ggml_backend_t ggml_backend_metal_init(void) { + struct ggml_backend_metal_context * ctx = ggml_metal_init(); + if (ctx == NULL) { + GGML_METAL_LOG_ERROR("%s: error: failed to allocate context\n", __func__); + return NULL; + } + + ggml_backend_t backend = malloc(sizeof(struct ggml_backend)); + + *backend = (struct ggml_backend) { + /* .guid = */ ggml_backend_metal_guid(), + /* .interface = */ ggml_backend_metal_i, + /* .context = */ ctx, + }; + + ggml_backend_metal_set_n_cb(backend, 1); + + return backend; +} + +bool ggml_backend_is_metal(ggml_backend_t backend) { + return backend != NULL && ggml_guid_matches(backend->guid, ggml_backend_metal_guid()); +} + +void ggml_backend_metal_set_abort_callback(ggml_backend_t backend, ggml_abort_callback abort_callback, void * user_data) { + GGML_ASSERT(ggml_backend_is_metal(backend)); + + struct ggml_backend_metal_context * ctx = (struct ggml_backend_metal_context *)backend->context; + + ctx->abort_callback = abort_callback; + ctx->abort_callback_data = user_data; +} + +bool ggml_backend_metal_supports_family(ggml_backend_t backend, int family) { + GGML_ASSERT(ggml_backend_is_metal(backend)); + + struct ggml_backend_metal_context * ctx = (struct ggml_backend_metal_context *)backend->context; + + return [ctx->device supportsFamily:(MTLGPUFamilyApple1 + family - 1)]; +} + +void ggml_backend_metal_capture_next_compute(ggml_backend_t backend) { + GGML_ASSERT(ggml_backend_is_metal(backend)); + + struct ggml_backend_metal_context * ctx = (struct ggml_backend_metal_context *)backend->context; + ctx->capture_next_compute = true; +} + +GGML_CALL ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data); // silence warning + +GGML_CALL ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data) { + return ggml_backend_metal_init(); + + 
GGML_UNUSED(params); + GGML_UNUSED(user_data); +} diff --git a/ml/backend/ggml/ggml-metal_darwin_arm64.s b/ml/backend/ggml/ggml-metal_darwin_arm64.s new file mode 100644 index 000000000..47c729a6a --- /dev/null +++ b/ml/backend/ggml/ggml-metal_darwin_arm64.s @@ -0,0 +1,6 @@ +.section __DATA, __ggml_metallib +.globl _ggml_metallib_start +_ggml_metallib_start: +.incbin "ggml-metal-embed.metal" +.globl _ggml_metallib_end +_ggml_metallib_end: diff --git a/ml/backend/ggml/ggml-quants.c b/ml/backend/ggml/ggml-quants.c new file mode 100644 index 000000000..1c3476559 --- /dev/null +++ b/ml/backend/ggml/ggml-quants.c @@ -0,0 +1,15778 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +#define GGML_COMMON_IMPL_C +#include "ggml-common.h" + +#include "ggml-quants.h" +#include "ggml-impl.h" +#include "ggml-cpu-impl.h" + + +#include +#include +#include +#include +#include // for qsort +#include // for GGML_ASSERT + +#define GROUP_MAX_EPS 1e-15f +#define GROUP_MAX_EPS_IQ3_XXS 1e-8f +#define GROUP_MAX_EPS_IQ2_S 1e-8f +#define GROUP_MAX_EPS_IQ1_M 1e-7f +#define GROUP_MAX_EPS_IQ1_S 1e-12f + +#if defined(_MSC_VER) +// disable "possible loss of data" to avoid warnings for hundreds of casts +// we should just be careful :) +#pragma warning(disable: 4244 4267) +#endif + +#define UNUSED GGML_UNUSED + +// some compilers don't provide _mm256_set_m128i, e.g. gcc 7 +#define MM256_SET_M128I(a, b) _mm256_insertf128_si256(_mm256_castsi128_si256(b), (a), 1) + +#if defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) +// multiply int8_t, add results pairwise twice +static inline __m128i mul_sum_i8_pairs(const __m128i x, const __m128i y) { + // Get absolute values of x vectors + const __m128i ax = _mm_sign_epi8(x, x); + // Sign the values of the y vectors + const __m128i sy = _mm_sign_epi8(y, x); + // Perform multiplication and create 16-bit values + const __m128i dot = _mm_maddubs_epi16(ax, sy); + const __m128i ones = _mm_set1_epi16(1); + return _mm_madd_epi16(ones, dot); +} + +#if __AVX__ || __AVX2__ || __AVX512F__ +// horizontally add 8 floats +static inline float hsum_float_8(const __m256 x) { + __m128 res = _mm256_extractf128_ps(x, 1); + res = _mm_add_ps(res, _mm256_castps256_ps128(x)); + res = _mm_add_ps(res, _mm_movehl_ps(res, res)); + res = _mm_add_ss(res, _mm_movehdup_ps(res)); + return _mm_cvtss_f32(res); +} + +// horizontally add 8 int32_t +static inline int hsum_i32_8(const __m256i a) { + const __m128i sum128 = _mm_add_epi32(_mm256_castsi256_si128(a), _mm256_extractf128_si256(a, 1)); + const __m128i hi64 = _mm_unpackhi_epi64(sum128, sum128); + const __m128i sum64 = _mm_add_epi32(hi64, sum128); + const __m128i hi32 = 
_mm_shuffle_epi32(sum64, _MM_SHUFFLE(2, 3, 0, 1)); + return _mm_cvtsi128_si32(_mm_add_epi32(sum64, hi32)); +} + +// horizontally add 4 int32_t +static inline int hsum_i32_4(const __m128i a) { + const __m128i hi64 = _mm_unpackhi_epi64(a, a); + const __m128i sum64 = _mm_add_epi32(hi64, a); + const __m128i hi32 = _mm_shuffle_epi32(sum64, _MM_SHUFFLE(2, 3, 0, 1)); + return _mm_cvtsi128_si32(_mm_add_epi32(sum64, hi32)); +} + +#if defined(__AVX2__) || defined(__AVX512F__) +// spread 32 bits to 32 bytes { 0x00, 0xFF } +static inline __m256i bytes_from_bits_32(const uint8_t * x) { + uint32_t x32; + memcpy(&x32, x, sizeof(uint32_t)); + const __m256i shuf_mask = _mm256_set_epi64x( + 0x0303030303030303, 0x0202020202020202, + 0x0101010101010101, 0x0000000000000000); + __m256i bytes = _mm256_shuffle_epi8(_mm256_set1_epi32(x32), shuf_mask); + const __m256i bit_mask = _mm256_set1_epi64x(0x7fbfdfeff7fbfdfe); + bytes = _mm256_or_si256(bytes, bit_mask); + return _mm256_cmpeq_epi8(bytes, _mm256_set1_epi64x(-1)); +} + +// Unpack 32 4-bit fields into 32 bytes +// The output vector contains 32 bytes, each one in [ 0 .. 
15 ] interval +static inline __m256i bytes_from_nibbles_32(const uint8_t * rsi) +{ + const __m128i tmp = _mm_loadu_si128((const __m128i *)rsi); + const __m256i bytes = MM256_SET_M128I(_mm_srli_epi16(tmp, 4), tmp); + const __m256i lowMask = _mm256_set1_epi8( 0xF ); + return _mm256_and_si256(lowMask, bytes); +} + +// add int16_t pairwise and return as float vector +static inline __m256 sum_i16_pairs_float(const __m256i x) { + const __m256i ones = _mm256_set1_epi16(1); + const __m256i summed_pairs = _mm256_madd_epi16(ones, x); + return _mm256_cvtepi32_ps(summed_pairs); +} + +static inline __m256 mul_sum_us8_pairs_float(const __m256i ax, const __m256i sy) { +#if defined(__AVXVNNI__) || (defined(__AVX512VNNI__) && defined(__AVX512VL__)) + const __m256i zero = _mm256_setzero_si256(); + const __m256i summed_pairs = _mm256_dpbusd_epi32(zero, ax, sy); + return _mm256_cvtepi32_ps(summed_pairs); +#else + // Perform multiplication and create 16-bit values + const __m256i dot = _mm256_maddubs_epi16(ax, sy); + return sum_i16_pairs_float(dot); +#endif +} + +// multiply int8_t, add results pairwise twice and return as float vector +static inline __m256 mul_sum_i8_pairs_float(const __m256i x, const __m256i y) { +#if __AVXVNNIINT8__ + const __m256i zero = _mm256_setzero_si256(); + const __m256i summed_pairs = _mm256_dpbssd_epi32(zero, x, y); + return _mm256_cvtepi32_ps(summed_pairs); +#else + // Get absolute values of x vectors + const __m256i ax = _mm256_sign_epi8(x, x); + // Sign the values of the y vectors + const __m256i sy = _mm256_sign_epi8(y, x); + return mul_sum_us8_pairs_float(ax, sy); +#endif +} + +static inline __m128i packNibbles( __m256i bytes ) +{ + // Move bits within 16-bit lanes from 0000_abcd_0000_efgh into 0000_0000_abcd_efgh +#if __AVX512F__ + const __m256i bytes_srli_4 = _mm256_srli_epi16(bytes, 4); // 0000_0000_abcd_0000 + bytes = _mm256_or_si256(bytes, bytes_srli_4); // 0000_abcd_abcd_efgh + return _mm256_cvtepi16_epi8(bytes); // abcd_efgh +#else + const 
__m256i lowByte = _mm256_set1_epi16( 0xFF ); + __m256i high = _mm256_andnot_si256( lowByte, bytes ); + __m256i low = _mm256_and_si256( lowByte, bytes ); + high = _mm256_srli_epi16( high, 4 ); + bytes = _mm256_or_si256( low, high ); + + // Compress uint16_t lanes into bytes + __m128i r0 = _mm256_castsi256_si128( bytes ); + __m128i r1 = _mm256_extracti128_si256( bytes, 1 ); + return _mm_packus_epi16( r0, r1 ); +#endif +} +#elif defined(__AVX__) +// spread 32 bits to 32 bytes { 0x00, 0xFF } +static inline __m256i bytes_from_bits_32(const uint8_t * x) { + uint32_t x32; + memcpy(&x32, x, sizeof(uint32_t)); + const __m128i shuf_maskl = _mm_set_epi64x(0x0101010101010101, 0x0000000000000000); + const __m128i shuf_maskh = _mm_set_epi64x(0x0303030303030303, 0x0202020202020202); + __m128i bytesl = _mm_shuffle_epi8(_mm_set1_epi32(x32), shuf_maskl); + __m128i bytesh = _mm_shuffle_epi8(_mm_set1_epi32(x32), shuf_maskh); + const __m128i bit_mask = _mm_set1_epi64x(0x7fbfdfeff7fbfdfe); + bytesl = _mm_or_si128(bytesl, bit_mask); + bytesh = _mm_or_si128(bytesh, bit_mask); + bytesl = _mm_cmpeq_epi8(bytesl, _mm_set1_epi64x(-1)); + bytesh = _mm_cmpeq_epi8(bytesh, _mm_set1_epi64x(-1)); + return MM256_SET_M128I(bytesh, bytesl); +} + +// Unpack 32 4-bit fields into 32 bytes +// The output vector contains 32 bytes, each one in [ 0 .. 
15 ] interval +static inline __m256i bytes_from_nibbles_32(const uint8_t * rsi) +{ + // Load 16 bytes from memory + __m128i tmpl = _mm_loadu_si128((const __m128i *)rsi); + __m128i tmph = _mm_srli_epi16(tmpl, 4); + const __m128i lowMask = _mm_set1_epi8(0xF); + tmpl = _mm_and_si128(lowMask, tmpl); + tmph = _mm_and_si128(lowMask, tmph); + return MM256_SET_M128I(tmph, tmpl); +} + +// add int16_t pairwise and return as float vector +static inline __m256 sum_i16_pairs_float(const __m128i xh, const __m128i xl) { + const __m128i ones = _mm_set1_epi16(1); + const __m128i summed_pairsl = _mm_madd_epi16(ones, xl); + const __m128i summed_pairsh = _mm_madd_epi16(ones, xh); + const __m256i summed_pairs = MM256_SET_M128I(summed_pairsh, summed_pairsl); + return _mm256_cvtepi32_ps(summed_pairs); +} + +static inline __m256 mul_sum_us8_pairs_float(const __m256i ax, const __m256i sy) { + const __m128i axl = _mm256_castsi256_si128(ax); + const __m128i axh = _mm256_extractf128_si256(ax, 1); + const __m128i syl = _mm256_castsi256_si128(sy); + const __m128i syh = _mm256_extractf128_si256(sy, 1); + // Perform multiplication and create 16-bit values + const __m128i dotl = _mm_maddubs_epi16(axl, syl); + const __m128i doth = _mm_maddubs_epi16(axh, syh); + return sum_i16_pairs_float(doth, dotl); +} + +// multiply int8_t, add results pairwise twice and return as float vector +static inline __m256 mul_sum_i8_pairs_float(const __m256i x, const __m256i y) { + const __m128i xl = _mm256_castsi256_si128(x); + const __m128i xh = _mm256_extractf128_si256(x, 1); + const __m128i yl = _mm256_castsi256_si128(y); + const __m128i yh = _mm256_extractf128_si256(y, 1); + // Get absolute values of x vectors + const __m128i axl = _mm_sign_epi8(xl, xl); + const __m128i axh = _mm_sign_epi8(xh, xh); + // Sign the values of the y vectors + const __m128i syl = _mm_sign_epi8(yl, xl); + const __m128i syh = _mm_sign_epi8(yh, xh); + // Perform multiplication and create 16-bit values + const __m128i dotl = 
_mm_maddubs_epi16(axl, syl); + const __m128i doth = _mm_maddubs_epi16(axh, syh); + return sum_i16_pairs_float(doth, dotl); +} + +static inline __m128i packNibbles( __m128i bytes1, __m128i bytes2 ) +{ + // Move bits within 16-bit lanes from 0000_abcd_0000_efgh into 0000_0000_abcd_efgh + const __m128i lowByte = _mm_set1_epi16( 0xFF ); + __m128i high = _mm_andnot_si128( lowByte, bytes1 ); + __m128i low = _mm_and_si128( lowByte, bytes1 ); + high = _mm_srli_epi16( high, 4 ); + bytes1 = _mm_or_si128( low, high ); + high = _mm_andnot_si128( lowByte, bytes2 ); + low = _mm_and_si128( lowByte, bytes2 ); + high = _mm_srli_epi16( high, 4 ); + bytes2 = _mm_or_si128( low, high ); + + return _mm_packus_epi16( bytes1, bytes2); +} + +static inline __m128i mul_add_epi8_sse(const __m128i x, const __m128i y) { + const __m128i ax = _mm_sign_epi8(x, x); + const __m128i sy = _mm_sign_epi8(y, x); + return _mm_maddubs_epi16(ax, sy); +} +#endif +#elif defined(__SSSE3__) +// horizontally add 4x4 floats +static inline float hsum_float_4x4(const __m128 a, const __m128 b, const __m128 c, const __m128 d) { + __m128 res_0 =_mm_hadd_ps(a, b); + __m128 res_1 =_mm_hadd_ps(c, d); + __m128 res =_mm_hadd_ps(res_0, res_1); + res =_mm_hadd_ps(res, res); + res =_mm_hadd_ps(res, res); + + return _mm_cvtss_f32(res); +} +#endif // __AVX__ || __AVX2__ || __AVX512F__ +#endif // defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) + +#if defined(__ARM_NEON) || defined(__wasm_simd128__) || defined(__POWER9_VECTOR__) +#define B1(c,s,n) 0x ## n ## c , 0x ## n ## s +#define B2(c,s,n) B1(c,s,n ## c), B1(c,s,n ## s) +#define B3(c,s,n) B2(c,s,n ## c), B2(c,s,n ## s) +#define B4(c,s,n) B3(c,s,n ## c), B3(c,s,n ## s) +#define B5(c,s,n) B4(c,s,n ## c), B4(c,s,n ## s) +#define B6(c,s,n) B5(c,s,n ## c), B5(c,s,n ## s) +#define B7(c,s,n) B6(c,s,n ## c), B6(c,s,n ## s) +#define B8(c,s ) B7(c,s, c), B7(c,s, s) + +// precomputed tables for expanding 8bits to 8 bytes: +static const uint64_t 
table_b2b_0[1 << 8] = { B8(00, 10) }; // ( b) << 4
static const uint64_t table_b2b_1[1 << 8] = { B8(10, 00) }; // (!b) << 4
#endif

#if defined(__loongarch_asx)

// Register-name prefixes used by the .irp/.ifc inline-asm tricks below to
// discover which physical register the compiler assigned to an operand.
#ifdef __clang__
#define VREGS_PREFIX "$vr"
#define XREGS_PREFIX "$xr"
#else // GCC
#define VREGS_PREFIX "$f"
#define XREGS_PREFIX "$f"
#endif
#define __ALL_REGS "0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31"
// Convert __m128i to __m256i
static inline __m256i ____m256i(__m128i in) {
    __m256i out = __lasx_xvldi(0);
    __asm__ volatile (
        ".irp i," __ALL_REGS "\n\t"
        " .ifc %[out], " XREGS_PREFIX"\\i \n\t"
        " .irp j," __ALL_REGS "\n\t"
        " .ifc %[in], " VREGS_PREFIX "\\j \n\t"
        " xvpermi.q $xr\\i, $xr\\j, 0x20 \n\t"
        " .endif \n\t"
        " .endr \n\t"
        " .endif \n\t"
        ".endr \n\t"
        : [out] "+f" (out) : [in] "f" (in)
    );
    return out;
}
// Convert two __m128i to __m256i
static inline __m256i lasx_set_q(__m128i inhi, __m128i inlo) {
    __m256i out;
    __asm__ volatile (
        ".irp i," __ALL_REGS "\n\t"
        " .ifc %[hi], " VREGS_PREFIX "\\i \n\t"
        " .irp j," __ALL_REGS "\n\t"
        " .ifc %[lo], " VREGS_PREFIX "\\j \n\t"
        " xvpermi.q $xr\\i, $xr\\j, 0x20 \n\t"
        " .endif \n\t"
        " .endr \n\t"
        " .endif \n\t"
        ".endr \n\t"
        ".ifnc %[out], %[hi] \n\t"
        ".irp i," __ALL_REGS "\n\t"
        " .ifc %[out], " XREGS_PREFIX "\\i \n\t"
        " .irp j," __ALL_REGS "\n\t"
        " .ifc %[hi], " VREGS_PREFIX "\\j \n\t"
        " xvori.b $xr\\i, $xr\\j, 0 \n\t"
        " .endif \n\t"
        " .endr \n\t"
        " .endif \n\t"
        ".endr \n\t"
        ".endif \n\t"
        : [out] "=f" (out), [hi] "+f" (inhi)
        : [lo] "f" (inlo)
    );
    return out;
}
// Convert __m256i low part to __m128i
static inline __m128i lasx_extracti128_lo(__m256i in) {
    __m128i out;
    __asm__ volatile (
        ".ifnc %[out], %[in] \n\t"
        ".irp i," __ALL_REGS "\n\t"
        " .ifc %[out], " VREGS_PREFIX "\\i \n\t"
        " .irp j," __ALL_REGS "\n\t"
        " .ifc %[in], " XREGS_PREFIX "\\j \n\t"
        " vori.b $vr\\i, $vr\\j, 0 \n\t"
        " .endif \n\t"
        " .endr \n\t"
        " .endif \n\t"
        ".endr \n\t"
        ".endif \n\t"
        : [out] "=f" (out) : [in] "f" (in)
    );
    return out;
}
// Convert __m256i high part to __m128i
static inline __m128i lasx_extracti128_hi(__m256i in) {
    __m128i out;
    __asm__ volatile (
        ".irp i," __ALL_REGS "\n\t"
        " .ifc %[out], " VREGS_PREFIX "\\i \n\t"
        " .irp j," __ALL_REGS "\n\t"
        " .ifc %[in], " XREGS_PREFIX "\\j \n\t"
        " xvpermi.q $xr\\i, $xr\\j, 0x11 \n\t"
        " .endif \n\t"
        " .endr \n\t"
        " .endif \n\t"
        ".endr \n\t"
        : [out] "=f" (out) : [in] "f" (in)
    );
    return out;
}

// build a 256-bit vector from eight int32 lanes (e0 is lane 0)
static __m256i lasx_set_w(int e7, int e6, int e5, int e4, int e3, int e2, int e1, int e0) {
    v8i32 __ret = {e0, e1, e2, e3, e4, e5, e6, e7};
    return (__m256i)__ret;
}

// build a 128-bit vector from four int32 lanes (d is lane 0)
static __m128i lsx_set_w(int32_t a, int32_t b, int32_t c, int32_t d) {
    v4i32 __ret = {d, c, b, a};
    return (__m128i)__ret;
}

// build a 256-bit vector from four int64 lanes (d is lane 0)
static __m256i lasx_set_d(int64_t a, int64_t b, int64_t c, int64_t d) {
    v4i64 __ret = {d, c, b, a};
    return (__m256i)__ret;
}

// x becomes the high 128 bits, y the low 128 bits
static __m256i lasx_insertf128( __m128i x, __m128i y) {
    return lasx_set_q(x, y);
}

// byte shuffle with pshufb-style semantics: indices with the sign bit set
// produce zero, otherwise select by the low 4 index bits
static __m128i lsx_shuffle_b(__m128i a, __m128i b) {
    __m128i mask_f, zero, tmp0, tmp2, mask;
    int f = 0x8f;
    mask_f = __lsx_vreplgr2vr_b(f);
    zero = __lsx_vldi(0);
    tmp0 = __lsx_vand_v(b, mask_f); // get mask with low 4 bit and sign bits
    tmp0 = __lsx_vori_b(tmp0, 0x10); // make each mask or with 0x10 prepare for positive
    mask = __lsx_vsle_b(zero, tmp0); // if mask >= 0, set mask
    tmp2 = __lsx_vand_v(tmp0, mask); // maskout the in2 < ones
    return __lsx_vshuf_b(a, zero, tmp2);
}

// 256-bit variant of lsx_shuffle_b (per-128-bit-lane shuffle)
static __m256i lasx_shuffle_b(__m256i a, __m256i b) {
    __m256i mask_f, zero, tmp0, tmp2, mask;
    int f = 0x8f;
    mask_f = __lasx_xvreplgr2vr_b(f);
    zero = __lasx_xvldi(0);
    tmp0 = __lasx_xvand_v(b, mask_f); // get mask with low 4 bit and sign bits
    tmp0 = __lasx_xvori_b(tmp0, 0x10); // make each mask or with 0x10 prepare for positive
    mask = __lasx_xvsle_b(zero, tmp0); // if mask >= 0, set mask
    tmp2 = __lasx_xvand_v(tmp0, mask); // maskout the in2 < ones
    return __lasx_xvshuf_b(a, zero, tmp2);
}

// zero-extend 16 uint8 lanes to 16 uint16 lanes (interleave with zero)
static __m256i lasx_extu8_16(__m128i a) {
    __m128i zero = __lsx_vldi(0);
    __m128i vlo = __lsx_vilvl_b(zero, a);
    __m128i vhi = __lsx_vilvh_b(zero, a);
    return lasx_set_q(vhi, vlo);
}

// sign-extend 16 int8 lanes to 16 int16 lanes (interleave with sign bytes)
static __m256i lasx_ext8_16(__m128i a) {
    __m128i sign = __lsx_vslti_b(a, 0);
    __m128i vlo = __lsx_vilvl_b(sign, a);
    __m128i vhi = __lsx_vilvh_b(sign, a);
    return lasx_set_q(vhi, vlo);
}

// sign-extend 8 int16 lanes to 8 int32 lanes, one element at a time
static __m256i lasx_ext16_32(__m128i a) {
    // NOTE(review): tmp1 is read by the first __lasx_xvinsgr2vr_w before it
    // has ever been written — technically undefined behavior in C, even
    // though every lane is subsequently overwritten. Consider initializing
    // it (e.g. __lasx_xvldi(0)); confirm against upstream before changing.
    __m256i tmp1;
    tmp1 = __lasx_xvinsgr2vr_w(tmp1, __lsx_vpickve2gr_h(a, 0), 0);
    tmp1 = __lasx_xvinsgr2vr_w(tmp1, __lsx_vpickve2gr_h(a, 1), 1);
    tmp1 = __lasx_xvinsgr2vr_w(tmp1, __lsx_vpickve2gr_h(a, 2), 2);
    tmp1 = __lasx_xvinsgr2vr_w(tmp1, __lsx_vpickve2gr_h(a, 3), 3);
    tmp1 = __lasx_xvinsgr2vr_w(tmp1, __lsx_vpickve2gr_h(a, 4), 4);
    tmp1 = __lasx_xvinsgr2vr_w(tmp1, __lsx_vpickve2gr_h(a, 5), 5);
    tmp1 = __lasx_xvinsgr2vr_w(tmp1, __lsx_vpickve2gr_h(a, 6), 6);
    tmp1 = __lasx_xvinsgr2vr_w(tmp1, __lsx_vpickve2gr_h(a, 7), 7);
    return tmp1;
}

// extract 128-bit half selected by pos (0 = low, otherwise high)
static __m128i lasx_extracti128( __m256i a, int pos) {
    __m128i ret;
    if( pos == 0)
    {
       ret = lasx_extracti128_lo(a);
    } else {
       ret = lasx_extracti128_hi(a);
    }
    return ret;
}

// float flavor of lasx_extracti128
static __m128 lasx_extractf128( __m256 a, int pos) {
    __m128 ret;
    if( pos == 0)
    {
       ret = (__m128)lasx_extracti128_lo((__m256i)a);
    } else {
       ret = (__m128)lasx_extracti128_hi((__m256i)a);
    }
    return ret;
}

// horizontal pairwise add of int16 lanes across a and b (hadd-style)
static __m128i lsx_hadd_h(__m128i a, __m128i b) {
    __m128i tmp1 = __lsx_vpickev_h(b, a);
    __m128i tmp2 = __lsx_vpickod_h(b, a);
    return __lsx_vadd_h(tmp1, tmp2);
}

// horizontal pairwise add of int32 lanes across a and b
static __m128i lsx_hadd_w(__m128i a, __m128i b) {
    __m128i tmp1 = __lsx_vpickev_w(b, a);
    __m128i tmp2 = __lsx_vpickod_w(b, a);
    return __lsx_vadd_w(tmp1, tmp2);
}

// horizontal pairwise add of float lanes across a and b
static __m128 lsx_hadd_s(__m128 a, __m128 b) {
    __m128 tmp1 = (__m128)__lsx_vpickev_w((__m128i)b, (__m128i)a);
    __m128 tmp2 = (__m128)__lsx_vpickod_w((__m128i)b, (__m128i)a);

    return __lsx_vfadd_s(tmp1, tmp2);
}

// maddubs-style: multiply even/odd byte pairs, saturating-add into int16
static __m256i lasx_maddubs_h(__m256i a, __m256i b) {
    __m256i tmp1, tmp2;
    tmp1 = __lasx_xvmulwev_h_b(a, b);
    tmp2 = __lasx_xvmulwod_h_b(a, b);
    return __lasx_xvsadd_h(tmp1, tmp2);
}

// madd-style: multiply even/odd int16 pairs, add into int32
static __m256i lasx_madd_h(__m256i a, __m256i b) {
    __m256i tmp1, tmp2;
    tmp1 = __lasx_xvmulwev_w_h(a, b);
    tmp2 = __lasx_xvmulwod_w_h(a, b);
    return __lasx_xvadd_w(tmp1, tmp2);
}

// pack int32 lanes to int16 with signed saturation (vsat then pickev)
static __m256i lasx_packs_w(__m256i a, __m256i b) {
    __m256i tmp, tmp1;
    tmp = __lasx_xvsat_w(a, 15);
    tmp1 = __lasx_xvsat_w(b, 15);
    return __lasx_xvpickev_h(tmp1, tmp);
}

// pack int16 lanes to int8 with signed saturation
static __m256i lasx_packs_h(__m256i a, __m256i b) {
    __m256i tmp, tmp1;
    tmp = __lasx_xvsat_h(a, 7);
    tmp1 = __lasx_xvsat_h(b, 7);
    return __lasx_xvpickev_b(tmp1, tmp);
}

// 128-bit variants of the saturating packs above
static __m128i lsx_packs_w(__m128i a, __m128i b) {
    __m128i tmp, tmp1;
    tmp = __lsx_vsat_w(a, 15);
    tmp1 = __lsx_vsat_w(b, 15);
    return __lsx_vpickev_h(tmp1, tmp);
}

static __m128i lsx_packs_h(__m128i a, __m128i b) {
    __m128i tmp, tmp1;
    tmp = __lsx_vsat_h(a, 7);
    tmp1 = __lsx_vsat_h(b, 7);
    return __lsx_vpickev_b(tmp1, tmp);
}

// pack int16 lanes to uint8 with unsigned saturation
static __m128i lsx_packus_h(__m128i a, __m128i b) {
    __m128i tmp, tmp1;
    tmp = __lsx_vsat_hu(a, 7);
    tmp1 = __lsx_vsat_hu(b, 7);
    return __lsx_vpickev_b(tmp1, tmp);
}


static __m128i lsx_maddubs_h(__m128i a, __m128i b) {
    __m128i tmp1, tmp2;
    tmp1 = __lsx_vmulwev_h_b(a, b);
    tmp2 = __lsx_vmulwod_h_b(a, b);
    return __lsx_vsadd_h(tmp1, tmp2);
}

static __m128i lsx_madd_h(__m128i a, __m128i b) {
    __m128i tmp1, tmp2;
    tmp1 = __lsx_vmulwev_w_h(a, b);
    tmp2 = __lsx_vmulwod_w_h(a, b);
    return __lsx_vadd_w(tmp1, tmp2);
}

// multiply int8_t, add results pairwise twice
static inline __m128i mul_sum_i8_pairs(const __m128i x, const __m128i y) {
    // Get absolute values of x vectors
    const __m128i ax = __lsx_vsigncov_b(x, x);
    // Sign the values of the y vectors
    const __m128i sy = __lsx_vsigncov_b(x, y);
    // Perform multiplication and create 16-bit values
    const __m128i dot = lsx_maddubs_h(ax, sy);
    const __m128i ones = __lsx_vreplgr2vr_h(1);
    return lsx_madd_h(ones, dot);
}

// horizontally add 8 floats
static inline float hsum_float_8(const __m256 x) {
    __m128 res = lasx_extractf128(x, 1);
    ft_union tmp;
    res = __lsx_vfadd_s(res, lasx_extractf128(x, 0));
    res = __lsx_vfadd_s(res, (__m128)__lsx_vpickod_d((__m128i)res, (__m128i)res));
    res = __lsx_vfadd_s(res, (__m128)__lsx_vinsgr2vr_w(__lsx_vldi(0), __lsx_vpickve2gr_w(res, 1), 0));
    tmp.i = __lsx_vpickve2gr_w(res, 0);
    return tmp.f;
}

// horizontally add 8 int32_t
static inline int hsum_i32_8(const __m256i a) {

    __m256i tmp1 = __lasx_xvpermi_q(a, a, 0x11);
    __m256i tmp2 = __lasx_xvpermi_q(a, a, 0x00);

    __m128i tmp1_128 = lasx_extracti128_lo(tmp1);
    __m128i tmp2_128 = lasx_extracti128_lo(tmp2);

    __m128i sum128 = __lsx_vadd_w(tmp1_128, tmp2_128);

    __m128i ev = __lsx_vpickev_w(sum128, sum128);
    __m128i od = __lsx_vpickod_w(sum128, sum128);
    __m128i sum64 = __lsx_vadd_w(ev, od);

    int sum64_1, sum64_2;
    sum64_1 = __lsx_vpickve2gr_w(sum64, 0);
    sum64_2 = __lsx_vpickve2gr_w(sum64, 1);

    return  sum64_1 + sum64_2;
}

// horizontally add 4 int32_t
static inline int hsum_i32_4(const __m128i a) {
    __m128i ev = __lsx_vpickev_w(a, a);
    __m128i od = __lsx_vpickod_w(a, a);
    __m128i sum64 = __lsx_vadd_w(ev, od);

    int sum64_1, sum64_2;
    sum64_1 = __lsx_vpickve2gr_w(sum64, 0);
    sum64_2 = __lsx_vpickve2gr_w(sum64, 1);

    return  sum64_1 + sum64_2;
}

// spread 32 bits to 32 bytes { 0x00, 0xFF }
static inline __m256i bytes_from_bits_32(const uint8_t * x) {

    uint32_t x32;
    memcpy(&x32, x, sizeof(uint32_t));
    const __m256i shuf_mask = lasx_set_d(
            0x0303030303030303, 0x0202020202020202,
            0x0101010101010101, 0x0000000000000000);

    __m256i bytes = lasx_shuffle_b(__lasx_xvreplgr2vr_w(x32), shuf_mask);
    const __m256i bit_mask = __lasx_xvreplgr2vr_d(0x7fbfdfeff7fbfdfe);
    bytes = __lasx_xvor_v(bytes, bit_mask);
    return __lasx_xvseq_b(bytes, __lasx_xvreplgr2vr_d(-1));
}

// Unpack 32 4-bit fields into 32 bytes
// The output vector contains 32 bytes, each one in [ 0 .. 15 ] interval
static inline __m256i bytes_from_nibbles_32(const uint8_t * rsi) {
    const __m128i lo = __lsx_vld((const __m128i *)rsi, 0);
    __m128i hi = __lsx_vsrli_h(lo, 4);
    return __lasx_xvandi_b(lasx_insertf128(hi, lo), 0xf);
}

// add int16_t pairwise and return as float vector
static inline __m256 sum_i16_pairs_float(const __m256i x) {
    __m256i v = __lasx_xvpackod_h(x, x);
    __m256i summed_pairs = __lasx_xvaddwev_w_h(x, v);
    return __lasx_xvffint_s_w(summed_pairs);
}

// dot product of non-negative ax with signed sy, returned as floats
static inline __m256 mul_sum_us8_pairs_float(const __m256i ax, const __m256i sy) {
    // Perform multiplication and create 16-bit values
    const __m256i dot = lasx_maddubs_h(ax, sy);
    return sum_i16_pairs_float(dot);
}

// multiply int8_t, add results pairwise twice and return as float vector
static inline __m256 mul_sum_i8_pairs_float(const __m256i x, const __m256i y) {

    // Get absolute values of x vectors
    const __m256i ax = __lasx_xvsigncov_b(x, x);
    // Sign the values of the y vectors
    const __m256i sy = __lasx_xvsigncov_b(x, y);

    return mul_sum_us8_pairs_float(ax, sy);
}

// pack low nibbles of 16-bit lanes of a 256-bit vector into 16 bytes
static inline __m128i packNibbles( __m256i bytes ) {
    // Move bits within 16-bit lanes from 0000_abcd_0000_efgh into 0000_0000_abcd_efgh
    const __m256i lowByte = __lasx_xvreplgr2vr_h(0xFF);
    __m256i high = __lasx_xvandn_v(lowByte, bytes);
    __m256i low = __lasx_xvand_v(lowByte, bytes);
    high = __lasx_xvsrli_h(high, 4);
    bytes = __lasx_xvor_v(low, high);
    // Compress uint16_t lanes into bytes
    __m128i *r0 = (__m128i *)&bytes;
    __m256i tmp_h128 = __lasx_xvpermi_q(bytes, bytes, 0x11);
    __m128i *r1 = (__m128i *)&tmp_h128;

    __m128i zero = __lsx_vldi(0);
    __m128i tmp, tmp2, tmp3;

    tmp = __lsx_vmax_h(zero, *r0);
    tmp2 = __lsx_vsat_hu(tmp, 7);
    tmp = __lsx_vmax_h(zero, *r1);
    tmp3 = __lsx_vsat_hu(tmp, 7);
    return __lsx_vpickev_b(tmp3, tmp2);
}
#endif //__loongarch_asx

// reference implementation for deterministic creation of model files
// Each q4_0 block stores one fp16 scale d and 32 4-bit quants, packed two
// per byte (low nibble = first half of the block, high nibble = second half).
void quantize_row_q4_0_ref(const float * restrict x, block_q4_0 * restrict y, int64_t k) {
    static const int qk = QK4_0;

    assert(k % qk == 0);

    const int nb = k / qk;

    for (int i = 0; i < nb; i++) {
        float amax = 0.0f; // absolute max
        float max  = 0.0f; // signed value at which |x| peaks

        for (int j = 0; j < qk; j++) {
            const float v = x[i*qk + j];
            if (amax < fabsf(v)) {
                amax = fabsf(v);
                max  = v;
            }
        }

        // scale chosen so the (signed) max maps to -8, the most negative quant
        const float d  = max / -8;
        const float id = d ? 1.0f/d : 0.0f;

        y[i].d = GGML_FP32_TO_FP16(d);

        for (int j = 0; j < qk/2; ++j) {
            const float x0 = x[i*qk + 0    + j]*id;
            const float x1 = x[i*qk + qk/2 + j]*id;

            // +8.5 shifts to [0,16] with round-to-nearest; MIN clamps to 15
            const uint8_t xi0 = MIN(15, (int8_t)(x0 + 8.5f));
            const uint8_t xi1 = MIN(15, (int8_t)(x1 + 8.5f));

            y[i].qs[j]  = xi0;
            y[i].qs[j] |= xi1 << 4;
        }
    }
}

// q4_0 quantization always goes through the deterministic reference path.
void quantize_row_q4_0(const float * restrict x, void * restrict y, int64_t k) {
    quantize_row_q4_0_ref(x, y, k);
}


// Like q4_0 but with an explicit min m, so the quants cover [min, max]
// instead of being symmetric around zero.
void quantize_row_q4_1_ref(const float * restrict x, block_q4_1 * restrict y, int64_t k) {
    const int qk = QK4_1;

    assert(k % qk == 0);

    const int nb = k / qk;

    for (int i = 0; i < nb; i++) {
        float min = FLT_MAX;
        float max = -FLT_MAX;

        for (int j = 0; j < qk; j++) {
            const float v = x[i*qk + j];

            if (v < min) min = v;
            if (v > max) max = v;
        }

        const float d  = (max - min) / ((1 << 4) - 1);
        const float id = d ? 1.0f/d : 0.0f;

        y[i].d = GGML_FP32_TO_FP16(d);
        y[i].m = GGML_FP32_TO_FP16(min);

        for (int j = 0; j < qk/2; ++j) {
            const float x0 = (x[i*qk + 0    + j] - min)*id;
            const float x1 = (x[i*qk + qk/2 + j] - min)*id;

            const uint8_t xi0 = MIN(15, (int8_t)(x0 + 0.5f));
            const uint8_t xi1 = MIN(15, (int8_t)(x1 + 0.5f));

            y[i].qs[j]  = xi0;
            y[i].qs[j] |= xi1 << 4;
        }
    }
}

void quantize_row_q4_1(const float * restrict x, void * restrict y, int64_t k) {
    quantize_row_q4_1_ref(x, y, k);
}

// 5-bit analogue of q4_0: low 4 bits packed in qs, the 5th bit of each
// quant collected into the 32-bit bitfield qh.
void quantize_row_q5_0_ref(const float * restrict x, block_q5_0 * restrict y, int64_t k) {
    static const int qk = QK5_0;

    assert(k % qk == 0);

    const int nb = k / qk;

    for (int i = 0; i < nb; i++) {
        float amax = 0.0f; // absolute max
        float max  = 0.0f;

        for (int j = 0; j < qk; j++) {
            const float v = x[i*qk + j];
            if (amax < fabsf(v)) {
                amax = fabsf(v);
                max  = v;
            }
        }

        // signed max maps to -16, the most negative 5-bit quant
        const float d  = max / -16;
        const float id = d ? 1.0f/d : 0.0f;

        y[i].d = GGML_FP32_TO_FP16(d);

        uint32_t qh = 0;

        for (int j = 0; j < qk/2; ++j) {
            const float x0 = x[i*qk + 0    + j]*id;
            const float x1 = x[i*qk + qk/2 + j]*id;

            const uint8_t xi0 = MIN(31, (int8_t)(x0 + 16.5f));
            const uint8_t xi1 = MIN(31, (int8_t)(x1 + 16.5f));

            y[i].qs[j] = (xi0 & 0x0F) | ((xi1 & 0x0F) << 4);

            // get the 5-th bit and store it in qh at the right position
            qh |= ((xi0 & 0x10u) >> 4) << (j + 0);
            qh |= ((xi1 & 0x10u) >> 4) << (j + qk/2);
        }

        memcpy(&y[i].qh, &qh, sizeof(qh));
    }
}

void quantize_row_q5_0(const float * restrict x, void * restrict y, int64_t k) {
    quantize_row_q5_0_ref(x, y, k);
}

// 5-bit analogue of q4_1: min/scale pair plus the qh bitfield of 5th bits.
void quantize_row_q5_1_ref(const float * restrict x, block_q5_1 * restrict y, int64_t k) {
    const int qk = QK5_1;

    assert(k % qk == 0);

    const int nb = k / qk;

    for (int i = 0; i < nb; i++) {
        float min = FLT_MAX;
        float max = -FLT_MAX;

        for (int j = 0; j < qk; j++) {
            const float v = x[i*qk + j];

            if (v < min) min = v;
            if (v > max) max = v;
        }

        const float d  = (max - min) / ((1 << 5) - 1);
        const float id = d ? 1.0f/d : 0.0f;

        y[i].d = GGML_FP32_TO_FP16(d);
        y[i].m = GGML_FP32_TO_FP16(min);

        uint32_t qh = 0;

        for (int j = 0; j < qk/2; ++j) {
            const float x0 = (x[i*qk + 0    + j] - min)*id;
            const float x1 = (x[i*qk + qk/2 + j] - min)*id;

            // no clamp needed: (x - min)*id is in [0, 31] by construction
            const uint8_t xi0 = (uint8_t)(x0 + 0.5f);
            const uint8_t xi1 = (uint8_t)(x1 + 0.5f);

            y[i].qs[j] = (xi0 & 0x0F) | ((xi1 & 0x0F) << 4);

            // get the 5-th bit and store it in qh at the right position
            qh |= ((xi0 & 0x10u) >> 4) << (j + 0);
            qh |= ((xi1 & 0x10u) >> 4) << (j + qk/2);
        }

        memcpy(&y[i].qh, &qh, sizeof(y[i].qh));
    }
}

void quantize_row_q5_1(const float * restrict x, void * restrict y, int64_t k) {
    quantize_row_q5_1_ref(x, y, k);
}

// reference implementation for deterministic creation of model files
// q8_0 block: one fp16 scale d and 32 signed 8-bit quants.
void quantize_row_q8_0_ref(const float * restrict x, block_q8_0 * restrict y, int64_t k) {
    assert(k % QK8_0 == 0);
    const int nb = k / QK8_0;

    for (int i = 0; i < nb; i++) {
        float amax = 0.0f; // absolute max

        for (int j = 0; j < QK8_0; j++) {
            const float v = x[i*QK8_0 + j];
            amax = MAX(amax, fabsf(v));
        }

        const float d  = amax / ((1 << 7) - 1);
        const float id = d ?
1.0f/d : 0.0f;

        y[i].d = GGML_FP32_TO_FP16(d);

        for (int j = 0; j < QK8_0; ++j) {
            const float x0 = x[i*QK8_0 + j]*id;

            y[i].qs[j] = roundf(x0);
        }
    }
}

// Arch-dispatched q8_0 quantization. Every path computes the same
// per-block absolute max, fp16 scale, and 32 rounded int8 quants; the
// scalar reference implementation is the fallback when no SIMD is built.
void quantize_row_q8_0(const float * restrict x, void * restrict vy, int64_t k) {
    assert(QK8_0 == 32);
    assert(k % QK8_0 == 0);
    const int nb = k / QK8_0;

    block_q8_0 * restrict y = vy;

#if defined(__ARM_NEON)
    for (int i = 0; i < nb; i++) {
        float32x4_t srcv [8];
        float32x4_t asrcv[8];
        float32x4_t amaxv[8];

        for (int j = 0; j < 8; j++) srcv[j]  = vld1q_f32(x + i*32 + 4*j);
        for (int j = 0; j < 8; j++) asrcv[j] = vabsq_f32(srcv[j]);

        // pairwise max tree: 8 vectors -> 4 -> 2 -> 1
        for (int j = 0; j < 4; j++) amaxv[2*j] = vmaxq_f32(asrcv[2*j], asrcv[2*j+1]);
        for (int j = 0; j < 2; j++) amaxv[4*j] = vmaxq_f32(amaxv[4*j], amaxv[4*j+2]);
        for (int j = 0; j < 1; j++) amaxv[8*j] = vmaxq_f32(amaxv[8*j], amaxv[8*j+4]);

        const float amax = vmaxvq_f32(amaxv[0]);

        const float d = amax / ((1 << 7) - 1);
        const float id = d ? 1.0f/d : 0.0f;

        y[i].d = GGML_FP32_TO_FP16(d);

        for (int j = 0; j < 8; j++) {
            const float32x4_t v  = vmulq_n_f32(srcv[j], id);
            const int32x4_t   vi = vcvtnq_s32_f32(v);

            y[i].qs[4*j + 0] = vgetq_lane_s32(vi, 0);
            y[i].qs[4*j + 1] = vgetq_lane_s32(vi, 1);
            y[i].qs[4*j + 2] = vgetq_lane_s32(vi, 2);
            y[i].qs[4*j + 3] = vgetq_lane_s32(vi, 3);
        }
    }
#elif defined(__wasm_simd128__)
    for (int i = 0; i < nb; i++) {
        v128_t srcv [8];
        v128_t asrcv[8];
        v128_t amaxv[8];

        for (int j = 0; j < 8; j++) srcv[j]  = wasm_v128_load(x + i*32 + 4*j);
        for (int j = 0; j < 8; j++) asrcv[j] = wasm_f32x4_abs(srcv[j]);

        for (int j = 0; j < 4; j++) amaxv[2*j] = wasm_f32x4_max(asrcv[2*j], asrcv[2*j+1]);
        for (int j = 0; j < 2; j++) amaxv[4*j] = wasm_f32x4_max(amaxv[4*j], amaxv[4*j+2]);
        for (int j = 0; j < 1; j++) amaxv[8*j] = wasm_f32x4_max(amaxv[8*j], amaxv[8*j+4]);

        const float amax = MAX(MAX(wasm_f32x4_extract_lane(amaxv[0], 0),
                                   wasm_f32x4_extract_lane(amaxv[0], 1)),
                               MAX(wasm_f32x4_extract_lane(amaxv[0], 2),
                                   wasm_f32x4_extract_lane(amaxv[0], 3)));

        const float d = amax / ((1 << 7) - 1);
        const float id = d ? 1.0f/d : 0.0f;

        y[i].d = GGML_FP32_TO_FP16(d);

        for (int j = 0; j < 8; j++) {
            const v128_t v  = wasm_f32x4_mul(srcv[j], wasm_f32x4_splat(id));
            const v128_t vi = wasm_i32x4_trunc_sat_f32x4(v);

            y[i].qs[4*j + 0] = wasm_i32x4_extract_lane(vi, 0);
            y[i].qs[4*j + 1] = wasm_i32x4_extract_lane(vi, 1);
            y[i].qs[4*j + 2] = wasm_i32x4_extract_lane(vi, 2);
            y[i].qs[4*j + 3] = wasm_i32x4_extract_lane(vi, 3);
        }
    }
#elif defined(__AVX2__) || defined(__AVX__)
    for (int i = 0; i < nb; i++) {
        // Load elements into 4 AVX vectors
        __m256 v0 = _mm256_loadu_ps( x );
        __m256 v1 = _mm256_loadu_ps( x + 8 );
        __m256 v2 = _mm256_loadu_ps( x + 16 );
        __m256 v3 = _mm256_loadu_ps( x + 24 );
        x += 32;

        // Compute max(abs(e)) for the block
        // (andnot with -0.0f clears the sign bit, i.e. fabs)
        const __m256 signBit = _mm256_set1_ps( -0.0f );
        __m256 maxAbs = _mm256_andnot_ps( signBit, v0 );
        maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v1 ) );
        maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v2 ) );
        maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v3 ) );

        __m128 max4 = _mm_max_ps( _mm256_extractf128_ps( maxAbs, 1 ), _mm256_castps256_ps128( maxAbs ) );
        max4 = _mm_max_ps( max4, _mm_movehl_ps( max4, max4 ) );
        max4 = _mm_max_ss( max4, _mm_movehdup_ps( max4 ) );
        const float maxScalar = _mm_cvtss_f32( max4 );

        // Quantize these floats
        const float d = maxScalar / 127.f;
        y[i].d = GGML_FP32_TO_FP16(d);
        const float id = ( maxScalar != 0.0f ) ? 127.f / maxScalar : 0.0f;
        const __m256 mul = _mm256_set1_ps( id );

        // Apply the multiplier
        v0 = _mm256_mul_ps( v0, mul );
        v1 = _mm256_mul_ps( v1, mul );
        v2 = _mm256_mul_ps( v2, mul );
        v3 = _mm256_mul_ps( v3, mul );

        // Round to nearest integer
        v0 = _mm256_round_ps( v0, _MM_ROUND_NEAREST );
        v1 = _mm256_round_ps( v1, _MM_ROUND_NEAREST );
        v2 = _mm256_round_ps( v2, _MM_ROUND_NEAREST );
        v3 = _mm256_round_ps( v3, _MM_ROUND_NEAREST );

        // Convert floats to integers
        __m256i i0 = _mm256_cvtps_epi32( v0 );
        __m256i i1 = _mm256_cvtps_epi32( v1 );
        __m256i i2 = _mm256_cvtps_epi32( v2 );
        __m256i i3 = _mm256_cvtps_epi32( v3 );

#if defined(__AVX2__)
        // Convert int32 to int16
        i0 = _mm256_packs_epi32( i0, i1 );	// 0, 1, 2, 3,  8, 9, 10, 11,  4, 5, 6, 7, 12, 13, 14, 15
        i2 = _mm256_packs_epi32( i2, i3 );	// 16, 17, 18, 19,  24, 25, 26, 27,  20, 21, 22, 23, 28, 29, 30, 31
        // Convert int16 to int8
        i0 = _mm256_packs_epi16( i0, i2 );	// 0, 1, 2, 3,  8, 9, 10, 11,  16, 17, 18, 19,  24, 25, 26, 27,  4, 5, 6, 7, 12, 13, 14, 15, 20, 21, 22, 23, 28, 29, 30, 31

        // We got our precious signed bytes, but the order is now wrong
        // These AVX2 pack instructions process 16-byte pieces independently
        // The following instruction is fixing the order
        const __m256i perm = _mm256_setr_epi32( 0, 4, 1, 5, 2, 6, 3, 7 );
        i0 = _mm256_permutevar8x32_epi32( i0, perm );

        _mm256_storeu_si256((__m256i *)y[i].qs, i0);
#else
        // Since we don't have in AVX some necessary functions,
        // we split the registers in half and call AVX2 analogs from SSE
        __m128i ni0 = _mm256_castsi256_si128( i0 );
        __m128i ni1 = _mm256_extractf128_si256( i0, 1);
        __m128i ni2 = _mm256_castsi256_si128( i1 );
        __m128i ni3 = _mm256_extractf128_si256( i1, 1);
        __m128i ni4 = _mm256_castsi256_si128( i2 );
        __m128i ni5 = _mm256_extractf128_si256( i2, 1);
        __m128i ni6 = _mm256_castsi256_si128( i3 );
        __m128i ni7 = _mm256_extractf128_si256( i3, 1);

        // Convert int32 to int16
        ni0 = _mm_packs_epi32( ni0, ni1 );
        ni2 = _mm_packs_epi32( ni2, ni3 );
        ni4 = _mm_packs_epi32( ni4, ni5 );
        ni6 = _mm_packs_epi32( ni6, ni7 );
        // Convert int16 to int8
        ni0 = _mm_packs_epi16( ni0, ni2 );
        ni4 = _mm_packs_epi16( ni4, ni6 );

        _mm_storeu_si128((__m128i *)(y[i].qs +  0), ni0);
        _mm_storeu_si128((__m128i *)(y[i].qs + 16), ni4);
#endif
    }
#elif defined(__riscv_v_intrinsic)

    size_t vl = __riscv_vsetvl_e32m4(QK8_0);

    for (int i = 0; i < nb; i++) {
        // load elements
        vfloat32m4_t v_x   = __riscv_vle32_v_f32m4(x+i*QK8_0, vl);

        vfloat32m4_t vfabs = __riscv_vfabs_v_f32m4(v_x, vl);
        vfloat32m1_t tmp   = __riscv_vfmv_v_f_f32m1(0.0f, vl);
        vfloat32m1_t vmax  = __riscv_vfredmax_vs_f32m4_f32m1(vfabs, tmp, vl);
        float amax = __riscv_vfmv_f_s_f32m1_f32(vmax);

        const float d = amax / ((1 << 7) - 1);
        const float id = d ? 1.0f/d : 0.0f;

        y[i].d = GGML_FP32_TO_FP16(d);

        vfloat32m4_t x0 = __riscv_vfmul_vf_f32m4(v_x, id, vl);

        // convert to integer
        vint16m2_t   vi = __riscv_vfncvt_x_f_w_i16m2(x0, vl);
        vint8m1_t    vs = __riscv_vncvt_x_x_w_i8m1(vi, vl);

        // store result
        __riscv_vse8_v_i8m1(y[i].qs , vs, vl);
    }

#elif defined(__POWER9_VECTOR__)
    for (int i = 0; i < nb; i++) {
        vector float srcv [8];
        vector float asrcv[8];
        vector float amaxv[8];
        vector signed int vi[8];

        for (int j = 0; j < 8; j++) srcv[j]  = vec_xl(0, x + i*32 + 4*j);
        for (int j = 0; j < 8; j++) asrcv[j] = vec_abs(srcv[j]);

        for (int j = 0; j < 4; j++) amaxv[2*j] = vec_max(asrcv[2*j], asrcv[2*j+1]);
        for (int j = 0; j < 2; j++) amaxv[4*j] = vec_max(amaxv[4*j], amaxv[4*j+2]);
        for (int j = 0; j < 1; j++) amaxv[8*j] = vec_max(amaxv[8*j], amaxv[8*j+4]);

        const float amax = MAX(MAX(vec_extract(amaxv[0], 0),
                                   vec_extract(amaxv[0], 1)),
                               MAX(vec_extract(amaxv[0], 2),
                                   vec_extract(amaxv[0], 3)));

        const float d = amax / ((1 << 7) - 1);
        const float id = d ? 1.0f/d : 0.0f;
        const vector float vid = vec_splats(id);

        y[i].d = GGML_FP32_TO_FP16(d);

        for (int j = 0; j < 8; j++) {
            const vector float v  = vec_round(vec_mul(srcv[j], vid));
            vi[j] = vec_cts(v, 0);
        }
        vec_xst(vec_pack(vec_pack(vi[0], vi[1]), vec_pack(vi[2], vi[3])),  0, &y[i].qs[0]);
        vec_xst(vec_pack(vec_pack(vi[4], vi[5]), vec_pack(vi[6], vi[7])), 16, &y[i].qs[0]);
    }

#elif defined(__loongarch_asx)
    for (int i = 0; i < nb; i++) {
        ft_union fi;
        // xvld offsets are in bytes: four 32-byte loads cover the 32 floats
        __m256 v0 = (__m256)__lasx_xvld( x , 0);
        __m256 v1 = (__m256)__lasx_xvld( x , 32);
        __m256 v2 = (__m256)__lasx_xvld( x , 64);
        __m256 v3 = (__m256)__lasx_xvld( x , 96);
        x += 32;

        // Compute max(abs(e)) for the block
        const __m256 sign_bit = __lasx_xvreplfr2vr_s( -0.0f );
        __m256 max_abs = (__m256)__lasx_xvandn_v( (__m256i)sign_bit, (__m256i)v0 );
        max_abs = __lasx_xvfmax_s( max_abs, (__m256)__lasx_xvandn_v( (__m256i)sign_bit, (__m256i)v1 ) );
        max_abs = __lasx_xvfmax_s( max_abs, (__m256)__lasx_xvandn_v( (__m256i)sign_bit, (__m256i)v2 ) );
        max_abs = __lasx_xvfmax_s( max_abs, (__m256)__lasx_xvandn_v( (__m256i)sign_bit, (__m256i)v3 ) );

        __m128 max4 = __lsx_vfmax_s( lasx_extractf128( max_abs, 1 ), lasx_extractf128( max_abs , 0) );
        max4 = __lsx_vfmax_s( max4, (__m128)__lsx_vpickod_d((__m128i) max4, (__m128i)max4 ) );
        __m128 tmp = max4;
        max4 = __lsx_vfmax_s( max4, (__m128)__lsx_vinsgr2vr_w(tmp, __lsx_vpickve2gr_w( max4, 1 ), 0 ));
        fi.i = __lsx_vpickve2gr_w( (__m128i)max4, 0 );
        const float max_scalar = fi.f;

        // Quantize these floats
        const float d = max_scalar / 127.f;
        y[i].d = GGML_FP32_TO_FP16(d);
        const float id = ( max_scalar != 0.0f ) ? 127.f / max_scalar : 0.0f;
        const __m256 mul = (__m256)__lasx_xvreplfr2vr_s( id );

        // Apply the multiplier
        v0 = __lasx_xvfmul_s( v0, mul );
        v1 = __lasx_xvfmul_s( v1, mul );
        v2 = __lasx_xvfmul_s( v2, mul );
        v3 = __lasx_xvfmul_s( v3, mul );

        // Round to nearest integer
        __m256i i0 = __lasx_xvftintrne_w_s( v0 );
        __m256i i1 = __lasx_xvftintrne_w_s( v1 );
        __m256i i2 = __lasx_xvftintrne_w_s( v2 );
        __m256i i3 = __lasx_xvftintrne_w_s( v3 );

        __m128i ni0 = lasx_extracti128( i0, 0 );
        __m128i ni1 = lasx_extracti128( i0, 1);
        __m128i ni2 = lasx_extracti128( i1, 0);
        __m128i ni3 = lasx_extracti128( i1, 1);
        __m128i ni4 = lasx_extracti128( i2, 0);
        __m128i ni5 = lasx_extracti128( i2, 1);
        __m128i ni6 = lasx_extracti128( i3, 0);
        __m128i ni7 = lasx_extracti128( i3, 1);

        // Convert int32 to int16
        ni0 = lsx_packs_w( ni0, ni1 );
        ni2 = lsx_packs_w( ni2, ni3 );
        ni4 = lsx_packs_w( ni4, ni5 );
        ni6 = lsx_packs_w( ni6, ni7 );
        // Convert int16 to int8
        ni0 = lsx_packs_h( ni0, ni2 );
        ni4 = lsx_packs_h( ni4, ni6 );

        __lsx_vst(ni0, (__m128i *)(y[i].qs +  0), 0);
        __lsx_vst(ni4, (__m128i *)(y[i].qs + 16), 0);

    }
#else
    GGML_UNUSED(nb);
    // scalar
    quantize_row_q8_0_ref(x, y, k);
#endif
}

// reference implementation for deterministic creation of model files
// q8_1 additionally stores s = d * sum(quants), used by dot-product kernels.
void quantize_row_q8_1_ref(const float * restrict x, block_q8_1 * restrict y, int64_t k) {
    assert(QK8_1 == 32);
    assert(k % QK8_1 == 0);
    const int nb = k / QK8_1;

    for (int i = 0; i < nb; i++) {
        float amax = 0.0f; // absolute max

        for (int j = 0; j < QK8_1; j++) {
            const float v = x[i*QK8_1 + j];
            amax = MAX(amax, fabsf(v));
        }

        const float d  = amax / ((1 << 7) - 1);
        const float id = d ?
1.0f/d : 0.0f; + + y[i].d = GGML_FP32_TO_FP16(d); + + int sum = 0; + + for (int j = 0; j < QK8_1/2; ++j) { + const float v0 = x[i*QK8_1 + j]*id; + const float v1 = x[i*QK8_1 + QK8_1/2 + j]*id; + + y[i].qs[ j] = roundf(v0); + y[i].qs[QK8_1/2 + j] = roundf(v1); + + sum += y[i].qs[ j]; + sum += y[i].qs[QK8_1/2 + j]; + } + + y[i].s = GGML_FP32_TO_FP16(sum*d); + } +} + +void quantize_row_q8_1(const float * restrict x, void * restrict vy, int64_t k) { + assert(k % QK8_1 == 0); + const int nb = k / QK8_1; + + block_q8_1 * restrict y = vy; + +#if defined(__ARM_NEON) + for (int i = 0; i < nb; i++) { + float32x4_t srcv [8]; + float32x4_t asrcv[8]; + float32x4_t amaxv[8]; + + for (int j = 0; j < 8; j++) srcv[j] = vld1q_f32(x + i*32 + 4*j); + for (int j = 0; j < 8; j++) asrcv[j] = vabsq_f32(srcv[j]); + + for (int j = 0; j < 4; j++) amaxv[2*j] = vmaxq_f32(asrcv[2*j], asrcv[2*j+1]); + for (int j = 0; j < 2; j++) amaxv[4*j] = vmaxq_f32(amaxv[4*j], amaxv[4*j+2]); + for (int j = 0; j < 1; j++) amaxv[8*j] = vmaxq_f32(amaxv[8*j], amaxv[8*j+4]); + + const float amax = vmaxvq_f32(amaxv[0]); + + const float d = amax / ((1 << 7) - 1); + const float id = d ? 
1.0f/d : 0.0f; + + y[i].d = GGML_FP32_TO_FP16(d); + + int32x4_t accv = vdupq_n_s32(0); + + for (int j = 0; j < 8; j++) { + const float32x4_t v = vmulq_n_f32(srcv[j], id); + const int32x4_t vi = vcvtnq_s32_f32(v); + + y[i].qs[4*j + 0] = vgetq_lane_s32(vi, 0); + y[i].qs[4*j + 1] = vgetq_lane_s32(vi, 1); + y[i].qs[4*j + 2] = vgetq_lane_s32(vi, 2); + y[i].qs[4*j + 3] = vgetq_lane_s32(vi, 3); + + accv = vaddq_s32(accv, vi); + } + + y[i].s = GGML_FP32_TO_FP16(d * vaddvq_s32(accv)); + } +#elif defined(__wasm_simd128__) + for (int i = 0; i < nb; i++) { + v128_t srcv [8]; + v128_t asrcv[8]; + v128_t amaxv[8]; + + for (int j = 0; j < 8; j++) srcv[j] = wasm_v128_load(x + i*32 + 4*j); + for (int j = 0; j < 8; j++) asrcv[j] = wasm_f32x4_abs(srcv[j]); + + for (int j = 0; j < 4; j++) amaxv[2*j] = wasm_f32x4_max(asrcv[2*j], asrcv[2*j+1]); + for (int j = 0; j < 2; j++) amaxv[4*j] = wasm_f32x4_max(amaxv[4*j], amaxv[4*j+2]); + for (int j = 0; j < 1; j++) amaxv[8*j] = wasm_f32x4_max(amaxv[8*j], amaxv[8*j+4]); + + const float amax = MAX(MAX(wasm_f32x4_extract_lane(amaxv[0], 0), + wasm_f32x4_extract_lane(amaxv[0], 1)), + MAX(wasm_f32x4_extract_lane(amaxv[0], 2), + wasm_f32x4_extract_lane(amaxv[0], 3))); + + const float d = amax / ((1 << 7) - 1); + const float id = d ? 
1.0f/d : 0.0f; + + y[i].d = GGML_FP32_TO_FP16(d); + + v128_t accv = wasm_i32x4_splat(0); + + for (int j = 0; j < 8; j++) { + const v128_t v = wasm_f32x4_mul(srcv[j], wasm_f32x4_splat(id)); + const v128_t vi = wasm_i32x4_trunc_sat_f32x4(v); + + y[i].qs[4*j + 0] = wasm_i32x4_extract_lane(vi, 0); + y[i].qs[4*j + 1] = wasm_i32x4_extract_lane(vi, 1); + y[i].qs[4*j + 2] = wasm_i32x4_extract_lane(vi, 2); + y[i].qs[4*j + 3] = wasm_i32x4_extract_lane(vi, 3); + + accv = wasm_i32x4_add(accv, vi); + } + + y[i].s = GGML_FP32_TO_FP16( + d * (wasm_i32x4_extract_lane(accv, 0) + + wasm_i32x4_extract_lane(accv, 1) + + wasm_i32x4_extract_lane(accv, 2) + + wasm_i32x4_extract_lane(accv, 3))); + } +#elif defined(__AVX2__) || defined(__AVX__) + for (int i = 0; i < nb; i++) { + // Load elements into 4 AVX vectors + __m256 v0 = _mm256_loadu_ps( x ); + __m256 v1 = _mm256_loadu_ps( x + 8 ); + __m256 v2 = _mm256_loadu_ps( x + 16 ); + __m256 v3 = _mm256_loadu_ps( x + 24 ); + x += 32; + + // Compute max(abs(e)) for the block + const __m256 signBit = _mm256_set1_ps( -0.0f ); + __m256 maxAbs = _mm256_andnot_ps( signBit, v0 ); + maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v1 ) ); + maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v2 ) ); + maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v3 ) ); + + __m128 max4 = _mm_max_ps( _mm256_extractf128_ps( maxAbs, 1 ), _mm256_castps256_ps128( maxAbs ) ); + max4 = _mm_max_ps( max4, _mm_movehl_ps( max4, max4 ) ); + max4 = _mm_max_ss( max4, _mm_movehdup_ps( max4 ) ); + const float max_scalar = _mm_cvtss_f32( max4 ); + + // Quantize these floats + const float d = max_scalar / 127.f; + y[i].d = GGML_FP32_TO_FP16(d); + const float id = ( max_scalar != 0.0f ) ? 
127.f / max_scalar : 0.0f; + const __m256 mul = _mm256_set1_ps( id ); + + // Apply the multiplier + v0 = _mm256_mul_ps( v0, mul ); + v1 = _mm256_mul_ps( v1, mul ); + v2 = _mm256_mul_ps( v2, mul ); + v3 = _mm256_mul_ps( v3, mul ); + + // Round to nearest integer + v0 = _mm256_round_ps( v0, _MM_ROUND_NEAREST ); + v1 = _mm256_round_ps( v1, _MM_ROUND_NEAREST ); + v2 = _mm256_round_ps( v2, _MM_ROUND_NEAREST ); + v3 = _mm256_round_ps( v3, _MM_ROUND_NEAREST ); + + // Convert floats to integers + __m256i i0 = _mm256_cvtps_epi32( v0 ); + __m256i i1 = _mm256_cvtps_epi32( v1 ); + __m256i i2 = _mm256_cvtps_epi32( v2 ); + __m256i i3 = _mm256_cvtps_epi32( v3 ); + +#if defined(__AVX2__) + // Compute the sum of the quants and set y[i].s + y[i].s = GGML_FP32_TO_FP16(d * hsum_i32_8(_mm256_add_epi32(_mm256_add_epi32(i0, i1), _mm256_add_epi32(i2, i3)))); + + // Convert int32 to int16 + i0 = _mm256_packs_epi32( i0, i1 ); // 0, 1, 2, 3, 8, 9, 10, 11, 4, 5, 6, 7, 12, 13, 14, 15 + i2 = _mm256_packs_epi32( i2, i3 ); // 16, 17, 18, 19, 24, 25, 26, 27, 20, 21, 22, 23, 28, 29, 30, 31 + // Convert int16 to int8 + i0 = _mm256_packs_epi16( i0, i2 ); // 0, 1, 2, 3, 8, 9, 10, 11, 16, 17, 18, 19, 24, 25, 26, 27, 4, 5, 6, 7, 12, 13, 14, 15, 20, 21, 22, 23, 28, 29, 30, 31 + + // We got our precious signed bytes, but the order is now wrong + // These AVX2 pack instructions process 16-byte pieces independently + // The following instruction is fixing the order + const __m256i perm = _mm256_setr_epi32( 0, 4, 1, 5, 2, 6, 3, 7 ); + i0 = _mm256_permutevar8x32_epi32( i0, perm ); + + _mm256_storeu_si256((__m256i *)y[i].qs, i0); +#else + // Since we don't have in AVX some necessary functions, + // we split the registers in half and call AVX2 analogs from SSE + __m128i ni0 = _mm256_castsi256_si128( i0 ); + __m128i ni1 = _mm256_extractf128_si256( i0, 1); + __m128i ni2 = _mm256_castsi256_si128( i1 ); + __m128i ni3 = _mm256_extractf128_si256( i1, 1); + __m128i ni4 = _mm256_castsi256_si128( i2 ); + __m128i ni5 = 
_mm256_extractf128_si256( i2, 1); + __m128i ni6 = _mm256_castsi256_si128( i3 ); + __m128i ni7 = _mm256_extractf128_si256( i3, 1); + + // Compute the sum of the quants and set y[i].s + const __m128i s0 = _mm_add_epi32(_mm_add_epi32(ni0, ni1), _mm_add_epi32(ni2, ni3)); + const __m128i s1 = _mm_add_epi32(_mm_add_epi32(ni4, ni5), _mm_add_epi32(ni6, ni7)); + y[i].s = GGML_FP32_TO_FP16(d * hsum_i32_4(_mm_add_epi32(s0, s1))); + + // Convert int32 to int16 + ni0 = _mm_packs_epi32( ni0, ni1 ); + ni2 = _mm_packs_epi32( ni2, ni3 ); + ni4 = _mm_packs_epi32( ni4, ni5 ); + ni6 = _mm_packs_epi32( ni6, ni7 ); + // Convert int16 to int8 + ni0 = _mm_packs_epi16( ni0, ni2 ); + ni4 = _mm_packs_epi16( ni4, ni6 ); + + _mm_storeu_si128((__m128i *)(y[i].qs + 0), ni0); + _mm_storeu_si128((__m128i *)(y[i].qs + 16), ni4); +#endif + } +#elif defined(__riscv_v_intrinsic) + + size_t vl = __riscv_vsetvl_e32m4(QK8_1); + + for (int i = 0; i < nb; i++) { + // load elements + vfloat32m4_t v_x = __riscv_vle32_v_f32m4(x+i*QK8_1, vl); + + vfloat32m4_t vfabs = __riscv_vfabs_v_f32m4(v_x, vl); + vfloat32m1_t tmp = __riscv_vfmv_v_f_f32m1(0.0, vl); + vfloat32m1_t vmax = __riscv_vfredmax_vs_f32m4_f32m1(vfabs, tmp, vl); + float amax = __riscv_vfmv_f_s_f32m1_f32(vmax); + + const float d = amax / ((1 << 7) - 1); + const float id = d ? 
1.0f/d : 0.0f; + + y[i].d = GGML_FP32_TO_FP16(d); + + vfloat32m4_t x0 = __riscv_vfmul_vf_f32m4(v_x, id, vl); + + // convert to integer + vint16m2_t vi = __riscv_vfncvt_x_f_w_i16m2(x0, vl); + vint8m1_t vs = __riscv_vncvt_x_x_w_i8m1(vi, vl); + + // store result + __riscv_vse8_v_i8m1(y[i].qs , vs, vl); + + // compute sum for y[i].s + vint16m1_t tmp2 = __riscv_vmv_v_x_i16m1(0, vl); + vint16m1_t vwrs = __riscv_vwredsum_vs_i8m1_i16m1(vs, tmp2, vl); + + // set y[i].s + int sum = __riscv_vmv_x_s_i16m1_i16(vwrs); + y[i].s = GGML_FP32_TO_FP16(sum*d); + } + +#elif defined(__POWER9_VECTOR__) + for (int i = 0; i < nb; i++) { + vector float srcv [8]; + vector float asrcv[8]; + vector float amaxv[8]; + vector signed int vi[8]; + + for (int j = 0; j < 8; j++) srcv[j] = vec_xl(0, x + i*32 + 4*j); + for (int j = 0; j < 8; j++) asrcv[j] = vec_abs(srcv[j]); + + for (int j = 0; j < 4; j++) amaxv[2*j] = vec_max(asrcv[2*j], asrcv[2*j+1]); + for (int j = 0; j < 2; j++) amaxv[4*j] = vec_max(amaxv[4*j], amaxv[4*j+2]); + for (int j = 0; j < 1; j++) amaxv[8*j] = vec_max(amaxv[8*j], amaxv[8*j+4]); + + const float amax = MAX(MAX(vec_extract(amaxv[0], 0), + vec_extract(amaxv[0], 1)), + MAX(vec_extract(amaxv[0], 2), + vec_extract(amaxv[0], 3))); + + const float d = amax / ((1 << 7) - 1); + const float id = d ? 
1.0f/d : 0.0f; + const vector float vid = vec_splats(id); + + y[i].d = GGML_FP32_TO_FP16(d); + + vector int accv = vec_splats(0); + + for (int j = 0; j < 8; j++) { + const vector float v = vec_round(vec_mul(srcv[j], vid)); + vi[j] = vec_cts(v, 0); + + accv = vec_add(accv, vi[j]); + } + vec_xst(vec_pack(vec_pack(vi[0], vi[1]), vec_pack(vi[2], vi[3])), 0, &y[i].qs[0]); + vec_xst(vec_pack(vec_pack(vi[4], vi[5]), vec_pack(vi[6], vi[7])), 16, &y[i].qs[0]); + + accv = vec_add(accv, vec_sld(accv, accv, 4)); + accv = vec_add(accv, vec_sld(accv, accv, 8)); + y[i].s = GGML_FP32_TO_FP16(d * vec_extract(accv, 0)); + } + +#elif defined(__loongarch_asx) + for (int i = 0; i < nb; i++) { + ft_union ft; + __m256 v0 = (__m256)__lasx_xvld( x , 0 ); + __m256 v1 = (__m256)__lasx_xvld( x , 32 ); + __m256 v2 = (__m256)__lasx_xvld( x , 64 ); + __m256 v3 = (__m256)__lasx_xvld( x , 96 ); + x += 32; + + // Compute max(abs(e)) for the block + const __m256 sign_bit = __lasx_xvreplfr2vr_s( -0.0f ); + __m256 max_abs = (__m256)__lasx_xvandn_v( (__m256i)sign_bit, (__m256i)v0 ); + max_abs = __lasx_xvfmax_s( max_abs, (__m256)__lasx_xvandn_v( (__m256i)sign_bit, (__m256i)v1 ) ); + max_abs = __lasx_xvfmax_s( max_abs, (__m256)__lasx_xvandn_v( (__m256i)sign_bit, (__m256i)v2 ) ); + max_abs = __lasx_xvfmax_s( max_abs, (__m256)__lasx_xvandn_v( (__m256i)sign_bit, (__m256i)v3 ) ); + + __m128 max4 = __lsx_vfmax_s( lasx_extractf128( max_abs, 1 ), lasx_extractf128( max_abs, 0) ); + max4 = __lsx_vfmax_s( max4, (__m128)__lsx_vpickod_d((__m128i) max4, (__m128i)max4 ) ); + __m128 tmp = max4; + max4 = __lsx_vfmax_s( max4, (__m128)__lsx_vextrins_w((__m128i)tmp, (__m128i)max4, 0x10 )); + ft.i = __lsx_vpickve2gr_w( (__m128i)max4, 0 ); + const float max_scalar = ft.f; + + // Quantize these floats + const float d = max_scalar / 127.f; + y[i].d = GGML_FP32_TO_FP16(d); + const float id = ( max_scalar != 0.0f ) ? 
127.f / max_scalar : 0.0f; + const __m256 mul = __lasx_xvreplfr2vr_s( id ); + + // Apply the multiplier + v0 = __lasx_xvfmul_s( v0, mul ); + v1 = __lasx_xvfmul_s( v1, mul ); + v2 = __lasx_xvfmul_s( v2, mul ); + v3 = __lasx_xvfmul_s( v3, mul ); + + // Round to nearest integer + __m256i i0 = __lasx_xvftintrne_w_s( v0 ); + __m256i i1 = __lasx_xvftintrne_w_s( v1 ); + __m256i i2 = __lasx_xvftintrne_w_s( v2 ); + __m256i i3 = __lasx_xvftintrne_w_s( v3 ); + + __m128i ni0 = lasx_extracti128(i0, 0); + __m128i ni1 = lasx_extracti128( i0, 1); + __m128i ni2 = lasx_extracti128( i1, 0); + __m128i ni3 = lasx_extracti128( i1, 1); + __m128i ni4 = lasx_extracti128( i2, 0 ); + __m128i ni5 = lasx_extracti128( i2, 1); + __m128i ni6 = lasx_extracti128( i3, 0); + __m128i ni7 = lasx_extracti128( i3, 1); + + // Compute the sum of the quants and set y[i].s + const __m128i s0 = __lsx_vadd_w(__lsx_vadd_w(ni0, ni1), __lsx_vadd_w(ni2, ni3)); + const __m128i s1 = __lsx_vadd_w(__lsx_vadd_w(ni4, ni5), __lsx_vadd_w(ni6, ni7)); + y[i].s = GGML_FP32_TO_FP16(d * hsum_i32_4(__lsx_vadd_w(s0, s1))); + + // Convert int32 to int16 + ni0 = lsx_packs_w( ni0, ni1 ); + ni2 = lsx_packs_w( ni2, ni3 ); + ni4 = lsx_packs_w( ni4, ni5 ); + ni6 = lsx_packs_w( ni6, ni7 ); + // Convert int16 to int8 + ni0 = lsx_packs_h( ni0, ni2 ); + ni4 = lsx_packs_h( ni4, ni6 ); + + __lsx_vst(ni0, (__m128i *)(y[i].qs + 0), 0); + __lsx_vst(ni4, (__m128i *)(y[i].qs + 16), 0); + } +#else + GGML_UNUSED(nb); + // scalar + quantize_row_q8_1_ref(x, y, k); +#endif +} + +void dequantize_row_q4_0(const block_q4_0 * restrict x, float * restrict y, int64_t k) { + static const int qk = QK4_0; + + assert(k % qk == 0); + + const int nb = k / qk; + + for (int i = 0; i < nb; i++) { + const float d = GGML_FP16_TO_FP32(x[i].d); + + for (int j = 0; j < qk/2; ++j) { + const int x0 = (x[i].qs[j] & 0x0F) - 8; + const int x1 = (x[i].qs[j] >> 4) - 8; + + y[i*qk + j + 0 ] = x0*d; + y[i*qk + j + qk/2] = x1*d; + } + } +} + +void dequantize_row_q4_1(const 
block_q4_1 * restrict x, float * restrict y, int64_t k) { + static const int qk = QK4_1; + + assert(k % qk == 0); + + const int nb = k / qk; + + for (int i = 0; i < nb; i++) { + const float d = GGML_FP16_TO_FP32(x[i].d); + const float m = GGML_FP16_TO_FP32(x[i].m); + + for (int j = 0; j < qk/2; ++j) { + const int x0 = (x[i].qs[j] & 0x0F); + const int x1 = (x[i].qs[j] >> 4); + + y[i*qk + j + 0 ] = x0*d + m; + y[i*qk + j + qk/2] = x1*d + m; + } + } +} + +void dequantize_row_q5_0(const block_q5_0 * restrict x, float * restrict y, int64_t k) { + static const int qk = QK5_0; + + assert(k % qk == 0); + + const int nb = k / qk; + + for (int i = 0; i < nb; i++) { + const float d = GGML_FP16_TO_FP32(x[i].d); + + uint32_t qh; + memcpy(&qh, x[i].qh, sizeof(qh)); + + for (int j = 0; j < qk/2; ++j) { + const uint8_t xh_0 = ((qh >> (j + 0)) << 4) & 0x10; + const uint8_t xh_1 = ((qh >> (j + 12)) ) & 0x10; + + const int32_t x0 = ((x[i].qs[j] & 0x0F) | xh_0) - 16; + const int32_t x1 = ((x[i].qs[j] >> 4) | xh_1) - 16; + + y[i*qk + j + 0 ] = x0*d; + y[i*qk + j + qk/2] = x1*d; + } + } +} + +void dequantize_row_q5_1(const block_q5_1 * restrict x, float * restrict y, int64_t k) { + static const int qk = QK5_1; + + assert(k % qk == 0); + + const int nb = k / qk; + + for (int i = 0; i < nb; i++) { + const float d = GGML_FP16_TO_FP32(x[i].d); + const float m = GGML_FP16_TO_FP32(x[i].m); + + uint32_t qh; + memcpy(&qh, x[i].qh, sizeof(qh)); + + for (int j = 0; j < qk/2; ++j) { + const uint8_t xh_0 = ((qh >> (j + 0)) << 4) & 0x10; + const uint8_t xh_1 = ((qh >> (j + 12)) ) & 0x10; + + const int x0 = (x[i].qs[j] & 0x0F) | xh_0; + const int x1 = (x[i].qs[j] >> 4) | xh_1; + + y[i*qk + j + 0 ] = x0*d + m; + y[i*qk + j + qk/2] = x1*d + m; + } + } +} + +void dequantize_row_q8_0(const block_q8_0 * restrict x, float * restrict y, int64_t k) { + static const int qk = QK8_0; + + assert(k % qk == 0); + + const int nb = k / qk; + + for (int i = 0; i < nb; i++) { + const float d = 
GGML_FP16_TO_FP32(x[i].d); + + for (int j = 0; j < qk; ++j) { + y[i*qk + j] = x[i].qs[j]*d; + } + } +} + +// +// 2-6 bit quantization in super-blocks +// + +// +// ===================== Helper functions +// +static inline int nearest_int(float fval) { + assert(fabsf(fval) <= 4194303.f); + float val = fval + 12582912.f; + int i; memcpy(&i, &val, sizeof(int)); + return (i & 0x007fffff) - 0x00400000; +} + +static float make_qx_quants(int n, int nmax, const float * restrict x, int8_t * restrict L, int rmse_type, + const float * restrict qw) { + float max = 0; + float amax = 0; + for (int i = 0; i < n; ++i) { + float ax = fabsf(x[i]); + if (ax > amax) { amax = ax; max = x[i]; } + } + if (amax < GROUP_MAX_EPS) { // all zero + for (int i = 0; i < n; ++i) { + L[i] = 0; + } + return 0.f; + } + float iscale = -nmax / max; + if (rmse_type == 0) { + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale * x[i]); + L[i] = nmax + MAX(-nmax, MIN(nmax-1, l)); + } + return 1/iscale; + } + bool return_early = false; + if (rmse_type < 0) { + rmse_type = -rmse_type; + return_early = true; + } + float sumlx = 0; + float suml2 = 0; +#ifdef HAVE_BUGGY_APPLE_LINKER + // use 'volatile' to prevent unroll and work around a bug in Apple ld64 1015.7 + for (volatile int i = 0; i < n; ++i) { +#else + for (int i = 0; i < n; ++i) { +#endif + int l = nearest_int(iscale * x[i]); + l = MAX(-nmax, MIN(nmax-1, l)); + L[i] = l + nmax; + float w = qw ? qw[i] : rmse_type == 1 ? x[i] * x[i] : rmse_type == 2 ? 1 : rmse_type == 3 ? fabsf(x[i]) : sqrtf(fabsf(x[i])); + sumlx += w*x[i]*l; + suml2 += w*l*l; + } + float scale = suml2 ? sumlx/suml2 : 0.0f; + if (return_early) return suml2 > 0 ? 0.5f*(scale + 1/iscale) : 1/iscale; + float best = scale * sumlx; + for (int is = -9; is <= 9; ++is) { + if (is == 0) { + continue; + } + iscale = -(nmax + 0.1f*is) / max; + sumlx = suml2 = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale * x[i]); + l = MAX(-nmax, MIN(nmax-1, l)); + float w = qw ? 
qw[i] : rmse_type == 1 ? x[i] * x[i] : rmse_type == 2 ? 1 : rmse_type == 3 ? fabsf(x[i]) : sqrtf(fabsf(x[i])); + sumlx += w*x[i]*l; + suml2 += w*l*l; + } + if (suml2 > 0 && sumlx*sumlx > best*suml2) { + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale * x[i]); + L[i] = nmax + MAX(-nmax, MIN(nmax-1, l)); + } + scale = sumlx/suml2; best = scale*sumlx; + } + } + return scale; +} + +static float make_q3_quants(int n, int nmax, const float * restrict x, int8_t * restrict L, bool do_rmse) { + float max = 0; + float amax = 0; + for (int i = 0; i < n; ++i) { + float ax = fabsf(x[i]); + if (ax > amax) { amax = ax; max = x[i]; } + } + if (amax < GROUP_MAX_EPS) { // all zero + for (int i = 0; i < n; ++i) { L[i] = 0; } + return 0.f; + } + float iscale = -nmax / max; + if (do_rmse) { + float sumlx = 0; + float suml2 = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale * x[i]); + l = MAX(-nmax, MIN(nmax-1, l)); + L[i] = l; + float w = x[i]*x[i]; + sumlx += w*x[i]*l; + suml2 += w*l*l; + } + for (int itry = 0; itry < 5; ++itry) { + int n_changed = 0; + for (int i = 0; i < n; ++i) { + float w = x[i]*x[i]; + float slx = sumlx - w*x[i]*L[i]; + if (slx > 0) { + float sl2 = suml2 - w*L[i]*L[i]; + int new_l = nearest_int(x[i] * sl2 / slx); + new_l = MAX(-nmax, MIN(nmax-1, new_l)); + if (new_l != L[i]) { + slx += w*x[i]*new_l; + sl2 += w*new_l*new_l; + if (sl2 > 0 && slx*slx*suml2 > sumlx*sumlx*sl2) { + L[i] = new_l; sumlx = slx; suml2 = sl2; + ++n_changed; + } + } + } + } + if (!n_changed) { + break; + } + } + for (int i = 0; i < n; ++i) { + L[i] += nmax; + } + return sumlx / suml2; + } + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale * x[i]); + l = MAX(-nmax, MIN(nmax-1, l)); + L[i] = l + nmax; + } + return 1/iscale; +} + +static float make_qkx1_quants(int n, int nmax, const float * restrict x, uint8_t * restrict L, float * restrict the_min, + int ntry, float alpha) { + float min = x[0]; + float max = x[0]; + for (int i = 1; i < n; ++i) { + if (x[i] < 
min) min = x[i]; + if (x[i] > max) max = x[i]; + } + if (max == min) { + for (int i = 0; i < n; ++i) L[i] = 0; + *the_min = 0; + return 0.f; + } + if (min > 0) min = 0; + float iscale = nmax/(max - min); + float scale = 1/iscale; + for (int itry = 0; itry < ntry; ++itry) { + float sumlx = 0; int suml2 = 0; + bool did_change = false; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale*(x[i] - min)); + l = MAX(0, MIN(nmax, l)); + if (l != L[i]) { + L[i] = l; + did_change = true; + } + sumlx += (x[i] - min)*l; + suml2 += l*l; + } + scale = sumlx/suml2; + float sum = 0; + for (int i = 0; i < n; ++i) { + sum += x[i] - scale*L[i]; + } + min = alpha*min + (1 - alpha)*sum/n; + if (min > 0) min = 0; + iscale = 1/scale; + if (!did_change) break; + } + *the_min = -min; + return scale; +} + +static float make_qkx2_quants(int n, int nmax, const float * restrict x, const float * restrict weights, + uint8_t * restrict L, float * restrict the_min, uint8_t * restrict Laux, + float rmin, float rdelta, int nstep, bool use_mad) { + float min = x[0]; + float max = x[0]; + float sum_w = weights[0]; + float sum_x = sum_w * x[0]; +#ifdef HAVE_BUGGY_APPLE_LINKER + // use 'volatile' to prevent unroll and work around a bug in Apple ld64 1015.7 + for (volatile int i = 1; i < n; ++i) { +#else + for (int i = 1; i < n; ++i) { +#endif + if (x[i] < min) min = x[i]; + if (x[i] > max) max = x[i]; + float w = weights[i]; + sum_w += w; + sum_x += w * x[i]; + } + if (min > 0) min = 0; + if (max == min) { + for (int i = 0; i < n; ++i) L[i] = 0; + *the_min = -min; + return 0.f; + } + float iscale = nmax/(max - min); + float scale = 1/iscale; + float best_mad = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale*(x[i] - min)); + L[i] = MAX(0, MIN(nmax, l)); + float diff = scale * L[i] + min - x[i]; + diff = use_mad ? 
fabsf(diff) : diff * diff; + float w = weights[i]; + best_mad += w * diff; + } + if (nstep < 1) { + *the_min = -min; + return scale; + } + for (int is = 0; is <= nstep; ++is) { + iscale = (rmin + rdelta*is + nmax)/(max - min); + float sum_l = 0, sum_l2 = 0, sum_xl = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale*(x[i] - min)); + l = MAX(0, MIN(nmax, l)); + Laux[i] = l; + float w = weights[i]; + sum_l += w*l; + sum_l2 += w*l*l; + sum_xl += w*l*x[i]; + } + float D = sum_w * sum_l2 - sum_l * sum_l; + if (D > 0) { + float this_scale = (sum_w * sum_xl - sum_x * sum_l)/D; + float this_min = (sum_l2 * sum_x - sum_l * sum_xl)/D; + if (this_min > 0) { + this_min = 0; + this_scale = sum_xl / sum_l2; + } + float mad = 0; + for (int i = 0; i < n; ++i) { + float diff = this_scale * Laux[i] + this_min - x[i]; + diff = use_mad ? fabsf(diff) : diff * diff; + float w = weights[i]; + mad += w * diff; + } + if (mad < best_mad) { + for (int i = 0; i < n; ++i) { + L[i] = Laux[i]; + } + best_mad = mad; + scale = this_scale; + min = this_min; + } + } + } + *the_min = -min; + return scale; +} + +static inline void get_scale_min_k4(int j, const uint8_t * restrict q, uint8_t * restrict d, uint8_t * restrict m) { + if (j < 4) { + *d = q[j] & 63; *m = q[j + 4] & 63; + } else { + *d = (q[j+4] & 0xF) | ((q[j-4] >> 6) << 4); + *m = (q[j+4] >> 4) | ((q[j-0] >> 6) << 4); + } +} + +//========================- 2-bit (de)-quantization + +void quantize_row_q2_K_ref(const float * restrict x, block_q2_K * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + uint8_t L[QK_K]; + uint8_t Laux[16]; + float weights[16]; + float mins[QK_K/16]; + float scales[QK_K/16]; + + const float q4scale = 15.f; + + for (int i = 0; i < nb; i++) { + float max_scale = 0; // as we are deducting the min, scales are always positive + float max_min = 0; + for (int j = 0; j < QK_K/16; ++j) { + for (int l = 0; l < 16; ++l) weights[l] = fabsf(x[16*j + l]); + scales[j] = 
make_qkx2_quants(16, 3, x + 16*j, weights, L + 16*j, &mins[j], Laux, -0.5f, 0.1f, 15, true); + float scale = scales[j]; + if (scale > max_scale) { + max_scale = scale; + } + float min = mins[j]; + if (min > max_min) { + max_min = min; + } + } + + if (max_scale > 0) { + float iscale = q4scale/max_scale; + for (int j = 0; j < QK_K/16; ++j) { + int l = nearest_int(iscale*scales[j]); + y[i].scales[j] = l; + } + y[i].d = GGML_FP32_TO_FP16(max_scale/q4scale); + } else { + for (int j = 0; j < QK_K/16; ++j) y[i].scales[j] = 0; + y[i].d = GGML_FP32_TO_FP16(0.f); + } + if (max_min > 0) { + float iscale = q4scale/max_min; + for (int j = 0; j < QK_K/16; ++j) { + int l = nearest_int(iscale*mins[j]); + y[i].scales[j] |= (l << 4); + } + y[i].dmin = GGML_FP32_TO_FP16(max_min/q4scale); + } else { + y[i].dmin = GGML_FP32_TO_FP16(0.f); + } + for (int j = 0; j < QK_K/16; ++j) { + const float d = GGML_FP16_TO_FP32(y[i].d) * (y[i].scales[j] & 0xF); + if (!d) continue; + const float dm = GGML_FP16_TO_FP32(y[i].dmin) * (y[i].scales[j] >> 4); + for (int ii = 0; ii < 16; ++ii) { + int l = nearest_int((x[16*j + ii] + dm)/d); + l = MAX(0, MIN(3, l)); + L[16*j + ii] = l; + } + } + + for (int j = 0; j < QK_K; j += 128) { + for (int l = 0; l < 32; ++l) { + y[i].qs[j/4 + l] = L[j + l] | (L[j + l + 32] << 2) | (L[j + l + 64] << 4) | (L[j + l + 96] << 6); + } + } + + x += QK_K; + } +} + +void dequantize_row_q2_K(const block_q2_K * restrict x, float * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d); + const float min = GGML_FP16_TO_FP32(x[i].dmin); + + const uint8_t * q = x[i].qs; + + int is = 0; + float dl, ml; + for (int n = 0; n < QK_K; n += 128) { + int shift = 0; + for (int j = 0; j < 4; ++j) { + + uint8_t sc = x[i].scales[is++]; + dl = d * (sc & 0xF); ml = min * (sc >> 4); + for (int l = 0; l < 16; ++l) *y++ = dl * ((int8_t)((q[l] >> shift) & 3)) - ml; + + sc = x[i].scales[is++]; + dl 
= d * (sc & 0xF); ml = min * (sc >> 4); + for (int l = 0; l < 16; ++l) *y++ = dl * ((int8_t)((q[l+16] >> shift) & 3)) - ml; + + shift += 2; + } + q += 32; + } + } +} + +void quantize_row_q2_K(const float * restrict x, void * restrict vy, int64_t k) { + quantize_row_q2_K_ref(x, vy, k); +} + +static float make_qkx3_quants(int n, int nmax, const float * restrict x, const float * restrict weights, + uint8_t * restrict L, float * restrict the_min, uint8_t * restrict Laux, + float rmin, float rdelta, int nstep, bool use_mad) { + float min = x[0]; + float max = x[0]; + float sum_w = weights ? weights[0] : x[0]*x[0]; + float sum_x = sum_w * x[0]; +#ifdef HAVE_BUGGY_APPLE_LINKER + // use 'volatile' to prevent unroll and work around a bug in Apple ld64 1015.7 + for (volatile int i = 1; i < n; ++i) { +#else + for (int i = 1; i < n; ++i) { +#endif + if (x[i] < min) min = x[i]; + if (x[i] > max) max = x[i]; + float w = weights ? weights[i] : x[i]*x[i]; + sum_w += w; + sum_x += w * x[i]; + } + if (min > 0) { + min = 0; + } + if (max <= min) { + memset(L, 0, n); + *the_min = -min; + return 0.f; + } + float iscale = nmax/(max - min); + float scale = 1/iscale; + float best_mad = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale*(x[i] - min)); + L[i] = MAX(0, MIN(nmax, l)); + float diff = scale * L[i] + min - x[i]; + diff = use_mad ? fabsf(diff) : diff*diff; + float w = weights ? weights[i] : x[i]*x[i]; + best_mad += w * diff; + } + if (nstep < 1) { + *the_min = -min; + return scale; + } + for (int is = 0; is <= nstep; ++is) { + iscale = (rmin + rdelta*is + nmax)/(max - min); + float sum_l = 0, sum_l2 = 0, sum_xl = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale*(x[i] - min)); + l = MAX(0, MIN(nmax, l)); + Laux[i] = l; + float w = weights ? 
weights[i] : x[i]*x[i]; + sum_l += w*l; + sum_l2 += w*l*l; + sum_xl += w*l*x[i]; + } + float D = sum_w * sum_l2 - sum_l * sum_l; + if (D > 0) { + float this_scale = (sum_w * sum_xl - sum_x * sum_l)/D; + float this_min = (sum_l2 * sum_x - sum_l * sum_xl)/D; + if (this_min > 0) { + this_min = 0; + this_scale = sum_xl / sum_l2; + } + float mad = 0; + for (int i = 0; i < n; ++i) { + float diff = this_scale * Laux[i] + this_min - x[i]; + diff = use_mad ? fabsf(diff) : diff*diff; + float w = weights ? weights[i] : x[i]*x[i]; + mad += w * diff; + } + if (mad < best_mad) { + for (int i = 0; i < n; ++i) { + L[i] = Laux[i]; + } + best_mad = mad; + scale = this_scale; + min = this_min; + } + } + } + *the_min = -min; + return scale; +} + +static float make_qp_quants(int n, int nmax, const float * restrict x, uint8_t * restrict L, const float * quant_weights) { + float max = 0; + for (int i = 0; i < n; ++i) { + max = MAX(max, x[i]); + } + if (!max) { // all zero + for (int i = 0; i < n; ++i) { L[i] = 0; } + return 0.f; + } + float iscale = nmax / max; + for (int i = 0; i < n; ++i) { + L[i] = nearest_int(iscale * x[i]); + } + float scale = 1/iscale; + float best_mse = 0; + for (int i = 0; i < n; ++i) { + float diff = x[i] - scale*L[i]; + float w = quant_weights[i]; + best_mse += w*diff*diff; + } + for (int is = -4; is <= 4; ++is) { + if (is == 0) continue; + float iscale_is = (0.1f*is + nmax)/max; + float scale_is = 1/iscale_is; + float mse = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale_is*x[i]); + l = MIN(nmax, l); + float diff = x[i] - scale_is*l; + float w = quant_weights[i]; + mse += w*diff*diff; + } + if (mse < best_mse) { + best_mse = mse; + iscale = iscale_is; + } + } + float sumlx = 0; + float suml2 = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale * x[i]); + l = MIN(nmax, l); + L[i] = l; + float w = quant_weights[i]; + sumlx += w*x[i]*l; + suml2 += w*l*l; + } + for (int itry = 0; itry < 5; ++itry) { + int n_changed = 0; + for (int i = 
0; i < n; ++i) { + float w = quant_weights[i]; + float slx = sumlx - w*x[i]*L[i]; + float sl2 = suml2 - w*L[i]*L[i]; + if (slx > 0 && sl2 > 0) { + int new_l = nearest_int(x[i] * sl2 / slx); + new_l = MIN(nmax, new_l); + if (new_l != L[i]) { + slx += w*x[i]*new_l; + sl2 += w*new_l*new_l; + if (slx*slx*suml2 > sumlx*sumlx*sl2) { + L[i] = new_l; sumlx = slx; suml2 = sl2; + ++n_changed; + } + } + } + } + if (!n_changed) { + break; + } + } + return sumlx/suml2; +} + +static void quantize_row_q2_K_impl(const float * restrict x, block_q2_K * restrict y, int k, const float * restrict quant_weights) { + GGML_ASSERT(quant_weights); + assert(k % QK_K == 0); + const int nb = k / QK_K; + const bool requantize = true; + + uint8_t L[QK_K]; + uint8_t Laux[16]; + float mins[QK_K/16]; + float scales[QK_K/16]; + float sw[QK_K/16]; + float weight[16]; + uint8_t Ls[QK_K/16], Lm[QK_K/16]; + + for (int i = 0; i < nb; i++) { + memset(sw, 0, QK_K/16*sizeof(float)); + float sumx2 = 0; + for (int j = 0; j < QK_K; ++j) sumx2 += x[j]*x[j]; + float sigma2 = sumx2/QK_K; + for (int j = 0; j < QK_K/16; ++j) { + const float * restrict qw = quant_weights + QK_K * i + 16*j; + for (int l = 0; l < 16; ++l) weight[l] = qw[l] * sqrtf(sigma2 + x[16*j + l]*x[16*j + l]); + for (int l = 0; l < QK_K/16; ++l) sw[j] += weight[l]; + scales[j] = make_qkx3_quants(16, 3, x + 16*j, weight, L + 16*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); + } + + float dm, mm; + dm = make_qp_quants(QK_K/16, 15, scales, Ls, sw); + mm = make_qp_quants(QK_K/16, 15, mins, Lm, sw); + + y[i].d = GGML_FP32_TO_FP16(dm); + y[i].dmin = GGML_FP32_TO_FP16(mm); + dm = GGML_FP16_TO_FP32(y[i].d); + mm = GGML_FP16_TO_FP32(y[i].dmin); + + for (int j = 0; j < QK_K/16; ++j) { + y[i].scales[j] = Ls[j] | (Lm[j] << 4); + } + + if (requantize) { + for (int j = 0; j < QK_K/16; ++j) { + const float d = dm * (y[i].scales[j] & 0xF); + if (!d) continue; + const float m = mm * (y[i].scales[j] >> 4); + for (int ii = 0; ii < 16; ++ii) { + int l = 
nearest_int((x[16*j + ii] + m)/d); + l = MAX(0, MIN(3, l)); + L[16*j + ii] = l; + } + } + } + + for (int j = 0; j < QK_K; j += 128) { + for (int l = 0; l < 32; ++l) { + y[i].qs[j/4 + l] = L[j + l] | (L[j + l + 32] << 2) | (L[j + l + 64] << 4) | (L[j + l + 96] << 6); + } + } + + x += QK_K; + } +} + +size_t quantize_q2_K(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + size_t row_size = ggml_row_size(GGML_TYPE_Q2_K, n_per_row); + if (!quant_weights) { + quantize_row_q2_K_ref(src, dst, (int64_t)nrow*n_per_row); + } + else { + char * qrow = (char *)dst; + for (int64_t row = 0; row < nrow; ++row) { + quantize_row_q2_K_impl(src, (block_q2_K*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + } + return nrow * row_size; +} + +//========================= 3-bit (de)-quantization + +void quantize_row_q3_K_ref(const float * restrict x, block_q3_K * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + int8_t L[QK_K]; + float scales[QK_K / 16]; + + for (int i = 0; i < nb; i++) { + + float max_scale = 0; + float amax = 0; + for (int j = 0; j < QK_K/16; ++j) { + scales[j] = make_q3_quants(16, 4, x + 16*j, L + 16*j, true); + float scale = fabsf(scales[j]); + if (scale > amax) { + amax = scale; max_scale = scales[j]; + } + } + + memset(y[i].scales, 0, 12); + if (max_scale) { + float iscale = -32.f/max_scale; + for (int j = 0; j < QK_K/16; ++j) { + int8_t l = nearest_int(iscale*scales[j]); + l = MAX(-32, MIN(31, l)) + 32; + if (j < 8) { + y[i].scales[j] = l & 0xF; + } else { + y[i].scales[j-8] |= ((l & 0xF) << 4); + } + l >>= 4; + y[i].scales[j%4 + 8] |= (l << (2*(j/4))); + } + y[i].d = GGML_FP32_TO_FP16(1/iscale); + } else { + y[i].d = GGML_FP32_TO_FP16(0.f); + } + + int8_t sc; + for (int j = 0; j < QK_K/16; ++j) { + sc = j < 8 ? 
y[i].scales[j] & 0xF : y[i].scales[j-8] >> 4; + sc = (sc | (((y[i].scales[8 + j%4] >> (2*(j/4))) & 3) << 4)) - 32; + float d = GGML_FP16_TO_FP32(y[i].d) * sc; + if (!d) { + continue; + } + for (int ii = 0; ii < 16; ++ii) { + int l = nearest_int(x[16*j + ii]/d); + l = MAX(-4, MIN(3, l)); + L[16*j + ii] = l + 4; + } + } + + memset(y[i].hmask, 0, QK_K/8); + // We put the high-bit for the 1st 8 quants into bit 0, the next 8 into bit 1, etc. + int m = 0; + uint8_t hm = 1; + for (int j = 0; j < QK_K; ++j) { + if (L[j] > 3) { + y[i].hmask[m] |= hm; + L[j] -= 4; + } + if (++m == QK_K/8) { + m = 0; hm <<= 1; + } + } + for (int j = 0; j < QK_K; j += 128) { + for (int l = 0; l < 32; ++l) { + y[i].qs[j/4 + l] = L[j + l] | (L[j + l + 32] << 2) | (L[j + l + 64] << 4) | (L[j + l + 96] << 6); + } + } + + x += QK_K; + } +} + +void dequantize_row_q3_K(const block_q3_K * restrict x, float * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + const uint32_t kmask1 = 0x03030303; + const uint32_t kmask2 = 0x0f0f0f0f; + + uint32_t aux[4]; + const int8_t * scales = (const int8_t*)aux; + + for (int i = 0; i < nb; i++) { + + const float d_all = GGML_FP16_TO_FP32(x[i].d); + + const uint8_t * restrict q = x[i].qs; + const uint8_t * restrict hm = x[i].hmask; + uint8_t m = 1; + + memcpy(aux, x[i].scales, 12); + uint32_t tmp = aux[2]; + aux[2] = ((aux[0] >> 4) & kmask2) | (((tmp >> 4) & kmask1) << 4); + aux[3] = ((aux[1] >> 4) & kmask2) | (((tmp >> 6) & kmask1) << 4); + aux[0] = (aux[0] & kmask2) | (((tmp >> 0) & kmask1) << 4); + aux[1] = (aux[1] & kmask2) | (((tmp >> 2) & kmask1) << 4); + + int is = 0; + float dl; + for (int n = 0; n < QK_K; n += 128) { + int shift = 0; + for (int j = 0; j < 4; ++j) { + + dl = d_all * (scales[is++] - 32); + for (int l = 0; l < 16; ++l) { + *y++ = dl * ((int8_t)((q[l+ 0] >> shift) & 3) - ((hm[l+ 0] & m) ? 
0 : 4)); + } + + dl = d_all * (scales[is++] - 32); + for (int l = 0; l < 16; ++l) { + *y++ = dl * ((int8_t)((q[l+16] >> shift) & 3) - ((hm[l+16] & m) ? 0 : 4)); + } + + shift += 2; + m <<= 1; + } + q += 32; + } + + } +} + +void quantize_row_q3_K(const float * restrict x, void * restrict vy, int64_t k) { + quantize_row_q3_K_ref(x, vy, k); +} + +static void quantize_row_q3_K_impl(const float * restrict x, block_q3_K * restrict y, int64_t n_per_row, const float * restrict quant_weights) { + assert(n_per_row % QK_K == 0); + const int nb = n_per_row / QK_K; + + int8_t L[QK_K]; + float scales[QK_K / 16]; + float weight[16]; + float sw[QK_K / 16]; + int8_t Ls[QK_K / 16]; + + for (int i = 0; i < nb; i++) { + + float sumx2 = 0; + for (int j = 0; j < QK_K; ++j) sumx2 += x[j]*x[j]; + float sigma2 = 2*sumx2/QK_K; + + for (int j = 0; j < QK_K/16; ++j) { + if (quant_weights) { + const float * qw = quant_weights + QK_K * i + 16*j; + for (int l = 0; l < 16; ++l) weight[l] = qw[l] * sqrtf(sigma2 + x[16*j+l]*x[16*j+l]); + } else { + for (int l = 0; l < 16; ++l) weight[l] = x[16*j+l]*x[16*j+l]; + } + float sumw = 0; + for (int l = 0; l < 16; ++l) sumw += weight[l]; + sw[j] = sumw; + + scales[j] = make_qx_quants(16, 4, x + 16*j, L + 16*j, 1, weight); + + } + + memset(y[i].scales, 0, 12); + + float d_block = make_qx_quants(QK_K/16, 32, scales, Ls, 1, sw); + for (int j = 0; j < QK_K/16; ++j) { + int l = Ls[j]; + if (j < 8) { + y[i].scales[j] = l & 0xF; + } else { + y[i].scales[j-8] |= ((l & 0xF) << 4); + } + l >>= 4; + y[i].scales[j%4 + 8] |= (l << (2*(j/4))); + } + y[i].d = GGML_FP32_TO_FP16(d_block); + + int8_t sc; + for (int j = 0; j < QK_K/16; ++j) { + sc = j < 8 ? 
y[i].scales[j] & 0xF : y[i].scales[j-8] >> 4; + sc = (sc | (((y[i].scales[8 + j%4] >> (2*(j/4))) & 3) << 4)) - 32; + float d = GGML_FP16_TO_FP32(y[i].d) * sc; + if (!d) { + continue; + } + for (int ii = 0; ii < 16; ++ii) { + int l = nearest_int(x[16*j + ii]/d); + l = MAX(-4, MIN(3, l)); + L[16*j + ii] = l + 4; + } + } + + memset(y[i].hmask, 0, QK_K/8); + // We put the high-bit for the 1st 8 quants into bit 0, the next 8 into bit 1, etc. + int m = 0; + uint8_t hm = 1; + for (int j = 0; j < QK_K; ++j) { + if (L[j] > 3) { + y[i].hmask[m] |= hm; + L[j] -= 4; + } + if (++m == QK_K/8) { + m = 0; hm <<= 1; + } + } + for (int j = 0; j < QK_K; j += 128) { + for (int l = 0; l < 32; ++l) { + y[i].qs[j/4 + l] = L[j + l] | (L[j + l + 32] << 2) | (L[j + l + 64] << 4) | (L[j + l + 96] << 6); + } + } + + x += QK_K; + } +} + +size_t quantize_q3_K(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + size_t row_size = ggml_row_size(GGML_TYPE_Q3_K, n_per_row); + if (!quant_weights) { + quantize_row_q3_K_ref(src, dst, (int64_t)nrow*n_per_row); + } + else { + char * qrow = (char *)dst; + for (int64_t row = 0; row < nrow; ++row) { + quantize_row_q3_K_impl(src, (block_q3_K*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + } + return nrow * row_size; +} + +// ====================== 4-bit (de)-quantization + +void quantize_row_q4_K_ref(const float * restrict x, block_q4_K * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + uint8_t L[QK_K]; + uint8_t Laux[32]; + float weights[32]; + float mins[QK_K/32]; + float scales[QK_K/32]; + + for (int i = 0; i < nb; i++) { + float max_scale = 0; // as we are deducting the min, scales are always positive + float max_min = 0; + for (int j = 0; j < QK_K/32; ++j) { + //scales[j] = make_qkx1_quants(32, 15, x + 32*j, L + 32*j, &mins[j], 9, 0.5f); + float sum_x2 = 0; + for (int l = 0; l < 32; ++l) sum_x2 += x[32*j + l] * x[32*j + l]; + 
float av_x = sqrtf(sum_x2/32); + for (int l = 0; l < 32; ++l) weights[l] = av_x + fabsf(x[32*j + l]); + scales[j] = make_qkx2_quants(32, 15, x + 32*j, weights, L + 32*j, &mins[j], Laux, -1.f, 0.1f, 20, false); + float scale = scales[j]; + if (scale > max_scale) { + max_scale = scale; + } + float min = mins[j]; + if (min > max_min) { + max_min = min; + } + } + + float inv_scale = max_scale > 0 ? 63.f/max_scale : 0.f; + float inv_min = max_min > 0 ? 63.f/max_min : 0.f; + for (int j = 0; j < QK_K/32; ++j) { + uint8_t ls = nearest_int(inv_scale*scales[j]); + uint8_t lm = nearest_int(inv_min*mins[j]); + ls = MIN(63, ls); + lm = MIN(63, lm); + if (j < 4) { + y[i].scales[j] = ls; + y[i].scales[j+4] = lm; + } else { + y[i].scales[j+4] = (ls & 0xF) | ((lm & 0xF) << 4); + y[i].scales[j-4] |= ((ls >> 4) << 6); + y[i].scales[j-0] |= ((lm >> 4) << 6); + } + } + y[i].d = GGML_FP32_TO_FP16(max_scale/63.f); + y[i].dmin = GGML_FP32_TO_FP16(max_min/63.f); + + uint8_t sc, m; + for (int j = 0; j < QK_K/32; ++j) { + get_scale_min_k4(j, y[i].scales, &sc, &m); + const float d = GGML_FP16_TO_FP32(y[i].d) * sc; + if (!d) continue; + const float dm = GGML_FP16_TO_FP32(y[i].dmin) * m; + for (int ii = 0; ii < 32; ++ii) { + int l = nearest_int((x[32*j + ii] + dm)/d); + l = MAX(0, MIN(15, l)); + L[32*j + ii] = l; + } + } + + uint8_t * q = y[i].qs; + for (int j = 0; j < QK_K; j += 64) { + for (int l = 0; l < 32; ++l) q[l] = L[j + l] | (L[j + l + 32] << 4); + q += 32; + } + + x += QK_K; + } +} + +void dequantize_row_q4_K(const block_q4_K * restrict x, float * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + for (int i = 0; i < nb; i++) { + const uint8_t * q = x[i].qs; + + const float d = GGML_FP16_TO_FP32(x[i].d); + const float min = GGML_FP16_TO_FP32(x[i].dmin); + + int is = 0; + uint8_t sc, m; + for (int j = 0; j < QK_K; j += 64) { + get_scale_min_k4(is + 0, x[i].scales, &sc, &m); + const float d1 = d * sc; const float m1 = min * m; + get_scale_min_k4(is + 1, 
x[i].scales, &sc, &m); + const float d2 = d * sc; const float m2 = min * m; + for (int l = 0; l < 32; ++l) *y++ = d1 * (q[l] & 0xF) - m1; + for (int l = 0; l < 32; ++l) *y++ = d2 * (q[l] >> 4) - m2; + q += 32; is += 2; + } + } +} + +void quantize_row_q4_K(const float * restrict x, void * restrict vy, int64_t k) { + assert(k % QK_K == 0); + block_q4_K * restrict y = vy; + quantize_row_q4_K_ref(x, y, k); +} + +static void quantize_row_q4_K_impl(const float * restrict x, block_q4_K * restrict y, int64_t n_per_row, const float * quant_weights) { + assert(n_per_row % QK_K == 0); + const int64_t nb = n_per_row / QK_K; + + uint8_t L[QK_K]; + uint8_t Laux[32]; + uint8_t Ls[QK_K/32]; + uint8_t Lm[QK_K/32]; + float weights[32]; + float sw[QK_K/32]; + float mins[QK_K/32]; + float scales[QK_K/32]; + + for (int i = 0; i < nb; i++) { + + float sum_x2 = 0; + for (int l = 0; l < QK_K; ++l) sum_x2 += x[l] * x[l]; + float sigma2 = 2*sum_x2/QK_K; + float av_x = sqrtf(sigma2); + + for (int j = 0; j < QK_K/32; ++j) { + if (quant_weights) { + const float * qw = quant_weights + QK_K*i + 32*j; + for (int l = 0; l < 32; ++l) weights[l] = qw[l] * sqrtf(sigma2 + x[32*j + l]*x[32*j + l]); + } else { + for (int l = 0; l < 32; ++l) weights[l] = av_x + fabsf(x[32*j + l]); + } + float sumw = 0; + for (int l = 0; l < 32; ++l) sumw += weights[l]; + sw[j] = sumw; + scales[j] = make_qkx3_quants(32, 15, x + 32*j, weights, L + 32*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); + } + + float d_block = make_qp_quants(QK_K/32, 63, scales, Ls, sw); + float m_block = make_qp_quants(QK_K/32, 63, mins, Lm, sw); + for (int j = 0; j < QK_K/32; ++j) { + uint8_t ls = Ls[j]; + uint8_t lm = Lm[j]; + if (j < 4) { + y[i].scales[j] = ls; + y[i].scales[j+4] = lm; + } else { + y[i].scales[j+4] = (ls & 0xF) | ((lm & 0xF) << 4); + y[i].scales[j-4] |= ((ls >> 4) << 6); + y[i].scales[j-0] |= ((lm >> 4) << 6); + } + } + y[i].d = GGML_FP32_TO_FP16(d_block); + y[i].dmin = GGML_FP32_TO_FP16(m_block); + + uint8_t sc, m; + for (int 
j = 0; j < QK_K/32; ++j) { + get_scale_min_k4(j, y[i].scales, &sc, &m); + const float d = GGML_FP16_TO_FP32(y[i].d) * sc; + if (!d) continue; + const float dm = GGML_FP16_TO_FP32(y[i].dmin) * m; + for (int ii = 0; ii < 32; ++ii) { + int l = nearest_int((x[32*j + ii] + dm)/d); + l = MAX(0, MIN(15, l)); + L[32*j + ii] = l; + } + } + uint8_t * q = y[i].qs; + for (int j = 0; j < QK_K; j += 64) { + for (int l = 0; l < 32; ++l) q[l] = L[j + l] | (L[j + l + 32] << 4); + q += 32; + } + + x += QK_K; + + } +} + +size_t quantize_q4_K(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + size_t row_size = ggml_row_size(GGML_TYPE_Q4_K, n_per_row); + if (!quant_weights) { + quantize_row_q4_K_ref(src, dst, (int64_t)nrow*n_per_row); + } + else { + char * qrow = (char *)dst; + for (int64_t row = 0; row < nrow; ++row) { + quantize_row_q4_K_impl(src, (block_q4_K*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + } + return nrow * row_size; +} + +// ====================== 5-bit (de)-quantization + +void quantize_row_q5_K_ref(const float * restrict x, block_q5_K * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int64_t nb = k / QK_K; + + uint8_t L[QK_K]; + float mins[QK_K/32]; + float scales[QK_K/32]; + float weights[32]; + uint8_t Laux[32]; + + for (int i = 0; i < nb; i++) { + float max_scale = 0; // as we are deducting the min, scales are always positive + float max_min = 0; + for (int j = 0; j < QK_K/32; ++j) { + //scales[j] = make_qkx1_quants(32, 31, x + 32*j, L + 32*j, &mins[j], 9, 0.5f); + float sum_x2 = 0; + for (int l = 0; l < 32; ++l) sum_x2 += x[32*j + l] * x[32*j + l]; + float av_x = sqrtf(sum_x2/32); + for (int l = 0; l < 32; ++l) weights[l] = av_x + fabsf(x[32*j + l]); + scales[j] = make_qkx2_quants(32, 31, x + 32*j, weights, L + 32*j, &mins[j], Laux, -0.5f, 0.1f, 15, false); + float scale = scales[j]; + if (scale > max_scale) { + max_scale = scale; + } + float min = 
mins[j]; + if (min > max_min) { + max_min = min; + } + } + + float inv_scale = max_scale > 0 ? 63.f/max_scale : 0.f; + float inv_min = max_min > 0 ? 63.f/max_min : 0.f; + for (int j = 0; j < QK_K/32; ++j) { + uint8_t ls = nearest_int(inv_scale*scales[j]); + uint8_t lm = nearest_int(inv_min*mins[j]); + ls = MIN(63, ls); + lm = MIN(63, lm); + if (j < 4) { + y[i].scales[j] = ls; + y[i].scales[j+4] = lm; + } else { + y[i].scales[j+4] = (ls & 0xF) | ((lm & 0xF) << 4); + y[i].scales[j-4] |= ((ls >> 4) << 6); + y[i].scales[j-0] |= ((lm >> 4) << 6); + } + } + y[i].d = GGML_FP32_TO_FP16(max_scale/63.f); + y[i].dmin = GGML_FP32_TO_FP16(max_min/63.f); + + uint8_t sc, m; + for (int j = 0; j < QK_K/32; ++j) { + get_scale_min_k4(j, y[i].scales, &sc, &m); + const float d = GGML_FP16_TO_FP32(y[i].d) * sc; + if (!d) continue; + const float dm = GGML_FP16_TO_FP32(y[i].dmin) * m; + for (int ii = 0; ii < 32; ++ii) { + int l = nearest_int((x[32*j + ii] + dm)/d); + l = MAX(0, MIN(31, l)); + L[32*j + ii] = l; + } + } + + uint8_t * restrict qh = y[i].qh; + uint8_t * restrict ql = y[i].qs; + memset(qh, 0, QK_K/8); + + uint8_t m1 = 1, m2 = 2; + for (int n = 0; n < QK_K; n += 64) { + for (int j = 0; j < 32; ++j) { + int l1 = L[n + j]; + if (l1 > 15) { + l1 -= 16; qh[j] |= m1; + } + int l2 = L[n + j + 32]; + if (l2 > 15) { + l2 -= 16; qh[j] |= m2; + } + ql[j] = l1 | (l2 << 4); + } + m1 <<= 2; m2 <<= 2; + ql += 32; + } + + x += QK_K; + } +} + +void dequantize_row_q5_K(const block_q5_K * restrict x, float * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int64_t nb = k / QK_K; + + for (int i = 0; i < nb; i++) { + const uint8_t * ql = x[i].qs; + const uint8_t * qh = x[i].qh; + + const float d = GGML_FP16_TO_FP32(x[i].d); + const float min = GGML_FP16_TO_FP32(x[i].dmin); + + int is = 0; + uint8_t sc, m; + uint8_t u1 = 1, u2 = 2; + for (int j = 0; j < QK_K; j += 64) { + get_scale_min_k4(is + 0, x[i].scales, &sc, &m); + const float d1 = d * sc; const float m1 = min * m; + 
get_scale_min_k4(is + 1, x[i].scales, &sc, &m); + const float d2 = d * sc; const float m2 = min * m; + for (int l = 0; l < 32; ++l) *y++ = d1 * ((ql[l] & 0xF) + (qh[l] & u1 ? 16 : 0)) - m1; + for (int l = 0; l < 32; ++l) *y++ = d2 * ((ql[l] >> 4) + (qh[l] & u2 ? 16 : 0)) - m2; + ql += 32; is += 2; + u1 <<= 2; u2 <<= 2; + } + } +} + +void quantize_row_q5_K(const float * restrict x, void * restrict vy, int64_t k) { + assert(k % QK_K == 0); + block_q5_K * restrict y = vy; + quantize_row_q5_K_ref(x, y, k); +} + +static void quantize_row_q5_K_impl(const float * restrict x, block_q5_K * restrict y, int64_t n_per_row, const float * quant_weights) { + assert(n_per_row % QK_K == 0); + const int64_t nb = n_per_row / QK_K; + + uint8_t L[QK_K]; + uint8_t Laux[32]; + uint8_t Ls[QK_K/32]; + uint8_t Lm[QK_K/32]; + float mins[QK_K/32]; + float scales[QK_K/32]; + float sw[QK_K/32]; + float weights[32]; + + for (int i = 0; i < nb; i++) { + + float sum_x2 = 0; + for (int l = 0; l < QK_K; ++l) sum_x2 += x[l] * x[l]; + float sigma2 = 2*sum_x2/QK_K; + float av_x = sqrtf(sigma2); + + for (int j = 0; j < QK_K/32; ++j) { + if (quant_weights) { + const float * qw = quant_weights + QK_K*i + 32*j; + for (int l = 0; l < 32; ++l) weights[l] = qw[l] * sqrtf(sigma2 + x[32*j + l]*x[32*j + l]); + } else { + for (int l = 0; l < 32; ++l) weights[l] = av_x + fabsf(x[32*j + l]); + } + float sumw = 0; + for (int l = 0; l < 32; ++l) sumw += weights[l]; + sw[j] = sumw; + + scales[j] = make_qkx3_quants(32, 31, x + 32*j, weights, L + 32*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); + } + + float d_block = make_qp_quants(QK_K/32, 63, scales, Ls, sw); + float m_block = make_qp_quants(QK_K/32, 63, mins, Lm, sw); + + for (int j = 0; j < QK_K/32; ++j) { + uint8_t ls = Ls[j]; + uint8_t lm = Lm[j]; + ls = MIN(63, ls); + lm = MIN(63, lm); + if (j < 4) { + y[i].scales[j] = ls; + y[i].scales[j+4] = lm; + } else { + y[i].scales[j+4] = (ls & 0xF) | ((lm & 0xF) << 4); + y[i].scales[j-4] |= ((ls >> 4) << 6); + 
y[i].scales[j-0] |= ((lm >> 4) << 6); + } + } + y[i].d = GGML_FP32_TO_FP16(d_block); + y[i].dmin = GGML_FP32_TO_FP16(m_block); + + uint8_t sc, m; + for (int j = 0; j < QK_K/32; ++j) { + get_scale_min_k4(j, y[i].scales, &sc, &m); + const float d = GGML_FP16_TO_FP32(y[i].d) * sc; + if (!d) continue; + const float dm = GGML_FP16_TO_FP32(y[i].dmin) * m; + for (int ii = 0; ii < 32; ++ii) { + int l = nearest_int((x[32*j + ii] + dm)/d); + l = MAX(0, MIN(31, l)); + L[32*j + ii] = l; + } + } + + uint8_t * restrict qh = y[i].qh; + uint8_t * restrict ql = y[i].qs; + memset(qh, 0, QK_K/8); + + uint8_t m1 = 1, m2 = 2; + for (int n = 0; n < QK_K; n += 64) { + for (int j = 0; j < 32; ++j) { + int l1 = L[n + j]; + if (l1 > 15) { + l1 -= 16; qh[j] |= m1; + } + int l2 = L[n + j + 32]; + if (l2 > 15) { + l2 -= 16; qh[j] |= m2; + } + ql[j] = l1 | (l2 << 4); + } + m1 <<= 2; m2 <<= 2; + ql += 32; + } + + x += QK_K; + + } +} + +size_t quantize_q5_K(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + size_t row_size = ggml_row_size(GGML_TYPE_Q5_K, n_per_row); + if (!quant_weights) { + quantize_row_q5_K_ref(src, dst, (int64_t)nrow*n_per_row); + } + else { + char * qrow = (char *)dst; + for (int64_t row = 0; row < nrow; ++row) { + quantize_row_q5_K_impl(src, (block_q5_K*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + } + return nrow * row_size; +} + +// ====================== 6-bit (de)-quantization + +void quantize_row_q6_K_ref(const float * restrict x, block_q6_K * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int64_t nb = k / QK_K; + + int8_t L[QK_K]; + float scales[QK_K/16]; + + for (int i = 0; i < nb; i++) { + + float max_scale = 0; + float max_abs_scale = 0; + + for (int ib = 0; ib < QK_K/16; ++ib) { + + const float scale = make_qx_quants(16, 32, x + 16*ib, L + 16*ib, 1, NULL); + scales[ib] = scale; + + const float abs_scale = fabsf(scale); + if (abs_scale > max_abs_scale) { + 
max_abs_scale = abs_scale; + max_scale = scale; + } + + } + + if (max_abs_scale < GROUP_MAX_EPS) { + memset(&y[i], 0, sizeof(block_q6_K)); + y[i].d = GGML_FP32_TO_FP16(0.f); + x += QK_K; + continue; + } + + float iscale = -128.f/max_scale; + y[i].d = GGML_FP32_TO_FP16(1/iscale); + for (int ib = 0; ib < QK_K/16; ++ib) { + y[i].scales[ib] = MIN(127, nearest_int(iscale*scales[ib])); + } + + for (int j = 0; j < QK_K/16; ++j) { + float d = GGML_FP16_TO_FP32(y[i].d) * y[i].scales[j]; + if (!d) { + continue; + } + for (int ii = 0; ii < 16; ++ii) { + int l = nearest_int(x[16*j + ii]/d); + l = MAX(-32, MIN(31, l)); + L[16*j + ii] = l + 32; + } + } + + uint8_t * restrict ql = y[i].ql; + uint8_t * restrict qh = y[i].qh; + for (int j = 0; j < QK_K; j += 128) { + for (int l = 0; l < 32; ++l) { + const uint8_t q1 = L[j + l + 0] & 0xF; + const uint8_t q2 = L[j + l + 32] & 0xF; + const uint8_t q3 = L[j + l + 64] & 0xF; + const uint8_t q4 = L[j + l + 96] & 0xF; + ql[l+ 0] = q1 | (q3 << 4); + ql[l+32] = q2 | (q4 << 4); + qh[l] = (L[j + l] >> 4) | ((L[j + l + 32] >> 4) << 2) | ((L[j + l + 64] >> 4) << 4) | ((L[j + l + 96] >> 4) << 6); + } + ql += 64; + qh += 32; + } + + x += QK_K; + } +} + +void dequantize_row_q6_K(const block_q6_K * restrict x, float * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int64_t nb = k / QK_K; + + for (int i = 0; i < nb; i++) { + const float d = GGML_FP16_TO_FP32(x[i].d); + + const uint8_t * restrict ql = x[i].ql; + const uint8_t * restrict qh = x[i].qh; + const int8_t * restrict sc = x[i].scales; + + for (int n = 0; n < QK_K; n += 128) { + for (int l = 0; l < 32; ++l) { + int is = l/16; + const int8_t q1 = (int8_t)((ql[l + 0] & 0xF) | (((qh[l] >> 0) & 3) << 4)) - 32; + const int8_t q2 = (int8_t)((ql[l + 32] & 0xF) | (((qh[l] >> 2) & 3) << 4)) - 32; + const int8_t q3 = (int8_t)((ql[l + 0] >> 4) | (((qh[l] >> 4) & 3) << 4)) - 32; + const int8_t q4 = (int8_t)((ql[l + 32] >> 4) | (((qh[l] >> 6) & 3) << 4)) - 32; + y[l + 0] = d * sc[is + 0] * q1; + 
y[l + 32] = d * sc[is + 2] * q2; + y[l + 64] = d * sc[is + 4] * q3; + y[l + 96] = d * sc[is + 6] * q4; + } + y += 128; + ql += 64; + qh += 32; + sc += 8; + } + } +} + +void quantize_row_q6_K(const float * restrict x, void * restrict vy, int64_t k) { + assert(k % QK_K == 0); + block_q6_K * restrict y = vy; + quantize_row_q6_K_ref(x, y, k); +} + +static void quantize_row_q6_K_impl(const float * restrict x, block_q6_K * restrict y, int64_t n_per_row, const float * quant_weights) { + assert(n_per_row % QK_K == 0); + const int64_t nb = n_per_row / QK_K; + + int8_t L[QK_K]; + float scales[QK_K/16]; + //float weights[16]; + + for (int i = 0; i < nb; i++) { + + //float sum_x2 = 0; + //for (int j = 0; j < QK_K; ++j) sum_x2 += x[j]*x[j]; + //float sigma2 = sum_x2/QK_K; + + float max_scale = 0; + float max_abs_scale = 0; + + for (int ib = 0; ib < QK_K/16; ++ib) { + + float scale; + if (quant_weights) { + const float * qw = quant_weights + QK_K*i + 16*ib; + //for (int j = 0; j < 16; ++j) weights[j] = qw[j] * sqrtf(sigma2 + x[16*ib + j]*x[16*ib + j]); + //scale = make_qx_quants(16, 32, x + 16*ib, L + 16*ib, 1, weights); + scale = make_qx_quants(16, 32, x + 16*ib, L + 16*ib, 1, qw); + } else { + scale = make_qx_quants(16, 32, x + 16*ib, L + 16*ib, 1, NULL); + } + scales[ib] = scale; + + const float abs_scale = fabsf(scale); + if (abs_scale > max_abs_scale) { + max_abs_scale = abs_scale; + max_scale = scale; + } + + } + + if (max_abs_scale < GROUP_MAX_EPS) { + memset(&y[i], 0, sizeof(block_q6_K)); + y[i].d = GGML_FP32_TO_FP16(0.f); + x += QK_K; + continue; + } + + float iscale = -128.f/max_scale; + y[i].d = GGML_FP32_TO_FP16(1/iscale); + for (int ib = 0; ib < QK_K/16; ++ib) { + y[i].scales[ib] = MIN(127, nearest_int(iscale*scales[ib])); + } + + for (int j = 0; j < QK_K/16; ++j) { + float d = GGML_FP16_TO_FP32(y[i].d) * y[i].scales[j]; + if (!d) { + continue; + } + for (int ii = 0; ii < 16; ++ii) { + int l = nearest_int(x[16*j + ii]/d); + l = MAX(-32, MIN(31, l)); + L[16*j + ii] = 
l + 32; + } + } + + uint8_t * restrict ql = y[i].ql; + uint8_t * restrict qh = y[i].qh; + for (int j = 0; j < QK_K; j += 128) { + for (int l = 0; l < 32; ++l) { + const uint8_t q1 = L[j + l + 0] & 0xF; + const uint8_t q2 = L[j + l + 32] & 0xF; + const uint8_t q3 = L[j + l + 64] & 0xF; + const uint8_t q4 = L[j + l + 96] & 0xF; + ql[l+ 0] = q1 | (q3 << 4); + ql[l+32] = q2 | (q4 << 4); + qh[l] = (L[j + l] >> 4) | ((L[j + l + 32] >> 4) << 2) | ((L[j + l + 64] >> 4) << 4) | ((L[j + l + 96] >> 4) << 6); + } + ql += 64; + qh += 32; + } + + x += QK_K; + + } +} + +size_t quantize_q6_K(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + size_t row_size = ggml_row_size(GGML_TYPE_Q6_K, n_per_row); + if (!quant_weights) { + quantize_row_q6_K_ref(src, dst, (int64_t)nrow*n_per_row); + } + else { + char * qrow = (char *)dst; + for (int64_t row = 0; row < nrow; ++row) { + quantize_row_q6_K_impl(src, (block_q6_K*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + } + return nrow * row_size; +} + +static void quantize_row_q4_0_impl(const float * restrict x, block_q4_0 * restrict y, int64_t n_per_row, const float * quant_weights) { + static_assert(QK4_0 == 32, "QK4_0 must be 32"); + + if (!quant_weights) { + quantize_row_q4_0_ref(x, y, n_per_row); + return; + } + + float weight[QK4_0]; + int8_t L[QK4_0]; + + float sum_x2 = 0; + for (int j = 0; j < n_per_row; ++j) sum_x2 += x[j]*x[j]; + float sigma2 = sum_x2/n_per_row; + + const int64_t nb = n_per_row/QK4_0; + for (int ib = 0; ib < nb; ++ib) { + const float * xb = x + QK4_0 * ib; + const float * qw = quant_weights + QK4_0 * ib; + for (int j = 0; j < QK4_0; ++j) weight[j] = qw[j] * sqrtf(sigma2 + xb[j]*xb[j]); + float d = make_qx_quants(QK4_0, 8, xb, L, 1, weight); + y[ib].d = GGML_FP32_TO_FP16(d); + for (int j = 0; j < 16; ++j) { + y[ib].qs[j] = L[j] | (L[j+16] << 4); + } + } +} + +size_t quantize_q4_0(const float * restrict src, void * restrict 
dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) {
    // Row-wise dispatcher for Q4_0: without importance weights, fall back to the
    // reference quantizer over the whole buffer; with weights, quantize row by row.
    if (!quant_weights) {
        quantize_row_q4_0_ref(src, dst, (int64_t)nrow*n_per_row);
        return nrow * ggml_row_size(GGML_TYPE_Q4_0, n_per_row);
    }
    size_t row_size = ggml_row_size(GGML_TYPE_Q4_0, n_per_row);
    char * qrow = (char *)dst;
    for (int64_t row = 0; row < nrow; ++row) {
        quantize_row_q4_0_impl(src, (block_q4_0*)qrow, n_per_row, quant_weights);
        src += n_per_row;
        qrow += row_size;
    }
    return nrow * row_size;
}

// Importance-weighted Q4_1 quantization of one row (4 bits + per-block scale and min).
// quant_weights must be non-NULL here except for the reference fallback at the top.
static void quantize_row_q4_1_impl(const float * restrict x, block_q4_1 * restrict y, int64_t n_per_row, const float * quant_weights) {
    static_assert(QK4_1 == 32, "QK4_1 must be 32");

    if (!quant_weights) {
        quantize_row_q4_1_ref(x, y, n_per_row);
        return;
    }

    float weight[QK4_1];
    uint8_t L[QK4_1], Laux[QK4_1];

    // sigma2 is the row's mean square, used to regularize the per-element weights.
    float sum_x2 = 0;
    for (int j = 0; j < n_per_row; ++j) sum_x2 += x[j]*x[j];
    float sigma2 = sum_x2/n_per_row;

    const int64_t nb = n_per_row/QK4_1;
    for (int ib = 0; ib < nb; ++ib) {
        const float * xb = x + QK4_1 * ib;
        const float * qw = quant_weights + QK4_1 * ib;
        for (int j = 0; j < QK4_1; ++j) weight[j] = qw[j] * sqrtf(sigma2 + xb[j]*xb[j]);
        float min;
        float d = make_qkx3_quants(QK4_1, 15, xb, weight, L, &min, Laux, -0.9f, 0.05f, 36, false);
        y[ib].d = GGML_FP32_TO_FP16(d);
        y[ib].m = GGML_FP32_TO_FP16(-min);
        // Pack two 4-bit values per byte: element j in the low nibble, j+16 in the high nibble.
        for (int j = 0; j < 16; ++j) {
            y[ib].qs[j] = L[j] | (L[j+16] << 4);
        }
    }
}

// Row-wise dispatcher for Q4_1; mirrors quantize_q4_0 above.
size_t quantize_q4_1(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) {
    if (!quant_weights) {
        quantize_row_q4_1_ref(src, dst, (int64_t)nrow*n_per_row);
        return nrow * ggml_row_size(GGML_TYPE_Q4_1, n_per_row);
    }
    size_t row_size = ggml_row_size(GGML_TYPE_Q4_1, n_per_row);
    char * qrow = (char *)dst;
    for (int64_t row = 0; row < nrow; ++row) {
        quantize_row_q4_1_impl(src, (block_q4_1*)qrow, n_per_row, quant_weights);
        src += n_per_row;
        qrow += row_size;
    }
    return nrow * row_size;
}

// Importance-weighted Q5_0 quantization of one row (5 bits: 4-bit nibbles in qs,
// fifth bits collected into the 32-bit qh mask).
static void quantize_row_q5_0_impl(const float * restrict x, block_q5_0 * restrict y, int64_t n_per_row, const float * quant_weights) {
    static_assert(QK5_0 == 32, "QK5_0 must be 32");

    if (!quant_weights) {
        quantize_row_q5_0_ref(x, y, n_per_row);
        return;
    }

    float weight[QK5_0];
    int8_t L[QK5_0];

    float sum_x2 = 0;
    for (int j = 0; j < n_per_row; ++j) sum_x2 += x[j]*x[j];
    float sigma2 = sum_x2/n_per_row;

    const int64_t nb = n_per_row/QK5_0;
    for (int ib = 0; ib < nb; ++ib) {
        const float * xb = x + QK5_0 * ib;
        const float * qw = quant_weights + QK5_0 * ib;
        for (int j = 0; j < QK5_0; ++j) weight[j] = qw[j] * sqrtf(sigma2 + xb[j]*xb[j]);
        float d = make_qx_quants(QK5_0, 16, xb, L, 1, weight);
        y[ib].d = GGML_FP32_TO_FP16(d);

        uint32_t qh = 0;

        for (int j = 0; j < 16; ++j) {
            const uint8_t xi0 = L[j];
            const uint8_t xi1 = L[j+16];
            y[ib].qs[j] = (xi0 & 0x0F) | ((xi1 & 0x0F) << 4);

            // get the 5-th bit and store it in qh at the right position
            qh |= ((xi0 & 0x10u) >> 4) << (j + 0);
            qh |= ((xi1 & 0x10u) >> 4) << (j + QK5_0/2);
        }

        memcpy(&y[ib].qh, &qh, sizeof(qh));
    }
}

// Row-wise dispatcher for Q5_0; mirrors quantize_q4_0 above.
size_t quantize_q5_0(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) {
    if (!quant_weights) {
        quantize_row_q5_0_ref(src, dst, (int64_t)nrow*n_per_row);
        return nrow * ggml_row_size(GGML_TYPE_Q5_0, n_per_row);
    }
    size_t row_size = ggml_row_size(GGML_TYPE_Q5_0, n_per_row);
    char * qrow = (char *)dst;
    for (int64_t row = 0; row < nrow; ++row) {
        quantize_row_q5_0_impl(src, (block_q5_0*)qrow, n_per_row, quant_weights);
        src += n_per_row;
        qrow += row_size;
    }
    return nrow * row_size;
}

// Importance-weighted Q5_1 quantization of one row (5 bits + per-block scale and min).
static void quantize_row_q5_1_impl(const float * restrict x, block_q5_1 * restrict y, int64_t n_per_row, const float * quant_weights) {
    static_assert(QK5_1 == 32, "QK5_1 must be 32");

    if (!quant_weights) {
        quantize_row_q5_1_ref(x, y, n_per_row);
        return;
    }
+ float weight[QK5_1]; + uint8_t L[QK5_1], Laux[QK5_1]; + + float sum_x2 = 0; + for (int j = 0; j < n_per_row; ++j) sum_x2 += x[j]*x[j]; + float sigma2 = sum_x2/n_per_row; + + const int64_t nb = n_per_row/QK5_1; + for (int ib = 0; ib < nb; ++ib) { + const float * xb = x + QK5_1 * ib; + const float * qw = quant_weights + QK5_1 * ib; + for (int j = 0; j < QK5_1; ++j) weight[j] = qw[j] * sqrtf(sigma2 + xb[j]*xb[j]); + float min; + float d = make_qkx3_quants(QK5_1, 31, xb, weight, L, &min, Laux, -0.9f, 0.05f, 36, false); + y[ib].d = GGML_FP32_TO_FP16(d); + y[ib].m = GGML_FP32_TO_FP16(-min); + + uint32_t qh = 0; + for (int j = 0; j < 16; ++j) { + const uint8_t xi0 = L[j]; + const uint8_t xi1 = L[j+16]; + y[ib].qs[j] = (xi0 & 0x0F) | ((xi1 & 0x0F) << 4); + // get the 5-th bit and store it in qh at the right position + qh |= ((xi0 & 0x10u) >> 4) << (j + 0); + qh |= ((xi1 & 0x10u) >> 4) << (j + QK5_0/2); + } + memcpy(&y[ib].qh, &qh, sizeof(qh)); + } +} + +size_t quantize_q5_1(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + if (!quant_weights) { + quantize_row_q5_1_ref(src, dst, (int64_t)nrow*n_per_row); + return nrow * ggml_row_size(GGML_TYPE_Q5_1, n_per_row); + } + size_t row_size = ggml_row_size(GGML_TYPE_Q5_1, n_per_row); + char * qrow = (char *)dst; + for (int64_t row = 0; row < nrow; ++row) { + quantize_row_q5_1_impl(src, (block_q5_1*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + return nrow * row_size; +} + +size_t quantize_q8_0(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + (void)quant_weights; // not used + const size_t row_size = ggml_row_size(GGML_TYPE_Q8_0, n_per_row); + quantize_row_q8_0_ref(src, dst, (int64_t)nrow*n_per_row); + return nrow * row_size; +} + +// ====================== Ternary (de)-quantization (BitNet b1.58 and TriLMs) + +void quantize_row_tq1_0_ref(const float * restrict x, 
block_tq1_0 * restrict y, int64_t k) {
    assert(k % QK_K == 0);
    const int64_t nb = k / QK_K;

    for (int64_t i = 0; i < nb; i++) {
        float amax = 0.0f; // absolute max

        for (int j = 0; j < QK_K; j++) {
            const float v = x[j];
            amax = MAX(amax, fabsf(v));
        }

        // Scale is simply the block's absolute max; id maps values into {-1, 0, 1}.
        const float d = amax;
        const float id = d ? 1.0f/d : 0.0f;

        y[i].d = GGML_FP32_TO_FP16(d);

        // 5 elements per byte, along 32 bytes
        for (size_t j = 0; j < sizeof(y->qs) - sizeof(y->qs) % 32; j += 32) {
            for (size_t m = 0; m < 32; ++m) {
                uint8_t q = 0;
                // Accumulate five trits base-3 into one byte.
                for (size_t n = 0; n < 5; ++n) {
                    int xi = lroundf(x[m + n*32] * id) + 1; // -1, 0, 1 -> 0, 1, 2
                    q *= 3;
                    q += xi;
                }
                // ceiling division (243 == pow(3, 5))
                q = ((uint16_t)q * 256 + (243 - 1)) / 243;
                y[i].qs[j + m] = q;
            }
            x += 5*32;
        }
        // along 16 bytes
        for (size_t j = sizeof(y->qs) - sizeof(y->qs) % 32; j < sizeof(y->qs); j += 16) {
            for (size_t m = 0; m < 16; ++m) {
                uint8_t q = 0;
                for (size_t n = 0; n < 5; ++n) {
                    int xi = lroundf(x[m + n*16] * id) + 1; // -1, 0, 1 -> 0, 1, 2
                    q *= 3;
                    q += xi;
                }
                // ceiling division (243 == pow(3, 5))
                q = ((uint16_t)q * 256 + (243 - 1)) / 243;
                y[i].qs[j + m] = q;
            }
            x += 5*16;
        }
        // 4 elements per byte
        for (size_t j = 0; j < sizeof(y->qh); ++j) {
            uint8_t q = 0;
            for (size_t m = 0; m < 4; ++m) {
                // -1, 0, 1 -> 0, 1, 2
                int xi = lroundf(x[j + m*sizeof(y->qh)] * id) + 1;
                q *= 3;
                q += xi;
            }
            // shift the first value to the most significant trit
            q *= 3;
            // ceiling division (243 == pow(3, 5))
            q = ((uint16_t)q * 256 + (243 - 1)) / 243;
            y[i].qh[j] = q;
        }
        x += 4*sizeof(y->qh);
    }
}

// TQ2_0 reference quantizer: 4 ternary values packed per byte, 2 bits each,
// with one fp16 scale (the block's absolute max) per QK_K block.
void quantize_row_tq2_0_ref(const float * restrict x, block_tq2_0 * restrict y, int64_t k) {
    assert(k % QK_K == 0);
    const int64_t nb = k / QK_K;

    for (int64_t i = 0; i < nb; i++) {
        float amax = 0.0f; // absolute max

        for (int j = 0; j < QK_K; j++) {
            const float v = x[j];
            amax = MAX(amax, fabsf(v));
        }

        const float d = amax;
        const float id = d ? 1.0f/d : 0.0f;

        y[i].d = GGML_FP32_TO_FP16(d);

        for (size_t j = 0; j < sizeof(y->qs); j += 32) {
            for (size_t m = 0; m < 32; ++m) {
                uint8_t q = 0;
                for (size_t n = 0; n < 4; ++n) {
                    // -1, 0, 1 -> 0, 1, 2
                    int xi = lroundf(x[m + n*32] * id) + 1;
                    q += (xi & 3) << (2*n);
                }
                y[i].qs[j + m] = q;
            }
            x += 4*32;
        }
    }
}

// Thin void*-typed wrapper over the TQ1_0 reference quantizer.
void quantize_row_tq1_0(const float * restrict x, void * restrict vy, int64_t k) {
    assert(k % QK_K == 0);
    block_tq1_0 * restrict y = vy;
    quantize_row_tq1_0_ref(x, y, k);
}

// Thin void*-typed wrapper over the TQ2_0 reference quantizer.
void quantize_row_tq2_0(const float * restrict x, void * restrict vy, int64_t k) {
    assert(k % QK_K == 0);
    block_tq2_0 * restrict y = vy;
    quantize_row_tq2_0_ref(x, y, k);
}

// TQ1_0 dispatcher: importance weights are ignored (ternary values leave no
// per-element freedom for weighting).
size_t quantize_tq1_0(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) {
    (void)quant_weights; // not used
    const size_t row_size = ggml_row_size(GGML_TYPE_TQ1_0, n_per_row);
    quantize_row_tq1_0(src, dst, (int64_t)nrow*n_per_row);
    return nrow * row_size;
}

// TQ2_0 dispatcher; same shape as quantize_tq1_0 above.
size_t quantize_tq2_0(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) {
    (void)quant_weights; // not used
    const size_t row_size = ggml_row_size(GGML_TYPE_TQ2_0, n_per_row);
    quantize_row_tq2_0(src, dst, (int64_t)nrow*n_per_row);
    return nrow * row_size;
}


// TQ1_0 dequantizer: multiplying a packed byte by pow3[n] then taking
// (q * 3) >> 8 extracts the n-th trit (0, 1, 2), remapped to {-1, 0, 1}.
void dequantize_row_tq1_0(const block_tq1_0 * restrict x, float * restrict y, int64_t k) {
    assert(k % QK_K == 0);
    const int64_t nb = k / QK_K;

    const uint8_t pow3[6] = {1, 3, 9, 27, 81, 243};

    for (int64_t i = 0; i < nb; ++i) {

        const float d = GGML_FP16_TO_FP32(x[i].d);

        for (size_t j = 0; j < sizeof(x->qs) - sizeof(x->qs) % 32; j += 32) {
            for (size_t n = 0; n < 5; ++n) {
                for (size_t m = 0; m < 32; ++m) {
                    uint8_t q = x[i].qs[j + m] * pow3[n];
                    int16_t xi = ((uint16_t) q * 3) >> 8;
                    *y++ = (float) (xi - 1) * d;
                }
            }
        }
        for (size_t j = sizeof(x->qs) - sizeof(x->qs) % 32; j < sizeof(x->qs); j += 16)
        {
            for (size_t n = 0; n < 5; ++n) {
                for (size_t m = 0; m < 16; ++m) {
                    uint8_t q = x[i].qs[j + m] * pow3[n];
                    int16_t xi = ((uint16_t) q * 3) >> 8;
                    *y++ = (float) (xi - 1) * d;
                }
            }
        }

        // Trailing 4-elements-per-byte section stored in qh.
        for (size_t n = 0; n < 4; ++n) {
            for (size_t j = 0; j < sizeof(x->qh); ++j) {
                uint8_t q = x[i].qh[j] * pow3[n];
                int16_t xi = ((uint16_t) q * 3) >> 8;
                *y++ = (float) (xi - 1) * d;
            }
        }
    }
}

// TQ2_0 dequantizer: each byte holds four 2-bit values (0, 1, 2), remapped to
// {-1, 0, 1} and scaled by the block's fp16 scale.
void dequantize_row_tq2_0(const block_tq2_0 * restrict x, float * restrict y, int64_t k) {
    assert(k % QK_K == 0);
    const int64_t nb = k / QK_K;

    for (int64_t i = 0; i < nb; ++i) {

        const float d = GGML_FP16_TO_FP32(x[i].d);

        for (size_t j = 0; j < sizeof(x->qs); j += 32) {
            for (size_t l = 0; l < 4; ++l) {
                for (size_t m = 0; m < 32; ++m) {
                    int8_t q = (x[i].qs[j + m] >> (l*2)) & 3;
                    *y++ = (float) (q - 1) * d;
                }
            }
        }
    }
}

// ====================== "True" 2-bit (de)-quantization

// IQ2_XXS dequantizer: groups of 8 values come from the iq2xxs_grid codebook,
// with per-group signs from ksigns_iq2xs and a 4-bit sub-scale in the high
// bits of aux32[1].
void dequantize_row_iq2_xxs(const block_iq2_xxs * restrict x, float * restrict y, int64_t k) {
    assert(k % QK_K == 0);
    const int64_t nb = k / QK_K;

    uint32_t aux32[2];
    const uint8_t * aux8 = (const uint8_t *)aux32;

    for (int i = 0; i < nb; i++) {

        const float d = GGML_FP16_TO_FP32(x[i].d);

        for (int ib32 = 0; ib32 < QK_K/32; ++ib32) {
            memcpy(aux32, x[i].qs + 4*ib32, 2*sizeof(uint32_t));
            const float db = d * (0.5f + (aux32[1] >> 28)) * 0.25f;
            for (int l = 0; l < 4; ++l) {
                const uint8_t * grid = (const uint8_t *)(iq2xxs_grid + aux8[l]);
                const uint8_t signs = ksigns_iq2xs[(aux32[1] >> 7*l) & 127];
                for (int j = 0; j < 8; ++j) {
                    y[j] = db * grid[j] * (signs & kmask_iq2xs[j] ?
-1.f : 1.f); + } + y += 8; + } + } + } +} + +// ====================== 2.3125 bpw (de)-quantization + +void dequantize_row_iq2_xs(const block_iq2_xs * restrict x, float * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int64_t nb = k / QK_K; + + float db[2]; + + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d); + + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + db[0] = d * (0.5f + (x[i].scales[ib32] & 0xf)) * 0.25f; + db[1] = d * (0.5f + (x[i].scales[ib32] >> 4)) * 0.25f; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xs_grid + (x[i].qs[4*ib32 + l] & 511)); + const uint8_t signs = ksigns_iq2xs[x[i].qs[4*ib32 + l] >> 9]; + for (int j = 0; j < 8; ++j) { + y[j] = db[l/2] * grid[j] * (signs & kmask_iq2xs[j] ? -1.f : 1.f); + } + y += 8; + } + } + } +} + +// ====================== 2.5625 bpw (de)-quantization + +void dequantize_row_iq2_s(const block_iq2_s * restrict x, float * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int64_t nb = k / QK_K; + + float db[2]; + + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d); + const uint8_t * qs = x[i].qs; + const uint8_t * qh = x[i].qh; + const uint8_t * signs = qs + QK_K/8; + + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + db[0] = d * (0.5f + (x[i].scales[ib32] & 0xf)) * 0.25f; + db[1] = d * (0.5f + (x[i].scales[ib32] >> 4)) * 0.25f; + for (int l = 0; l < 4; ++l) { + const float dl = db[l/2]; + const uint8_t * grid = (const uint8_t *)(iq2s_grid + (qs[l] | (qh[ib32] << (8-2*l) & 0x300))); + for (int j = 0; j < 8; ++j) { + y[j] = dl * grid[j] * (signs[l] & kmask_iq2xs[j] ? 
-1.f : 1.f); + } + y += 8; + } + qs += 4; + signs += 4; + } + } +} + +// ====================== 3.0625 bpw (de)-quantization + +void dequantize_row_iq3_xxs(const block_iq3_xxs * restrict x, float * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int64_t nb = k / QK_K; + + uint32_t aux32; + + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d); + const uint8_t * qs = x[i].qs; + const uint8_t * scales_and_signs = qs + QK_K/4; + + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + memcpy(&aux32, scales_and_signs + 4*ib32, sizeof(uint32_t)); + const float db = d * (0.5f + (aux32 >> 28)) * 0.5f; + for (int l = 0; l < 4; ++l) { + const uint8_t signs = ksigns_iq2xs[(aux32 >> 7*l) & 127]; + const uint8_t * grid1 = (const uint8_t *)(iq3xxs_grid + qs[2*l+0]); + const uint8_t * grid2 = (const uint8_t *)(iq3xxs_grid + qs[2*l+1]); + for (int j = 0; j < 4; ++j) { + y[j+0] = db * grid1[j] * (signs & kmask_iq2xs[j+0] ? -1.f : 1.f); + y[j+4] = db * grid2[j] * (signs & kmask_iq2xs[j+4] ? -1.f : 1.f); + } + y += 8; + } + qs += 8; + } + } +} + +// ====================== 3.3125 bpw (de)-quantization + +void dequantize_row_iq3_s(const block_iq3_s * restrict x, float * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int64_t nb = k / QK_K; + + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d); + const uint8_t * qs = x[i].qs; + const uint8_t * qh = x[i].qh; + const uint8_t * signs = x[i].signs; + + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const float db1 = d * (1 + 2*(x[i].scales[ib32/2] & 0xf)); + const float db2 = d * (1 + 2*(x[i].scales[ib32/2] >> 4)); + for (int l = 0; l < 4; ++l) { + const uint8_t * grid1 = (const uint8_t *)(iq3s_grid + (qs[2*l+0] | ((qh[0] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3s_grid + (qs[2*l+1] | ((qh[0] << (7-2*l)) & 256))); + for (int j = 0; j < 4; ++j) { + y[j+0] = db1 * grid1[j] * (signs[l] & kmask_iq2xs[j+0] ? 
-1.f : 1.f); + y[j+4] = db1 * grid2[j] * (signs[l] & kmask_iq2xs[j+4] ? -1.f : 1.f); + } + y += 8; + } + qs += 8; + signs += 4; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid1 = (const uint8_t *)(iq3s_grid + (qs[2*l+0] | ((qh[1] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3s_grid + (qs[2*l+1] | ((qh[1] << (7-2*l)) & 256))); + for (int j = 0; j < 4; ++j) { + y[j+0] = db2 * grid1[j] * (signs[l] & kmask_iq2xs[j+0] ? -1.f : 1.f); + y[j+4] = db2 * grid2[j] * (signs[l] & kmask_iq2xs[j+4] ? -1.f : 1.f); + } + y += 8; + } + qh += 2; + qs += 8; + signs += 4; + } + } +} + +// ====================== 1.5625 bpw (de)-quantization + +void dequantize_row_iq1_s(const block_iq1_s * restrict x, float * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int64_t nb = k / QK_K; + + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d); + const uint8_t * qs = x[i].qs; + const uint16_t * qh = x[i].qh; + + for (int ib = 0; ib < QK_K/32; ++ib) { + const float dl = d * (2*((qh[ib] >> 12) & 7) + 1); + const float delta = qh[ib] & 0x8000 ? 
-IQ1S_DELTA : IQ1S_DELTA; + for (int l = 0; l < 4; ++l) { + const int8_t * grid = (const int8_t *)(iq1s_grid + (qs[l] | (((qh[ib] >> 3*l) & 7) << 8))); + for (int j = 0; j < 8; ++j) { + y[j] = dl * (grid[j] + delta); + } + y += 8; + } + qs += 4; + } + } +} + +void dequantize_row_iq1_m(const block_iq1_m * restrict x, float * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int64_t nb = k / QK_K; + + float delta[4]; + uint16_t idx[4]; + + iq1m_scale_t scale; + + for (int i = 0; i < nb; i++) { + + const uint16_t * sc = (const uint16_t *)x[i].scales; + scale.u16 = (sc[0] >> 12) | ((sc[1] >> 8) & 0x00f0) | ((sc[2] >> 4) & 0x0f00) | (sc[3] & 0xf000); + const float d = GGML_FP16_TO_FP32(scale.f16); + + const uint8_t * qs = x[i].qs; + const uint8_t * qh = x[i].qh; + + for (int ib = 0; ib < QK_K/32; ++ib) { + const float dl1 = d * (2*((sc[ib/2] >> (6*(ib%2)+0)) & 0x7) + 1); + const float dl2 = d * (2*((sc[ib/2] >> (6*(ib%2)+3)) & 0x7) + 1); + + idx[0] = qs[0] | ((qh[0] << 8) & 0x700); + idx[1] = qs[1] | ((qh[0] << 4) & 0x700); + idx[2] = qs[2] | ((qh[1] << 8) & 0x700); + idx[3] = qs[3] | ((qh[1] << 4) & 0x700); + delta[0] = qh[0] & 0x08 ? -IQ1S_DELTA : IQ1S_DELTA; + delta[1] = qh[0] & 0x80 ? -IQ1S_DELTA : IQ1S_DELTA; + delta[2] = qh[1] & 0x08 ? -IQ1S_DELTA : IQ1S_DELTA; + delta[3] = qh[1] & 0x80 ? 
-IQ1S_DELTA : IQ1S_DELTA; + for (int l = 0; l < 2; ++l) { + const int8_t * grid = (const int8_t *)(iq1s_grid + idx[l]); + for (int j = 0; j < 8; ++j) { + y[j] = dl1 * (grid[j] + delta[l]); + } + y += 8; + } + for (int l = 2; l < 4; ++l) { + const int8_t * grid = (const int8_t *)(iq1s_grid + idx[l]); + for (int j = 0; j < 8; ++j) { + y[j] = dl2 * (grid[j] + delta[l]); + } + y += 8; + } + qs += 4; + qh += 2; + } + } +} + +static const int8_t kvalues_iq4nl[16] = {-127, -104, -83, -65, -49, -35, -22, -10, 1, 13, 25, 38, 53, 69, 89, 113}; + +void dequantize_row_iq4_nl(const block_iq4_nl * restrict x, float * restrict y, int64_t k) { + assert(k % QK4_NL == 0); + const int64_t nb = k / QK4_NL; + + for (int i = 0; i < nb; i++) { + + const uint8_t * qs = x[i].qs; + + const float d = GGML_FP16_TO_FP32(x[i].d); + for (int j = 0; j < QK4_NL/2; ++j) { + y[j+ 0] = d * kvalues_iq4nl[qs[j] & 0xf]; + y[j+QK4_NL/2] = d * kvalues_iq4nl[qs[j] >> 4]; + } + y += QK4_NL; + qs += QK4_NL/2; + } +} + +void dequantize_row_iq4_xs(const block_iq4_xs * restrict x, float * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int64_t nb = k / QK_K; + + for (int i = 0; i < nb; i++) { + + const uint8_t * qs = x[i].qs; + + const float d = GGML_FP16_TO_FP32(x[i].d); + + for (int ib = 0; ib < QK_K/32; ++ib) { + const int ls = ((x[i].scales_l[ib/2] >> 4*(ib%2)) & 0xf) | (((x[i].scales_h >> 2*ib) & 3) << 4); + const float dl = d * (ls - 32); + for (int j = 0; j < 16; ++j) { + y[j+ 0] = dl * kvalues_iq4nl[qs[j] & 0xf]; + y[j+16] = dl * kvalues_iq4nl[qs[j] >> 4]; + } + y += 32; + qs += 16; + } + } +} + +//===================================== Q8_K ============================================== + +void quantize_row_q8_K_ref(const float * restrict x, block_q8_K * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int64_t nb = k / QK_K; + + for (int i = 0; i < nb; i++) { + + float max = 0; + float amax = 0; + for (int j = 0; j < QK_K; ++j) { + float ax = fabsf(x[j]); + if (ax > amax) { + amax = 
ax; max = x[j]; + } + } + if (!amax) { + y[i].d = 0; + memset(y[i].qs, 0, QK_K); + x += QK_K; + continue; + } + //const float iscale = -128.f/max; + // We need this change for IQ2_XXS, else the AVX implementation becomes very awkward + const float iscale = -127.f/max; + for (int j = 0; j < QK_K; ++j) { + int v = nearest_int(iscale*x[j]); + y[i].qs[j] = MIN(127, v); + } + for (int j = 0; j < QK_K/16; ++j) { + int sum = 0; + for (int ii = 0; ii < 16; ++ii) { + sum += y[i].qs[j*16 + ii]; + } + y[i].bsums[j] = sum; + } + y[i].d = 1/iscale; + x += QK_K; + } +} + +void dequantize_row_q8_K(const block_q8_K * restrict x, float * restrict y, int64_t k) { + assert(k % QK_K == 0); + const int64_t nb = k / QK_K; + + for (int i = 0; i < nb; i++) { + for (int j = 0; j < QK_K; ++j) { + *y++ = x[i].d * x[i].qs[j]; + } + } +} + +void quantize_row_q8_K(const float * restrict x, void * restrict y, int64_t k) { + quantize_row_q8_K_ref(x, y, k); +} + +//===================================== Dot products ================================= + +// +// Helper functions +// +#if __AVX__ || __AVX2__ || __AVX512F__ + +// shuffles to pick the required scales in dot products +static inline __m256i get_scale_shuffle_q3k(int i) { + static const uint8_t k_shuffle[128] = { + 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, + 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, + 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11, + 12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13, 14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15, + }; + return _mm256_loadu_si256((const __m256i*)k_shuffle + i); +} +static inline __m256i get_scale_shuffle_k4(int i) { + static const uint8_t k_shuffle[256] = { + 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, + 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 
2, 3, 2, 3, + 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, + 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, + 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, + 10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11, + 12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13, + 14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15 + }; + return _mm256_loadu_si256((const __m256i*)k_shuffle + i); +} +static inline __m128i get_scale_shuffle(int i) { + static const uint8_t k_shuffle[128] = { + 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, + 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, + 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, + 6, 6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 7, + 8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, + 10,10,10,10,10,10,10,10, 11,11,11,11,11,11,11,11, + 12,12,12,12,12,12,12,12, 13,13,13,13,13,13,13,13, + 14,14,14,14,14,14,14,14, 15,15,15,15,15,15,15,15 + }; + return _mm_loadu_si128((const __m128i*)k_shuffle + i); +} +#elif defined(__loongarch_asx) +// shuffles to pick the required scales in dot products +static inline __m256i get_scale_shuffle_q3k(int i) { + static const uint8_t k_shuffle[128] = { + 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, + 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, + 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11, + 12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13, 14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15, + }; + return __lasx_xvld((const __m256i*)k_shuffle + i, 0); +} +static inline __m256i get_scale_shuffle_k4(int i) { + static const uint8_t k_shuffle[256] = { + 0, 1, 0, 1, 0, 1, 0, 1, 0, 
1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, + 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, + 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, 4, 5, + 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, + 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, 8, 9, + 10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11,10,11, + 12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13,12,13, + 14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15,14,15 + }; + return __lasx_xvld((const __m256i*)k_shuffle + i, 0); +} +static inline __m128i get_scale_shuffle(int i) { + static const uint8_t k_shuffle[128] = { + 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, + 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, + 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, + 6, 6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 7, + 8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, + 10,10,10,10,10,10,10,10, 11,11,11,11,11,11,11,11, + 12,12,12,12,12,12,12,12, 13,13,13,13,13,13,13,13, + 14,14,14,14,14,14,14,14, 15,15,15,15,15,15,15,15 + }; + return __lsx_vld((const __m128i*)k_shuffle + i, 0); +} +#endif + +void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + const int qk = QK8_0; + const int nb = n / qk; + + assert(n % qk == 0); +#if defined(__ARM_FEATURE_MATMUL_INT8) + assert((nrc == 2) || (nrc == 1)); +#else + assert(nrc == 1); +#endif + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_q4_0 * restrict x = vx; + const block_q8_0 * restrict y = vy; + +#if defined(__ARM_FEATURE_MATMUL_INT8) + if (nrc == 2) { + const block_q4_0 * restrict vx0 = vx; + const block_q4_0 * restrict vx1 = 
(const block_q4_0 *) ((const uint8_t*)vx + bx); + const block_q8_0 * restrict vy0 = vy; + const block_q8_0 * restrict vy1 = (const block_q8_0 *) ((const uint8_t*)vy + by); + + float32x4_t sumv0 = vdupq_n_f32(0.0f); + + for (int i = 0; i < nb; i++) { + const block_q4_0 * restrict b_x0 = &vx0[i]; + const block_q4_0 * restrict b_x1 = &vx1[i]; + const block_q8_0 * restrict b_y0 = &vy0[i]; + const block_q8_0 * restrict b_y1 = &vy1[i]; + + const uint8x16_t m4b = vdupq_n_u8(0x0F); + const int8x16_t s8b = vdupq_n_s8(0x8); + + const uint8x16_t v0_0 = vld1q_u8(b_x0->qs); + const uint8x16_t v0_1 = vld1q_u8(b_x1->qs); + + // 4-bit -> 8-bit + const int8x16_t v0_0l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); + const int8x16_t v0_0h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); + const int8x16_t v0_1l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); + const int8x16_t v0_1h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); + + // sub 8 + const int8x16_t x0_l = vsubq_s8(v0_0l, s8b); + const int8x16_t x0_h = vsubq_s8(v0_0h, s8b); + const int8x16_t x1_l = vsubq_s8(v0_1l, s8b); + const int8x16_t x1_h = vsubq_s8(v0_1h, s8b); + + // load y + const int8x16_t y0_l = vld1q_s8(b_y0->qs); + const int8x16_t y0_h = vld1q_s8(b_y0->qs + 16); + const int8x16_t y1_l = vld1q_s8(b_y1->qs); + const int8x16_t y1_h = vld1q_s8(b_y1->qs + 16); + + float32_t _scale[4] = { GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y1->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y1->d)}; + + float32x4_t scale = vld1q_f32(_scale); + + int8x16_t l0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + int8x16_t l1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + + int8x16_t l2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + int8x16_t l3 = 
vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + + int8x16_t r0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + int8x16_t r1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + + int8x16_t r2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + int8x16_t r3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + + sumv0 = vmlaq_f32(sumv0,(vcvtq_f32_s32(vmmlaq_s32((vmmlaq_s32((vmmlaq_s32((vmmlaq_s32(vdupq_n_s32(0), l0, r0)), + l1, r1)), l2, r2)), l3, r3))), scale); + } + float32x4_t sumv1 = vextq_f32(sumv0, sumv0, 2); + float32x4_t sumv2 = vzip1q_f32(sumv0, sumv1); + + vst1_f32(s, vget_low_f32(sumv2)); + vst1_f32(s + bs, vget_high_f32(sumv2)); + return; + } +#endif + + int ib = 0; + float sumf = 0; + +#if defined(__ARM_FEATURE_SVE) + svfloat32_t sumv0 = svdup_n_f32(0.0f); + svfloat32_t sumv1 = svdup_n_f32(0.0f); + + const int vector_length = ggml_cpu_get_sve_cnt()*8; + + // VLA Implementation using switch case + switch (vector_length) { + case 128: + { + // predicate for activating higher lanes for 4 float32 elements + const svbool_t ph4 = svptrue_pat_b32(SV_VL4); + + for (; ib + 1 < nb; ib += 2) { + const block_q4_0 * restrict x0 = &x[ib + 0]; + const block_q4_0 * restrict x1 = &x[ib + 1]; + const block_q8_0 * restrict y0 = &y[ib + 0]; + const block_q8_0 * restrict y1 = &y[ib + 1]; + + // load x + const svuint8_t qx0r = svld1rq_u8(svptrue_b8(), x0->qs); + const svuint8_t qx1r = svld1rq_u8(svptrue_b8(), x1->qs); + + // 4-bit -> 8-bit + const svint8_t qx0l = svreinterpret_s8_u8(svand_n_u8_m(svptrue_b8(), qx0r, 0x0F)); + const svint8_t qx0h = svreinterpret_s8_u8(svlsr_n_u8_m(svptrue_b8(), qx0r, 0x04)); + const svint8_t qx1l = svreinterpret_s8_u8(svand_n_u8_m(svptrue_b8(), qx1r, 0x0F)); + const svint8_t qx1h = svreinterpret_s8_u8(svlsr_n_u8_m(svptrue_b8(), qx1r, 
0x04)); + + // sub 8 + const svint8_t qx0ls = svsub_n_s8_x(svptrue_b8(), qx0h, 8); + const svint8_t qx0hs = svsub_n_s8_x(svptrue_b8(), qx0l, 8); + const svint8_t qx1ls = svsub_n_s8_x(svptrue_b8(), qx1h, 8); + const svint8_t qx1hs = svsub_n_s8_x(svptrue_b8(), qx1l, 8); + + // load y + const svint8_t qy0h = svld1_s8(svptrue_b8(), y0->qs); + const svint8_t qy0l = svld1_s8(svptrue_b8(), y0->qs + 16); + const svint8_t qy1h = svld1_s8(svptrue_b8(), y1->qs); + const svint8_t qy1l = svld1_s8(svptrue_b8(), y1->qs + 16); + + // dot product + sumv0 = svmla_n_f32_x(ph4, sumv0, svcvt_f32_s32_x(ph4, svadd_x(ph4, + svdot_s32(svdup_n_s32(0), qx0ls, qy0l), + svdot_s32(svdup_n_s32(0), qx0hs, qy0h))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + sumv1 = svmla_n_f32_x(ph4, sumv1, svcvt_f32_s32_x(ph4, svadd_x(ph4, + svdot_s32(svdup_n_s32(0), qx1ls, qy1l), + svdot_s32(svdup_n_s32(0), qx1hs, qy1h))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); + } + + sumf = svaddv_f32(svptrue_b32(), svadd_f32_x(svptrue_b32(), sumv0, sumv1)); + } break; + case 256: + { + // predicate for activating higher lanes for 16 int8 elements + const svbool_t ph16 = svptrue_pat_b8(SV_VL16); + // predicate for activating lower lanes for 16 int8 elements + const svbool_t pl16 = svnot_b_z(svptrue_b8(), ph16); + + for (; ib + 1 < nb; ib += 2) { + const block_q4_0 * restrict x0 = &x[ib + 0]; + const block_q4_0 * restrict x1 = &x[ib + 1]; + const block_q8_0 * restrict y0 = &y[ib + 0]; + const block_q8_0 * restrict y1 = &y[ib + 1]; + + // load x + const svuint8_t qx0r = svld1rq_u8(svptrue_b8(), x0->qs); + const svuint8_t qx1r = svld1rq_u8(svptrue_b8(), x1->qs); + + // 4-bit -> 8-bit + const svint8_t qx0 = svreinterpret_s8_u8(svlsr_n_u8_m(pl16, svand_n_u8_m(ph16, qx0r, 0x0F), 0x04)); + const svint8_t qx1 = svreinterpret_s8_u8(svlsr_n_u8_m(pl16, svand_n_u8_m(ph16, qx1r, 0x0F), 0x04)); + + // sub 8 + const svint8_t qx0s = svsub_n_s8_x(svptrue_b8(), qx0, 8); + const svint8_t qx1s = svsub_n_s8_x(svptrue_b8(), 
qx1, 8); + + // load y + const svint8_t qy0 = svld1_s8(svptrue_b8(), y0->qs); + const svint8_t qy1 = svld1_s8(svptrue_b8(), y1->qs); + + // dot product + sumv0 = svmla_n_f32_x(svptrue_b32(), sumv0, svcvt_f32_s32_x(svptrue_b32(), + svdot_s32(svdup_n_s32(0), qx0s, qy0)), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + sumv1 = svmla_n_f32_x(svptrue_b32(), sumv1, svcvt_f32_s32_x(svptrue_b32(), + svdot_s32(svdup_n_s32(0), qx1s, qy1)), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); + } + + sumf = svaddv_f32(svptrue_b32(), svadd_f32_x(svptrue_b32(), sumv0, sumv1)); + } break; + case 512: + { + // predicate for activating higher lanes for 32 int8 elements + const svbool_t ph32 = svptrue_pat_b8(SV_VL32); + + // predicate for activating higher lanes for 16 int8 elements + const svbool_t ph16 = svptrue_pat_b8(SV_VL16); + // predicate for activating lower lanes for 16 int8 elements from first 32 int8 activated lanes + const svbool_t pl16 = svnot_b_z(ph32, ph16); + + for (; ib + 1 < nb; ib += 2) { + const block_q4_0 * restrict x0 = &x[ib + 0]; + const block_q4_0 * restrict x1 = &x[ib + 1]; + const block_q8_0 * restrict y0 = &y[ib + 0]; + const block_q8_0 * restrict y1 = &y[ib + 1]; + + // load x + const svuint8_t qx0r = svld1rq_u8(ph32, x0->qs); + const svuint8_t qx1r = svld1rq_u8(ph32, x1->qs); + + // 4-bit -> 8-bit + const svint8_t qx0 = svreinterpret_s8_u8(svlsr_n_u8_m(pl16, svand_n_u8_m(ph16, qx0r, 0x0F), 0x04)); + const svint8_t qx1 = svreinterpret_s8_u8(svlsr_n_u8_m(pl16, svand_n_u8_m(ph16, qx1r, 0x0F), 0x04)); + + // sub 8 + const svint8_t qx0s = svsub_n_s8_x(ph32, qx0, 8); + const svint8_t qx1s = svsub_n_s8_x(ph32, qx1, 8); + + // load y + const svint8_t qy0 = svld1_s8(ph32, y0->qs); + const svint8_t qy1 = svld1_s8(ph32, y1->qs); + + // dot product + sumv0 = svmla_n_f32_x(ph32, sumv0, svcvt_f32_s32_x(ph32, + svdot_s32(svdup_n_s32(0), qx0s, qy0)), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + sumv1 = svmla_n_f32_x(ph32, sumv1, svcvt_f32_s32_x(ph32, + 
svdot_s32(svdup_n_s32(0), qx1s, qy1)), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); + } + + sumf = svaddv_f32(ph32, svadd_f32_x(ph32, sumv0, sumv1)); + } break; + default: + assert(false && "Unsupported vector length"); + break; + } + +#elif defined(__ARM_NEON) + float32x4_t sumv0 = vdupq_n_f32(0.0f); + float32x4_t sumv1 = vdupq_n_f32(0.0f); + + for (; ib + 1 < nb; ib += 2) { + const block_q4_0 * restrict x0 = &x[ib + 0]; + const block_q4_0 * restrict x1 = &x[ib + 1]; + const block_q8_0 * restrict y0 = &y[ib + 0]; + const block_q8_0 * restrict y1 = &y[ib + 1]; + + const uint8x16_t m4b = vdupq_n_u8(0x0F); + const int8x16_t s8b = vdupq_n_s8(0x8); + + const uint8x16_t v0_0 = vld1q_u8(x0->qs); + const uint8x16_t v0_1 = vld1q_u8(x1->qs); + + // 4-bit -> 8-bit + const int8x16_t v0_0l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); + const int8x16_t v0_0h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); + const int8x16_t v0_1l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); + const int8x16_t v0_1h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); + + // sub 8 + const int8x16_t v0_0ls = vsubq_s8(v0_0l, s8b); + const int8x16_t v0_0hs = vsubq_s8(v0_0h, s8b); + const int8x16_t v0_1ls = vsubq_s8(v0_1l, s8b); + const int8x16_t v0_1hs = vsubq_s8(v0_1h, s8b); + + // load y + const int8x16_t v1_0l = vld1q_s8(y0->qs); + const int8x16_t v1_0h = vld1q_s8(y0->qs + 16); + const int8x16_t v1_1l = vld1q_s8(y1->qs); + const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); + + // dot product into int32x4_t + const int32x4_t p_0 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), v0_0ls, v1_0l), v0_0hs, v1_0h); + const int32x4_t p_1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), v0_1ls, v1_1l), v0_1hs, v1_1h); + + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); + } + + sumf = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); +#elif defined(__AVX2__) + // Initialize 
accumulator with zeros + __m256 acc = _mm256_setzero_ps(); + + // Main loop + for (; ib < nb; ++ib) { + /* Compute combined scale for the block */ + const __m256 d = _mm256_set1_ps( GGML_FP16_TO_FP32(x[ib].d) * GGML_FP16_TO_FP32(y[ib].d) ); + + __m256i qx = bytes_from_nibbles_32(x[ib].qs); + + // Now we have a vector with bytes in [ 0 .. 15 ] interval. Offset them into [ -8 .. +7 ] interval. + const __m256i off = _mm256_set1_epi8( 8 ); + qx = _mm256_sub_epi8( qx, off ); + + __m256i qy = _mm256_loadu_si256((const __m256i *)y[ib].qs); + + const __m256 q = mul_sum_i8_pairs_float(qx, qy); + + /* Multiply q with scale and accumulate */ + acc = _mm256_fmadd_ps( d, q, acc ); + } + + sumf = hsum_float_8(acc); +#elif defined(__AVX__) + const __m128i mone = _mm_set1_epi16(1); + + __m256 accum1 = _mm256_setzero_ps(); + __m256 accum2 = _mm256_setzero_ps(); + for (; ib + 1 < nb; ib += 2) { + const __m128i q4bits_1 = _mm_loadu_si128((const __m128i *)x[ib + 0].qs); + const __m128i q4bits_2 = _mm_loadu_si128((const __m128i *)x[ib + 1].qs); + const __m128i q8b_1_0 = _mm_loadu_si128((const __m128i *)y[ib + 0].qs); + const __m128i q8b_1_1 = _mm_loadu_si128((const __m128i *)y[ib + 0].qs + 1); + const __m128i q8b_2_0 = _mm_loadu_si128((const __m128i *)y[ib + 1].qs); + const __m128i q8b_2_1 = _mm_loadu_si128((const __m128i *)y[ib + 1].qs + 1); + + const __m128i q4b_1_0 = _mm_sub_epi8(_mm_and_si128(_mm_set1_epi8(15), q4bits_1), _mm_set1_epi8(8)); + const __m128i q4b_1_1 = _mm_sub_epi8(_mm_and_si128(_mm_set1_epi8(15), _mm_srli_epi16(q4bits_1, 4)), _mm_set1_epi8(8)); + const __m128i q4b_2_0 = _mm_sub_epi8(_mm_and_si128(_mm_set1_epi8(15), q4bits_2), _mm_set1_epi8(8)); + const __m128i q4b_2_1 = _mm_sub_epi8(_mm_and_si128(_mm_set1_epi8(15), _mm_srli_epi16(q4bits_2, 4)), _mm_set1_epi8(8)); + const __m128i p16_1_0 = mul_add_epi8_sse(q4b_1_0, q8b_1_0); + const __m128i p16_1_1 = mul_add_epi8_sse(q4b_1_1, q8b_1_1); + const __m128i p16_2_0 = mul_add_epi8_sse(q4b_2_0, q8b_2_0); + const __m128i 
p16_2_1 = mul_add_epi8_sse(q4b_2_1, q8b_2_1); + const __m128i p_1_0 = _mm_madd_epi16(p16_1_0, mone); + const __m128i p_1_1 = _mm_madd_epi16(p16_1_1, mone); + const __m128i p_2_0 = _mm_madd_epi16(p16_2_0, mone); + const __m128i p_2_1 = _mm_madd_epi16(p16_2_1, mone); + accum1 = _mm256_add_ps(_mm256_mul_ps(_mm256_set1_ps(GGML_FP16_TO_FP32(y[ib + 0].d)*GGML_FP16_TO_FP32(x[ib + 0].d)), + _mm256_cvtepi32_ps(MM256_SET_M128I(p_1_1, p_1_0))), accum1); + accum2 = _mm256_add_ps(_mm256_mul_ps(_mm256_set1_ps(GGML_FP16_TO_FP32(y[ib + 1].d)*GGML_FP16_TO_FP32(x[ib + 1].d)), + _mm256_cvtepi32_ps(MM256_SET_M128I(p_2_1, p_2_0))), accum2); + } + + sumf = hsum_float_8(_mm256_add_ps(accum1, accum2)); +#elif defined(__SSSE3__) + // set constants + const __m128i lowMask = _mm_set1_epi8(0xF); + const __m128i off = _mm_set1_epi8(8); + + // Initialize accumulator with zeros + __m128 acc_0 = _mm_setzero_ps(); + __m128 acc_1 = _mm_setzero_ps(); + __m128 acc_2 = _mm_setzero_ps(); + __m128 acc_3 = _mm_setzero_ps(); + + for (; ib + 1 < nb; ib += 2) { + _mm_prefetch(&x[ib] + sizeof(block_q4_0), _MM_HINT_T0); + _mm_prefetch(&y[ib] + sizeof(block_q8_0), _MM_HINT_T0); + + // Compute combined scale for the block 0 and 1 + const __m128 d_0_1 = _mm_set1_ps( GGML_FP16_TO_FP32(x[ib].d) * GGML_FP16_TO_FP32(y[ib].d) ); + + const __m128i tmp_0_1 = _mm_loadu_si128((const __m128i *)x[ib].qs); + + __m128i bx_0 = _mm_and_si128(lowMask, tmp_0_1); + __m128i by_0 = _mm_loadu_si128((const __m128i *)y[ib].qs); + bx_0 = _mm_sub_epi8(bx_0, off); + const __m128i i32_0 = mul_sum_i8_pairs(bx_0, by_0); + + __m128i bx_1 = _mm_and_si128(lowMask, _mm_srli_epi64(tmp_0_1, 4)); + __m128i by_1 = _mm_loadu_si128((const __m128i *)(y[ib].qs + 16)); + bx_1 = _mm_sub_epi8(bx_1, off); + const __m128i i32_1 = mul_sum_i8_pairs(bx_1, by_1); + + _mm_prefetch(&x[ib] + 2 * sizeof(block_q4_0), _MM_HINT_T0); + _mm_prefetch(&y[ib] + 2 * sizeof(block_q8_0), _MM_HINT_T0); + + // Compute combined scale for the block 2 and 3 + const __m128 d_2_3 = 
_mm_set1_ps( GGML_FP16_TO_FP32(x[ib + 1].d) * GGML_FP16_TO_FP32(y[ib + 1].d) ); + + const __m128i tmp_2_3 = _mm_loadu_si128((const __m128i *)x[ib + 1].qs); + + __m128i bx_2 = _mm_and_si128(lowMask, tmp_2_3); + __m128i by_2 = _mm_loadu_si128((const __m128i *)y[ib + 1].qs); + bx_2 = _mm_sub_epi8(bx_2, off); + const __m128i i32_2 = mul_sum_i8_pairs(bx_2, by_2); + + __m128i bx_3 = _mm_and_si128(lowMask, _mm_srli_epi64(tmp_2_3, 4)); + __m128i by_3 = _mm_loadu_si128((const __m128i *)(y[ib + 1].qs + 16)); + bx_3 = _mm_sub_epi8(bx_3, off); + const __m128i i32_3 = mul_sum_i8_pairs(bx_3, by_3); + + // Convert int32_t to float + __m128 p0 = _mm_cvtepi32_ps(i32_0); + __m128 p1 = _mm_cvtepi32_ps(i32_1); + __m128 p2 = _mm_cvtepi32_ps(i32_2); + __m128 p3 = _mm_cvtepi32_ps(i32_3); + + // Apply the scale + __m128 p0_d = _mm_mul_ps( d_0_1, p0 ); + __m128 p1_d = _mm_mul_ps( d_0_1, p1 ); + __m128 p2_d = _mm_mul_ps( d_2_3, p2 ); + __m128 p3_d = _mm_mul_ps( d_2_3, p3 ); + + // Acummulate + acc_0 = _mm_add_ps(p0_d, acc_0); + acc_1 = _mm_add_ps(p1_d, acc_1); + acc_2 = _mm_add_ps(p2_d, acc_2); + acc_3 = _mm_add_ps(p3_d, acc_3); + } + + sumf = hsum_float_4x4(acc_0, acc_1, acc_2, acc_3); +#elif defined(__riscv_v_intrinsic) + size_t vl = __riscv_vsetvl_e8m1(qk/2); + + for (; ib < nb; ++ib) { + // load elements + vuint8mf2_t tx = __riscv_vle8_v_u8mf2(x[ib].qs, vl); + + vint8mf2_t y0 = __riscv_vle8_v_i8mf2(y[ib].qs, vl); + vint8mf2_t y1 = __riscv_vle8_v_i8mf2(y[ib].qs+16, vl); + + // mask and store lower part of x, and then upper part + vuint8mf2_t x_a = __riscv_vand_vx_u8mf2(tx, 0x0F, vl); + vuint8mf2_t x_l = __riscv_vsrl_vx_u8mf2(tx, 0x04, vl); + + vint8mf2_t x_ai = __riscv_vreinterpret_v_u8mf2_i8mf2(x_a); + vint8mf2_t x_li = __riscv_vreinterpret_v_u8mf2_i8mf2(x_l); + + // subtract offset + vint8mf2_t v0 = __riscv_vsub_vx_i8mf2(x_ai, 8, vl); + vint8mf2_t v1 = __riscv_vsub_vx_i8mf2(x_li, 8, vl); + + vint16m1_t vec_mul1 = __riscv_vwmul_vv_i16m1(v0, y0, vl); + vint16m1_t vec_mul2 = 
__riscv_vwmul_vv_i16m1(v1, y1, vl); + + vint32m1_t vec_zero = __riscv_vmv_v_x_i32m1(0, vl); + + vint32m1_t vs1 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul1, vec_zero, vl); + vint32m1_t vs2 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul2, vs1, vl); + + int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); + + sumf += sumi*GGML_FP16_TO_FP32(x[ib].d)*GGML_FP16_TO_FP32(y[ib].d); + } + +#elif defined(__POWER9_VECTOR__) + const vector signed char lowMask = vec_splats((signed char)0xF); + const vector signed int v0 = vec_splats((int32_t)0); + const vector unsigned char v4 = vec_splats((unsigned char)0x4); + const vector signed char v8 = vec_splats((signed char)0x8); + + vector float vsumf0 = vec_splats(0.0f); + +#pragma GCC unroll 8 + for (; ib < nb; ++ib) { + __builtin_prefetch(x[ib].qs, 0, 1); + __builtin_prefetch(y[ib].qs, 0, 1); + + vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[ib].d)); + vector float vyd = vec_splats(GGML_FP16_TO_FP32(y[ib].d)); + vector float vd = vec_mul(vxd, vyd); + + vector signed char qxs = (vector signed char)vec_xl( 0, x[ib].qs); + vector signed char q8y0 = vec_xl( 0, y[ib].qs); + vector signed char q8y1 = vec_xl(16, y[ib].qs); + + vector signed char q4x0 = vec_and(qxs, lowMask); + vector signed char q4x1 = vec_sr(qxs, v4); + + q4x0 = vec_sub(q4x0, v8); + q4x1 = vec_sub(q4x1, v8); + + vector signed short qv0 = vec_add(vec_mule(q4x0, q8y0), vec_mulo(q4x0, q8y0)); + vector signed short qv1 = vec_add(vec_mule(q4x1, q8y1), vec_mulo(q4x1, q8y1)); + + vector signed int vsumi0 = v0; + + vsumi0 = vec_sum4s(qv0, vsumi0); + vsumi0 = vec_sum4s(qv1, vsumi0); + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0); + } + + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8)); + + sumf = vec_extract(vsumf0, 0); + +#elif defined(__loongarch_asx) + // Initialize accumulator with zeros + __m256 acc = (__m256)__lasx_xvldi(0); + + // Main loop + for (; ib < nb; ++ib) { + /* Compute combined scale for the block */ + const 
__m256 d = __lasx_xvreplfr2vr_s( GGML_FP16_TO_FP32(x[ib].d) * GGML_FP16_TO_FP32(y[ib].d) ); + + __m256i qx = bytes_from_nibbles_32(x[ib].qs); + + // Now we have a vector with bytes in [ 0 .. 15 ] interval. Offset them into [ -8 .. +7 ] interval. + const __m256i off = __lasx_xvreplgr2vr_b( 8 ); + qx = __lasx_xvsub_b( qx, off ); + + __m256i qy = __lasx_xvld((const __m256i *)y[ib].qs, 0); + + const __m256 q = mul_sum_i8_pairs_float(qx, qy); + + /* Multiply q with scale and accumulate */ + acc = __lasx_xvfmadd_s( d, q, acc ); + } + + sumf = hsum_float_8(acc); +#elif defined(__loongarch_sx) + // set constants + const __m128i low_mask = __lsx_vreplgr2vr_b(0xF); + const __m128i off = __lsx_vreplgr2vr_b(8); + + // Initialize accumulator with zeros + __m128 acc_0 = __lsx_vldi(0); + __m128 acc_1 = __lsx_vldi(0); + __m128 acc_2 = __lsx_vldi(0); + __m128 acc_3 = __lsx_vldi(0); + + for (; ib + 1 < nb; ib += 2) { + + // Compute combined scale for the block 0 and 1 + const __m128 d_0_1 = __lsx_vreplgr2vr_w( GGML_FP16_TO_FP32(x[ib].d) * GGML_FP16_TO_FP32(y[ib].d) ); + + const __m128i tmp_0_1 = __lsx_vld((const __m128i *)x[ib].qs, 0); + + __m128i bx_0 = __lsx_vand_v(low_mask, tmp_0_1); + __m128i by_0 = __lsx_vld((const __m128i *)y[ib].qs, 0); + bx_0 = __lsx_vsub_b(bx_0, off); + const __m128i i32_0 = mul_sum_i8_pairs(bx_0, by_0); + + __m128i bx_1 = __lsx_vand_v(low_mask, __lsx_vsrli_d(tmp_0_1, 4)); + __m128i by_1 = __lsx_vld((const __m128i *)(y[ib].qs + 16), 0); + bx_1 = __lsx_vsub_b(bx_1, off); + const __m128i i32_1 = mul_sum_i8_pairs(bx_1, by_1); + + //_mm_prefetch(&x[ib] + 2 * sizeof(block_q4_0), _MM_HINT_T0); + //_mm_prefetch(&y[ib] + 2 * sizeof(block_q8_0), _MM_HINT_T0); + + // Compute combined scale for the block 2 and 3 + const __m128 d_2_3 = __lsx_vreplgr2vr_w( GGML_FP16_TO_FP32(x[ib + 1].d) * GGML_FP16_TO_FP32(y[ib + 1].d) ); + + const __m128i tmp_2_3 = __lsx_vld((const __m128i *)x[ib + 1].qs, 0); + + __m128i bx_2 = __lsx_vand_v(low_mask, tmp_2_3); + __m128i by_2 = 
__lsx_vld((const __m128i *)y[ib + 1].qs, 0); + bx_2 = __lsx_vsub_b(bx_2, off); + const __m128i i32_2 = mul_sum_i8_pairs(bx_2, by_2); + + __m128i bx_3 = __lsx_vand_v(low_mask, __lsx_vsrli_d(tmp_2_3, 4)); + __m128i by_3 = __lsx_vld((const __m128i *)(y[ib + 1].qs + 16), 0); + bx_3 = __lsx_vsub_b(bx_3, off); + const __m128i i32_3 = mul_sum_i8_pairs(bx_3, by_3); + + // Convert int32_t to float + __m128 p0 = __lsx_vffint_s_w(i32_0); + __m128 p1 = __lsx_vffint_s_w(i32_1); + __m128 p2 = __lsx_vffint_s_w(i32_2); + __m128 p3 = __lsx_vffint_s_w(i32_3); + + // Apply the scale + __m128 p0_d = __lsx_vfmul_s( d_0_1, p0 ); + __m128 p1_d = __lsx_vfmul_s( d_0_1, p1 ); + __m128 p2_d = __lsx_vfmul_s( d_2_3, p2 ); + __m128 p3_d = __lsx_vfmul_s( d_2_3, p3 ); + + // Acummulate + acc_0 = __lsx_vfadd_s(p0_d, acc_0); + acc_1 = __lsx_vfadd_s(p1_d, acc_1); + acc_2 = __lsx_vfadd_s(p2_d, acc_2); + acc_3 = __lsx_vfadd_s(p3_d, acc_3); + } + + sumf = hsum_float_4x4(acc_0, acc_1, acc_2, acc_3); +#endif + for (; ib < nb; ++ib) { + int sumi0 = 0; + int sumi1 = 0; + + for (int j = 0; j < qk/2; ++j) { + const int v0 = (x[ib].qs[j] & 0x0F) - 8; + const int v1 = (x[ib].qs[j] >> 4) - 8; + + sumi0 += (v0 * y[ib].qs[j]); + sumi1 += (v1 * y[ib].qs[j + qk/2]); + } + + int sumi = sumi0 + sumi1; + sumf += sumi*GGML_FP16_TO_FP32(x[ib].d)*GGML_FP16_TO_FP32(y[ib].d); + } + + *s = sumf; +} + +void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + const int qk = QK8_1; + const int nb = n / qk; + + assert(n % qk == 0); +#if defined(__ARM_FEATURE_MATMUL_INT8) + assert((nrc == 2) || (nrc == 1)); +#else + assert(nrc == 1); +#endif + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_q4_1 * restrict x = vx; + const block_q8_1 * restrict y = vy; + +#if defined(__ARM_FEATURE_MATMUL_INT8) + if (nrc == 2) { + const block_q4_1 * restrict vx0 = vx; + const block_q4_1 * restrict vx1 = (const block_q4_1 *) 
((const uint8_t*)vx + bx); + const block_q8_1 * restrict vy0 = vy; + const block_q8_1 * restrict vy1 = (const block_q8_1 *) ((const uint8_t*)vy + by); + + float32x4_t sumv0 = vdupq_n_f32(0.0f); + float32x4_t summs0 = vdupq_n_f32(0.0f); + + for (int i = 0; i < nb; i++) { + const block_q4_1 * restrict b_x0 = &vx0[i]; + const block_q4_1 * restrict b_x1 = &vx1[i]; + const block_q8_1 * restrict b_y0 = &vy0[i]; + const block_q8_1 * restrict b_y1 = &vy1[i]; + + float32_t summs_t[4] = {GGML_FP16_TO_FP32(b_x0->m) * GGML_FP16_TO_FP32(b_y0->s), + GGML_FP16_TO_FP32(b_x1->m) * GGML_FP16_TO_FP32(b_y0->s), + GGML_FP16_TO_FP32(b_x0->m) * GGML_FP16_TO_FP32(b_y1->s), + GGML_FP16_TO_FP32(b_x1->m) * GGML_FP16_TO_FP32(b_y1->s)}; + summs0 = vaddq_f32(summs0, vld1q_f32(summs_t)); + + const uint8x16_t m4b = vdupq_n_u8(0x0F); + + const uint8x16_t v0_0 = vld1q_u8(b_x0->qs); + const uint8x16_t v0_1 = vld1q_u8(b_x1->qs); + + // 4-bit -> 8-bit + const int8x16_t x0_l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); + const int8x16_t x0_h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); + const int8x16_t x1_l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); + const int8x16_t x1_h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); + + // load y + const int8x16_t y0_l = vld1q_s8(b_y0->qs); + const int8x16_t y0_h = vld1q_s8(b_y0->qs + 16); + const int8x16_t y1_l = vld1q_s8(b_y1->qs); + const int8x16_t y1_h = vld1q_s8(b_y1->qs + 16); + + // mmla into int32x4_t + // convert the q8_1 delta from fp16 explicitly, matching the summs_t lines above and the q8_0 mmla path; the bare b_y*->d only promoted correctly when ggml_half is a native __fp16 + float32_t _scale[4] = {GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y1->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y1->d)}; + float32x4_t scale = vld1q_f32(_scale); + + int8x16_t l0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + int8x16_t l1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + + int8x16_t l2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + int8x16_t l3 =
vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + + int8x16_t r0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + int8x16_t r1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + + int8x16_t r2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + int8x16_t r3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + sumv0 = vmlaq_f32(sumv0,(vcvtq_f32_s32(vmmlaq_s32((vmmlaq_s32((vmmlaq_s32((vmmlaq_s32(vdupq_n_s32(0), l0, r0)), + l1, r1)), l2, r2)), l3, r3))), scale); + } + + float32x4_t sumv1 = vextq_f32(sumv0, sumv0, 2); + float32x4_t sumv2 = vzip1q_f32(sumv0, sumv1); + sumv2 = vaddq_f32(sumv2, summs0); + + vst1_f32(s, vget_low_f32 (sumv2)); + vst1_f32(s + bs, vget_high_f32(sumv2)); + return; + } +#endif + + int ib = 0; + float sumf = 0; + + // TODO: add WASM SIMD +#if defined(__ARM_NEON) + float32x4_t sumv0 = vdupq_n_f32(0.0f); + float32x4_t sumv1 = vdupq_n_f32(0.0f); + + float summs = 0; + + for (; ib + 1 < nb; ib += 2) { + const block_q4_1 * restrict x0 = &x[ib + 0]; + const block_q4_1 * restrict x1 = &x[ib + 1]; + const block_q8_1 * restrict y0 = &y[ib + 0]; + const block_q8_1 * restrict y1 = &y[ib + 1]; + + summs += GGML_FP16_TO_FP32(x0->m) * GGML_FP16_TO_FP32(y0->s) + GGML_FP16_TO_FP32(x1->m) * GGML_FP16_TO_FP32(y1->s); + + const uint8x16_t m4b = vdupq_n_u8(0x0F); + + const uint8x16_t v0_0 = vld1q_u8(x0->qs); + const uint8x16_t v0_1 = vld1q_u8(x1->qs); + + // 4-bit -> 8-bit + const int8x16_t v0_0l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); + const int8x16_t v0_0h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); + const int8x16_t v0_1l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); + const int8x16_t v0_1h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); + + // load y + const int8x16_t v1_0l = vld1q_s8(y0->qs); + const int8x16_t v1_0h = vld1q_s8(y0->qs + 16); + const 
int8x16_t v1_1l = vld1q_s8(y1->qs); + const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); + + // dot product into int32x4_t + const int32x4_t p_0 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), v0_0l, v1_0l), v0_0h, v1_0h); + const int32x4_t p_1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), v0_1l, v1_1l), v0_1h, v1_1h); + + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); + } + + sumf = vaddvq_f32(sumv0) + vaddvq_f32(sumv1) + summs; +#elif defined(__AVX2__) || defined(__AVX__) + // Initialize accumulator with zeros + __m256 acc = _mm256_setzero_ps(); + + float summs = 0; + + // Main loop + for (; ib < nb; ++ib) { + const float d0 = GGML_FP16_TO_FP32(x[ib].d); + const float d1 = GGML_FP16_TO_FP32(y[ib].d); + + summs += GGML_FP16_TO_FP32(x[ib].m) * GGML_FP16_TO_FP32(y[ib].s); + + const __m256 d0v = _mm256_set1_ps( d0 ); + const __m256 d1v = _mm256_set1_ps( d1 ); + + // Compute combined scales + const __m256 d0d1 = _mm256_mul_ps( d0v, d1v ); + + // Load 16 bytes, and unpack 4 bit fields into bytes, making 32 bytes + const __m256i qx = bytes_from_nibbles_32(x[ib].qs); + const __m256i qy = _mm256_loadu_si256( (const __m256i *)y[ib].qs ); + + const __m256 xy = mul_sum_us8_pairs_float(qx, qy); + + // Accumulate d0*d1*x*y +#if defined(__AVX2__) + acc = _mm256_fmadd_ps( d0d1, xy, acc ); +#else + acc = _mm256_add_ps( _mm256_mul_ps( d0d1, xy ), acc ); +#endif + } + + sumf = hsum_float_8(acc) + summs; +#elif defined(__riscv_v_intrinsic) + size_t vl = __riscv_vsetvl_e8m1(qk/2); + + for (; ib < nb; ++ib) { + // load elements + vuint8mf2_t tx = __riscv_vle8_v_u8mf2(x[ib].qs, vl); + + vint8mf2_t y0 = __riscv_vle8_v_i8mf2(y[ib].qs, vl); + vint8mf2_t y1 = __riscv_vle8_v_i8mf2(y[ib].qs+16, vl); + + // mask and store lower part of x, and then upper part + vuint8mf2_t x_a = __riscv_vand_vx_u8mf2(tx, 0x0F, vl); + vuint8mf2_t x_l = 
__riscv_vsrl_vx_u8mf2(tx, 0x04, vl); + + vint8mf2_t v0 = __riscv_vreinterpret_v_u8mf2_i8mf2(x_a); + vint8mf2_t v1 = __riscv_vreinterpret_v_u8mf2_i8mf2(x_l); + + vint16m1_t vec_mul1 = __riscv_vwmul_vv_i16m1(v0, y0, vl); + vint16m1_t vec_mul2 = __riscv_vwmul_vv_i16m1(v1, y1, vl); + + vint32m1_t vec_zero = __riscv_vmv_v_x_i32m1(0, vl); + + vint32m1_t vs1 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul1, vec_zero, vl); + vint32m1_t vs2 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul2, vs1, vl); + + int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); + + sumf += (GGML_FP16_TO_FP32(x[ib].d)*GGML_FP16_TO_FP32(y[ib].d))*sumi + GGML_FP16_TO_FP32(x[ib].m)*GGML_FP16_TO_FP32(y[ib].s); + } + +#elif defined(__POWER9_VECTOR__) + const vector signed char lowMask = vec_splats((signed char)0xF); + const vector signed int v0 = vec_splats((int32_t)0); + const vector unsigned char v4 = vec_splats((unsigned char)0x4); + + vector float vsumf0 = vec_splats(0.0f); + +#pragma GCC unroll 4 + for (; ib < nb; ++ib) { + __builtin_prefetch(x[ib].qs, 0, 1); + __builtin_prefetch(y[ib].qs, 0, 1); + + vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[ib].d)); + vector float vyd = vec_splats(GGML_FP16_TO_FP32(y[ib].d)); + vector float vd = vec_mul(vxd, vyd); + + vector float vxmin = vec_splats(GGML_FP16_TO_FP32(x[ib].m)); + vector float vys = {GGML_FP16_TO_FP32(y[ib].s), 0.0f, 0.0f, 0.0f}; + vsumf0 = vec_madd(vxmin, vys, vsumf0); + + vector signed char qxs = (vector signed char)vec_xl( 0, x[ib].qs); + vector signed char q8y0 = vec_xl( 0, y[ib].qs); + vector signed char q8y1 = vec_xl(16, y[ib].qs); + + vector unsigned char q4x0 = (vector unsigned char)vec_and(qxs, lowMask); + vector unsigned char q4x1 = (vector unsigned char)vec_sr(qxs, v4); + + vector signed int vsumi0 = v0; + + vsumi0 = vec_msum(q8y0, q4x0, vsumi0); + vsumi0 = vec_msum(q8y1, q4x1, vsumi0); + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0); + } + + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, 
vsumf0, 8)); + + sumf = vec_extract(vsumf0, 0); + +#elif defined(__loongarch_asx) + // Initialize accumulator with zeros + __m256 acc = (__m256)__lasx_xvldi(0); + + float summs = 0; + + // Main loop + for (; ib < nb; ++ib) { + const float d0 = GGML_FP16_TO_FP32(x[ib].d); + const float d1 = GGML_FP16_TO_FP32(y[ib].d); + + summs += GGML_FP16_TO_FP32(x[ib].m) * GGML_FP16_TO_FP32(y[ib].s); + + const __m256 d0v = __lasx_xvreplfr2vr_s( d0 ); + const __m256 d1v = __lasx_xvreplfr2vr_s( d1 ); + + // Compute combined scales + const __m256 d0d1 = __lasx_xvfmul_s( d0v, d1v ); + + // Load 16 bytes, and unpack 4 bit fields into bytes, making 32 bytes + const __m256i qx = bytes_from_nibbles_32(x[ib].qs); + const __m256i qy = __lasx_xvld( (const __m256i *)y[ib].qs, 0); + + const __m256 xy = mul_sum_us8_pairs_float(qx, qy); + + // Accumulate d0*d1*x*y + acc = __lasx_xvfmadd_s( d0d1, xy, acc ); + } + + sumf = hsum_float_8(acc) + summs; +#endif + for (; ib < nb; ++ib) { + int sumi0 = 0; + int sumi1 = 0; + + for (int j = 0; j < qk/2; ++j) { + const int v0 = (x[ib].qs[j] & 0x0F); + const int v1 = (x[ib].qs[j] >> 4); + + sumi0 += (v0 * y[ib].qs[j]); + sumi1 += (v1 * y[ib].qs[j + qk/2]); + } + + int sumi = sumi0 + sumi1; + sumf += (GGML_FP16_TO_FP32(x[ib].d)*GGML_FP16_TO_FP32(y[ib].d))*sumi + GGML_FP16_TO_FP32(x[ib].m)*GGML_FP16_TO_FP32(y[ib].s); + } + + *s = sumf; +} + +void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + const int qk = QK8_0; + const int nb = n / qk; + + int ib = 0; + float sumf = 0; + + assert(n % qk == 0); + assert(qk == QK5_0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_q5_0 * restrict x = vx; + const block_q8_0 * restrict y = vy; + +#if defined(__ARM_NEON) + float32x4_t sumv0 = vdupq_n_f32(0.0f); + float32x4_t sumv1 = vdupq_n_f32(0.0f); + + uint32_t qh0; + uint32_t qh1; + + uint64_t tmp0[4]; + uint64_t tmp1[4]; + + 
for (; ib + 1 < nb; ib += 2) { + const block_q5_0 * restrict x0 = &x[ib]; + const block_q5_0 * restrict x1 = &x[ib + 1]; + const block_q8_0 * restrict y0 = &y[ib]; + const block_q8_0 * restrict y1 = &y[ib + 1]; + + const uint8x16_t m4b = vdupq_n_u8(0x0F); + + // extract the 5th bit via lookup table ((!b) << 4) + memcpy(&qh0, x0->qh, sizeof(qh0)); + memcpy(&qh1, x1->qh, sizeof(qh1)); + + tmp0[0] = table_b2b_1[(qh0 >> 0) & 0xFF]; + tmp0[1] = table_b2b_1[(qh0 >> 8) & 0xFF]; + tmp0[2] = table_b2b_1[(qh0 >> 16) & 0xFF]; + tmp0[3] = table_b2b_1[(qh0 >> 24) ]; + + tmp1[0] = table_b2b_1[(qh1 >> 0) & 0xFF]; + tmp1[1] = table_b2b_1[(qh1 >> 8) & 0xFF]; + tmp1[2] = table_b2b_1[(qh1 >> 16) & 0xFF]; + tmp1[3] = table_b2b_1[(qh1 >> 24) ]; + + const int8x16_t qhl0 = vld1q_s8((const int8_t *)(tmp0 + 0)); + const int8x16_t qhh0 = vld1q_s8((const int8_t *)(tmp0 + 2)); + const int8x16_t qhl1 = vld1q_s8((const int8_t *)(tmp1 + 0)); + const int8x16_t qhh1 = vld1q_s8((const int8_t *)(tmp1 + 2)); + + const uint8x16_t v0_0 = vld1q_u8(x0->qs); + const uint8x16_t v0_1 = vld1q_u8(x1->qs); + + // 4-bit -> 8-bit + int8x16_t v0_0l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); + int8x16_t v0_0h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); + int8x16_t v0_1l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); + int8x16_t v0_1h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); + + // add high bit and sub 16 (equivalent to sub 0x10 when bit is zero) + const int8x16_t v0_0lf = vsubq_s8(v0_0l, qhl0); + const int8x16_t v0_0hf = vsubq_s8(v0_0h, qhh0); + const int8x16_t v0_1lf = vsubq_s8(v0_1l, qhl1); + const int8x16_t v0_1hf = vsubq_s8(v0_1h, qhh1); + + // load y + const int8x16_t v1_0l = vld1q_s8(y0->qs); + const int8x16_t v1_0h = vld1q_s8(y0->qs + 16); + const int8x16_t v1_1l = vld1q_s8(y1->qs); + const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); + + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( + ggml_vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), + ggml_vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), 
GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( + ggml_vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), + ggml_vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); + } + + sumf = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); +#elif defined(__wasm_simd128__) + v128_t sumv = wasm_f32x4_splat(0.0f); + + uint32_t qh; + uint64_t tmp[4]; + + // TODO: check if unrolling this is better + for (; ib < nb; ++ib) { + const block_q5_0 * restrict x0 = &x[ib]; + const block_q8_0 * restrict y0 = &y[ib]; + + const v128_t m4b = wasm_i8x16_splat(0x0F); + + // extract the 5th bit + memcpy(&qh, x0->qh, sizeof(qh)); + + tmp[0] = table_b2b_1[(qh >> 0) & 0xFF]; + tmp[1] = table_b2b_1[(qh >> 8) & 0xFF]; + tmp[2] = table_b2b_1[(qh >> 16) & 0xFF]; + tmp[3] = table_b2b_1[(qh >> 24) ]; + + const v128_t qhl = wasm_v128_load(tmp + 0); + const v128_t qhh = wasm_v128_load(tmp + 2); + + const v128_t v0 = wasm_v128_load(x0->qs); + + // 4-bit -> 8-bit + const v128_t v0l = wasm_v128_and (v0, m4b); + const v128_t v0h = wasm_u8x16_shr(v0, 4); + + // add high bit and sub 16 (equivalent to sub 0x10 when bit is zero) + const v128_t v0lf = wasm_i8x16_sub(v0l, qhl); + const v128_t v0hf = wasm_i8x16_sub(v0h, qhh); + + // load y + const v128_t v1l = wasm_v128_load(y0->qs); + const v128_t v1h = wasm_v128_load(y0->qs + 16); + + // int8x16 -> int16x8 + const v128_t v0lfl = wasm_i16x8_extend_low_i8x16 (v0lf); + const v128_t v0lfh = wasm_i16x8_extend_high_i8x16(v0lf); + const v128_t v0hfl = wasm_i16x8_extend_low_i8x16 (v0hf); + const v128_t v0hfh = wasm_i16x8_extend_high_i8x16(v0hf); + + const v128_t v1ll = wasm_i16x8_extend_low_i8x16 (v1l); + const v128_t v1lh = wasm_i16x8_extend_high_i8x16(v1l); + const v128_t v1hl = wasm_i16x8_extend_low_i8x16 (v1h); + const v128_t v1hh = wasm_i16x8_extend_high_i8x16(v1h); + + // dot product + sumv = wasm_f32x4_add(sumv, wasm_f32x4_mul(wasm_f32x4_convert_i32x4( + wasm_i32x4_add( + 
wasm_i32x4_add(wasm_i32x4_dot_i16x8(v0lfl, v1ll), + wasm_i32x4_dot_i16x8(v0lfh, v1lh)), + wasm_i32x4_add(wasm_i32x4_dot_i16x8(v0hfl, v1hl), + wasm_i32x4_dot_i16x8(v0hfh, v1hh)))), + wasm_f32x4_splat(GGML_FP16_TO_FP32(x0->d) * GGML_FP16_TO_FP32(y0->d)))); + } + + sumf = wasm_f32x4_extract_lane(sumv, 0) + wasm_f32x4_extract_lane(sumv, 1) + + wasm_f32x4_extract_lane(sumv, 2) + wasm_f32x4_extract_lane(sumv, 3); +#elif defined(__AVX2__) + // Initialize accumulator with zeros + __m256 acc = _mm256_setzero_ps(); + + // Main loop + for (; ib < nb; ++ib) { + /* Compute combined scale for the block */ + const __m256 d = _mm256_set1_ps(GGML_FP16_TO_FP32(x[ib].d) * GGML_FP16_TO_FP32(y[ib].d)); + + __m256i qx = bytes_from_nibbles_32(x[ib].qs); + __m256i bxhi = bytes_from_bits_32(x[ib].qh); + bxhi = _mm256_andnot_si256(bxhi, _mm256_set1_epi8((char)0xF0)); + qx = _mm256_or_si256(qx, bxhi); + + __m256i qy = _mm256_loadu_si256((const __m256i *)y[ib].qs); + + const __m256 q = mul_sum_i8_pairs_float(qx, qy); + + /* Multiply q with scale and accumulate */ + acc = _mm256_fmadd_ps(d, q, acc); + } + + sumf = hsum_float_8(acc); +#elif defined(__AVX__) + // Initialize accumulator with zeros + __m256 acc = _mm256_setzero_ps(); + __m128i mask = _mm_set1_epi8((char)0xF0); + + // Main loop + for (; ib < nb; ++ib) { + /* Compute combined scale for the block */ + const __m256 d = _mm256_set1_ps(GGML_FP16_TO_FP32(x[ib].d) * GGML_FP16_TO_FP32(y[ib].d)); + + __m256i bx_0 = bytes_from_nibbles_32(x[ib].qs); + const __m256i bxhi = bytes_from_bits_32(x[ib].qh); + __m128i bxhil = _mm256_castsi256_si128(bxhi); + __m128i bxhih = _mm256_extractf128_si256(bxhi, 1); + bxhil = _mm_andnot_si128(bxhil, mask); + bxhih = _mm_andnot_si128(bxhih, mask); + __m128i bxl = _mm256_castsi256_si128(bx_0); + __m128i bxh = _mm256_extractf128_si256(bx_0, 1); + bxl = _mm_or_si128(bxl, bxhil); + bxh = _mm_or_si128(bxh, bxhih); + bx_0 = MM256_SET_M128I(bxh, bxl); + + const __m256i by_0 = _mm256_loadu_si256((const __m256i 
*)y[ib].qs); + + const __m256 q = mul_sum_i8_pairs_float(bx_0, by_0); + + /* Multiply q with scale and accumulate */ + acc = _mm256_add_ps(_mm256_mul_ps(d, q), acc); + } + + sumf = hsum_float_8(acc); +#elif defined(__riscv_v_intrinsic) + uint32_t qh; + + size_t vl = __riscv_vsetvl_e8m1(qk/2); + + // These temporary registers are for masking and shift operations + vuint32m2_t vt_1 = __riscv_vid_v_u32m2(vl); + vuint32m2_t vt_2 = __riscv_vsll_vv_u32m2(__riscv_vmv_v_x_u32m2(1, vl), vt_1, vl); + + vuint32m2_t vt_3 = __riscv_vsll_vx_u32m2(vt_2, 16, vl); + vuint32m2_t vt_4 = __riscv_vadd_vx_u32m2(vt_1, 12, vl); + + for (; ib < nb; ++ib) { + memcpy(&qh, x[ib].qh, sizeof(uint32_t)); + + // ((qh & (1u << (j + 0 ))) >> (j + 0 )) << 4; + vuint32m2_t xha_0 = __riscv_vand_vx_u32m2(vt_2, qh, vl); + vuint32m2_t xhr_0 = __riscv_vsrl_vv_u32m2(xha_0, vt_1, vl); + vuint32m2_t xhl_0 = __riscv_vsll_vx_u32m2(xhr_0, 4, vl); + + // ((qh & (1u << (j + 16))) >> (j + 12)); + vuint32m2_t xha_1 = __riscv_vand_vx_u32m2(vt_3, qh, vl); + vuint32m2_t xhl_1 = __riscv_vsrl_vv_u32m2(xha_1, vt_4, vl); + + // narrowing + vuint16m1_t xhc_0 = __riscv_vncvt_x_x_w_u16m1(xhl_0, vl); + vuint8mf2_t xh_0 = __riscv_vncvt_x_x_w_u8mf2(xhc_0, vl); + + vuint16m1_t xhc_1 = __riscv_vncvt_x_x_w_u16m1(xhl_1, vl); + vuint8mf2_t xh_1 = __riscv_vncvt_x_x_w_u8mf2(xhc_1, vl); + + // load + vuint8mf2_t tx = __riscv_vle8_v_u8mf2(x[ib].qs, vl); + + vint8mf2_t y0 = __riscv_vle8_v_i8mf2(y[ib].qs, vl); + vint8mf2_t y1 = __riscv_vle8_v_i8mf2(y[ib].qs+16, vl); + + vuint8mf2_t x_at = __riscv_vand_vx_u8mf2(tx, 0x0F, vl); + vuint8mf2_t x_lt = __riscv_vsrl_vx_u8mf2(tx, 0x04, vl); + + vuint8mf2_t x_a = __riscv_vor_vv_u8mf2(x_at, xh_0, vl); + vuint8mf2_t x_l = __riscv_vor_vv_u8mf2(x_lt, xh_1, vl); + + vint8mf2_t x_ai = __riscv_vreinterpret_v_u8mf2_i8mf2(x_a); + vint8mf2_t x_li = __riscv_vreinterpret_v_u8mf2_i8mf2(x_l); + + vint8mf2_t v0 = __riscv_vsub_vx_i8mf2(x_ai, 16, vl); + vint8mf2_t v1 = __riscv_vsub_vx_i8mf2(x_li, 16, vl); + + 
vint16m1_t vec_mul1 = __riscv_vwmul_vv_i16m1(v0, y0, vl); + vint16m1_t vec_mul2 = __riscv_vwmul_vv_i16m1(v1, y1, vl); + + vint32m1_t vec_zero = __riscv_vmv_v_x_i32m1(0, vl); + + vint32m1_t vs1 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul1, vec_zero, vl); + vint32m1_t vs2 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul2, vs1, vl); + + int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); + + sumf += (GGML_FP16_TO_FP32(x[ib].d)*GGML_FP16_TO_FP32(y[ib].d)) * sumi; + } + +#elif defined(__POWER9_VECTOR__) + const vector signed char lowMask = vec_splats((signed char)0xF); + const vector unsigned char v4 = vec_splats((unsigned char)4); + + vector float vsumf0 = vec_splats(0.0f); + +#pragma GCC unroll 4 + for (; ib < nb; ++ib) { + __builtin_prefetch(x[ib].qs, 0, 1); + __builtin_prefetch(y[ib].qs, 0, 1); + + vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[ib].d)); + vector float vyd = vec_splats(GGML_FP16_TO_FP32(y[ib].d)); + vector float vd = vec_mul(vxd, vyd); + + vector signed long long aux64x2_0 = {(uint64_t)(table_b2b_1[x[ib].qh[0]]), (uint64_t)(table_b2b_1[x[ib].qh[1]])}; + vector signed long long aux64x2_1 = {(uint64_t)(table_b2b_1[x[ib].qh[2]]), (uint64_t)(table_b2b_1[x[ib].qh[3]])}; + + vector signed char qh0 = (vector signed char)aux64x2_0; + vector signed char qh1 = (vector signed char)aux64x2_1; + + vector signed char qxs = (vector signed char)vec_xl( 0, x[ib].qs); + + vector signed char q5x0 = vec_sub(vec_and (qxs, lowMask), qh0); + vector signed char q5x1 = vec_sub(vec_sr(qxs, v4), qh1); + + vector signed char q8y0 = vec_xl( 0, y[ib].qs); + vector signed char q8y1 = vec_xl( 16, y[ib].qs); + + vector signed short qv0 = vec_add(vec_mule(q5x0, q8y0), vec_mulo(q5x0, q8y0)); + vector signed short qv1 = vec_add(vec_mule(q5x1, q8y1), vec_mulo(q5x1, q8y1)); + + qv0 = vec_add(qv0, qv1); + + vector signed int vsumi0 = vec_add(vec_unpackh(qv0), vec_unpackl(qv0)); + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0); + } + + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = 
vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8)); + + sumf = vec_extract(vsumf0, 0); + +#elif defined(__loongarch_asx) + // Initialize accumulator with zeros + __m256 acc = (__m256)__lasx_xvldi(0); + + // Main loop + for (; ib < nb; ++ib) { + /* Compute combined scale for the block */ + const __m256 d = __lasx_xvreplfr2vr_s(GGML_FP16_TO_FP32(x[ib].d) * GGML_FP16_TO_FP32(y[ib].d)); //FIXME + + __m256i qx = bytes_from_nibbles_32(x[ib].qs); + __m256i bxhi = bytes_from_bits_32(x[ib].qh); + bxhi = __lasx_xvandn_v(bxhi, __lasx_xvreplgr2vr_b((char)0xF0)); + qx = __lasx_xvor_v(qx, bxhi); + + __m256i qy = __lasx_xvld((const __m256i *)y[ib].qs, 0); + + const __m256 q = mul_sum_i8_pairs_float(qx, qy); + + /* Multiply q with scale and accumulate */ + acc = __lasx_xvfmadd_s(d, q, acc); + } + + sumf = hsum_float_8(acc); +#endif + for (; ib < nb; ++ib) { + uint32_t qh; + memcpy(&qh, x[ib].qh, sizeof(qh)); + + int sumi0 = 0; + int sumi1 = 0; + + for (int j = 0; j < qk/2; ++j) { + const uint8_t xh_0 = ((qh & (1u << (j + 0 ))) >> (j + 0 )) << 4; + const uint8_t xh_1 = ((qh & (1u << (j + 16))) >> (j + 12)); + + const int32_t x0 = (int8_t)(((x[ib].qs[j] & 0x0F) | xh_0) - 16); + const int32_t x1 = (int8_t)(((x[ib].qs[j] >> 4) | xh_1) - 16); + + sumi0 += (x0 * y[ib].qs[j]); + sumi1 += (x1 * y[ib].qs[j + qk/2]); + } + + int sumi = sumi0 + sumi1; + sumf += (GGML_FP16_TO_FP32(x[ib].d)*GGML_FP16_TO_FP32(y[ib].d)) * sumi; + } + + *s = sumf; +} + +void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + const int qk = QK8_1; + const int nb = n / qk; + + int ib = 0; + float sumf = 0; + + assert(n % qk == 0); + assert(qk == QK5_1); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_q5_1 * restrict x = vx; + const block_q8_1 * restrict y = vy; + +#if defined(__ARM_NEON) + float32x4_t sumv0 = vdupq_n_f32(0.0f); + float32x4_t sumv1 = vdupq_n_f32(0.0f); + + float 
summs0 = 0.0f; + float summs1 = 0.0f; + + uint32_t qh0; + uint32_t qh1; + + uint64_t tmp0[4]; + uint64_t tmp1[4]; + + for (; ib + 1 < nb; ib += 2) { + const block_q5_1 * restrict x0 = &x[ib]; + const block_q5_1 * restrict x1 = &x[ib + 1]; + const block_q8_1 * restrict y0 = &y[ib]; + const block_q8_1 * restrict y1 = &y[ib + 1]; + + const uint8x16_t m4b = vdupq_n_u8(0x0F); + + summs0 += GGML_FP16_TO_FP32(x0->m) * GGML_FP16_TO_FP32(y0->s); + summs1 += GGML_FP16_TO_FP32(x1->m) * GGML_FP16_TO_FP32(y1->s); + + // extract the 5th bit via lookup table ((b) << 4) + memcpy(&qh0, x0->qh, sizeof(qh0)); + memcpy(&qh1, x1->qh, sizeof(qh1)); + + tmp0[0] = table_b2b_0[(qh0 >> 0) & 0xFF]; + tmp0[1] = table_b2b_0[(qh0 >> 8) & 0xFF]; + tmp0[2] = table_b2b_0[(qh0 >> 16) & 0xFF]; + tmp0[3] = table_b2b_0[(qh0 >> 24) ]; + + tmp1[0] = table_b2b_0[(qh1 >> 0) & 0xFF]; + tmp1[1] = table_b2b_0[(qh1 >> 8) & 0xFF]; + tmp1[2] = table_b2b_0[(qh1 >> 16) & 0xFF]; + tmp1[3] = table_b2b_0[(qh1 >> 24) ]; + + const int8x16_t qhl0 = vld1q_s8((const int8_t *)(tmp0 + 0)); + const int8x16_t qhh0 = vld1q_s8((const int8_t *)(tmp0 + 2)); + const int8x16_t qhl1 = vld1q_s8((const int8_t *)(tmp1 + 0)); + const int8x16_t qhh1 = vld1q_s8((const int8_t *)(tmp1 + 2)); + + const uint8x16_t v0_0 = vld1q_u8(x0->qs); + const uint8x16_t v0_1 = vld1q_u8(x1->qs); + + // 4-bit -> 8-bit + const int8x16_t v0_0l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); + const int8x16_t v0_0h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); + const int8x16_t v0_1l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); + const int8x16_t v0_1h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); + + // add high bit + const int8x16_t v0_0lf = vorrq_s8(v0_0l, qhl0); + const int8x16_t v0_0hf = vorrq_s8(v0_0h, qhh0); + const int8x16_t v0_1lf = vorrq_s8(v0_1l, qhl1); + const int8x16_t v0_1hf = vorrq_s8(v0_1h, qhh1); + + // load y + const int8x16_t v1_0l = vld1q_s8(y0->qs); + const int8x16_t v1_0h = vld1q_s8(y0->qs + 16); + const int8x16_t v1_1l = vld1q_s8(y1->qs); + 
const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); + + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( + ggml_vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), + ggml_vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( + ggml_vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), + ggml_vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); + } + + sumf = vaddvq_f32(sumv0) + vaddvq_f32(sumv1) + summs0 + summs1; +#elif defined(__wasm_simd128__) + v128_t sumv = wasm_f32x4_splat(0.0f); + + float summs = 0.0f; + + uint32_t qh; + uint64_t tmp[4]; + + // TODO: check if unrolling this is better + for (; ib < nb; ++ib) { + const block_q5_1 * restrict x0 = &x[ib]; + const block_q8_1 * restrict y0 = &y[ib]; + + summs += GGML_FP16_TO_FP32(x0->m) * GGML_FP16_TO_FP32(y0->s); + + const v128_t m4b = wasm_i8x16_splat(0x0F); + + // extract the 5th bit + memcpy(&qh, x0->qh, sizeof(qh)); + + tmp[0] = table_b2b_0[(qh >> 0) & 0xFF]; + tmp[1] = table_b2b_0[(qh >> 8) & 0xFF]; + tmp[2] = table_b2b_0[(qh >> 16) & 0xFF]; + tmp[3] = table_b2b_0[(qh >> 24) ]; + + const v128_t qhl = wasm_v128_load(tmp + 0); + const v128_t qhh = wasm_v128_load(tmp + 2); + + const v128_t v0 = wasm_v128_load(x0->qs); + + // 4-bit -> 8-bit + const v128_t v0l = wasm_v128_and (v0, m4b); + const v128_t v0h = wasm_u8x16_shr(v0, 4); + + // add high bit + const v128_t v0lf = wasm_v128_or(v0l, qhl); + const v128_t v0hf = wasm_v128_or(v0h, qhh); + + // load y + const v128_t v1l = wasm_v128_load(y0->qs); + const v128_t v1h = wasm_v128_load(y0->qs + 16); + + // int8x16 -> int16x8 + const v128_t v0lfl = wasm_i16x8_extend_low_i8x16 (v0lf); + const v128_t v0lfh = wasm_i16x8_extend_high_i8x16(v0lf); + const v128_t v0hfl = wasm_i16x8_extend_low_i8x16 (v0hf); + const v128_t v0hfh = wasm_i16x8_extend_high_i8x16(v0hf); + + const v128_t v1ll = wasm_i16x8_extend_low_i8x16 (v1l); + const v128_t v1lh = 
wasm_i16x8_extend_high_i8x16(v1l); + const v128_t v1hl = wasm_i16x8_extend_low_i8x16 (v1h); + const v128_t v1hh = wasm_i16x8_extend_high_i8x16(v1h); + + // dot product + sumv = wasm_f32x4_add(sumv, + wasm_f32x4_mul(wasm_f32x4_convert_i32x4(wasm_i32x4_add( + wasm_i32x4_add(wasm_i32x4_dot_i16x8(v0lfl, v1ll), + wasm_i32x4_dot_i16x8(v0lfh, v1lh)), + wasm_i32x4_add(wasm_i32x4_dot_i16x8(v0hfl, v1hl), + wasm_i32x4_dot_i16x8(v0hfh, v1hh)))), + wasm_f32x4_splat(GGML_FP16_TO_FP32(x0->d) * GGML_FP16_TO_FP32(y0->d)))); + } + + sumf = wasm_f32x4_extract_lane(sumv, 0) + wasm_f32x4_extract_lane(sumv, 1) + + wasm_f32x4_extract_lane(sumv, 2) + wasm_f32x4_extract_lane(sumv, 3) + summs; +#elif defined(__AVX2__) + // Initialize accumulator with zeros + __m256 acc = _mm256_setzero_ps(); + + float summs = 0.0f; + + // Main loop + for (; ib < nb; ++ib) { + const __m256 dx = _mm256_set1_ps(GGML_FP16_TO_FP32(x[ib].d)); + + summs += GGML_FP16_TO_FP32(x[ib].m) * GGML_FP16_TO_FP32(y[ib].s); + + __m256i qx = bytes_from_nibbles_32(x[ib].qs); + __m256i bxhi = bytes_from_bits_32(x[ib].qh); + bxhi = _mm256_and_si256(bxhi, _mm256_set1_epi8(0x10)); + qx = _mm256_or_si256(qx, bxhi); + + const __m256 dy = _mm256_set1_ps(GGML_FP16_TO_FP32(y[ib].d)); + const __m256i qy = _mm256_loadu_si256((const __m256i *)y[ib].qs); + + const __m256 q = mul_sum_us8_pairs_float(qx, qy); + + acc = _mm256_fmadd_ps(q, _mm256_mul_ps(dx, dy), acc); + } + + sumf = hsum_float_8(acc) + summs; +#elif defined(__AVX__) + // Initialize accumulator with zeros + __m256 acc = _mm256_setzero_ps(); + __m128i mask = _mm_set1_epi8(0x10); + + float summs = 0.0f; + + // Main loop + for (; ib < nb; ++ib) { + const __m256 dx = _mm256_set1_ps(GGML_FP16_TO_FP32(x[ib].d)); + + summs += GGML_FP16_TO_FP32(x[ib].m) * GGML_FP16_TO_FP32(y[ib].s); + + __m256i bx_0 = bytes_from_nibbles_32(x[ib].qs); + const __m256i bxhi = bytes_from_bits_32(x[ib].qh); + __m128i bxhil = _mm256_castsi256_si128(bxhi); + __m128i bxhih = _mm256_extractf128_si256(bxhi, 1); + 
bxhil = _mm_and_si128(bxhil, mask); + bxhih = _mm_and_si128(bxhih, mask); + __m128i bxl = _mm256_castsi256_si128(bx_0); + __m128i bxh = _mm256_extractf128_si256(bx_0, 1); + bxl = _mm_or_si128(bxl, bxhil); + bxh = _mm_or_si128(bxh, bxhih); + bx_0 = MM256_SET_M128I(bxh, bxl); + + const __m256 dy = _mm256_set1_ps(GGML_FP16_TO_FP32(y[ib].d)); + const __m256i by_0 = _mm256_loadu_si256((const __m256i *)y[ib].qs); + + const __m256 q = mul_sum_us8_pairs_float(bx_0, by_0); + + acc = _mm256_add_ps(_mm256_mul_ps(q, _mm256_mul_ps(dx, dy)), acc); + } + + sumf = hsum_float_8(acc) + summs; +#elif defined(__riscv_v_intrinsic) + uint32_t qh; + + size_t vl = __riscv_vsetvl_e8m1(qk/2); + + // temporary registers for shift operations + vuint32m2_t vt_1 = __riscv_vid_v_u32m2(vl); + vuint32m2_t vt_2 = __riscv_vadd_vx_u32m2(vt_1, 12, vl); + + for (; ib < nb; ++ib) { + memcpy(&qh, x[ib].qh, sizeof(uint32_t)); + + // load qh + vuint32m2_t vqh = __riscv_vmv_v_x_u32m2(qh, vl); + + // ((qh >> (j + 0)) << 4) & 0x10; + vuint32m2_t xhr_0 = __riscv_vsrl_vv_u32m2(vqh, vt_1, vl); + vuint32m2_t xhl_0 = __riscv_vsll_vx_u32m2(xhr_0, 4, vl); + vuint32m2_t xha_0 = __riscv_vand_vx_u32m2(xhl_0, 0x10, vl); + + // ((qh >> (j + 12)) ) & 0x10; + vuint32m2_t xhr_1 = __riscv_vsrl_vv_u32m2(vqh, vt_2, vl); + vuint32m2_t xha_1 = __riscv_vand_vx_u32m2(xhr_1, 0x10, vl); + + // narrowing + vuint16m1_t xhc_0 = __riscv_vncvt_x_x_w_u16m1(xha_0, vl); + vuint8mf2_t xh_0 = __riscv_vncvt_x_x_w_u8mf2(xhc_0, vl); + + vuint16m1_t xhc_1 = __riscv_vncvt_x_x_w_u16m1(xha_1, vl); + vuint8mf2_t xh_1 = __riscv_vncvt_x_x_w_u8mf2(xhc_1, vl); + + // load + vuint8mf2_t tx = __riscv_vle8_v_u8mf2(x[ib].qs, vl); + + vint8mf2_t y0 = __riscv_vle8_v_i8mf2(y[ib].qs, vl); + vint8mf2_t y1 = __riscv_vle8_v_i8mf2(y[ib].qs+16, vl); + + vuint8mf2_t x_at = __riscv_vand_vx_u8mf2(tx, 0x0F, vl); + vuint8mf2_t x_lt = __riscv_vsrl_vx_u8mf2(tx, 0x04, vl); + + vuint8mf2_t x_a = __riscv_vor_vv_u8mf2(x_at, xh_0, vl); + vuint8mf2_t x_l = 
__riscv_vor_vv_u8mf2(x_lt, xh_1, vl); + + vint8mf2_t v0 = __riscv_vreinterpret_v_u8mf2_i8mf2(x_a); + vint8mf2_t v1 = __riscv_vreinterpret_v_u8mf2_i8mf2(x_l); + + vint16m1_t vec_mul1 = __riscv_vwmul_vv_i16m1(v0, y0, vl); + vint16m1_t vec_mul2 = __riscv_vwmul_vv_i16m1(v1, y1, vl); + + vint32m1_t vec_zero = __riscv_vmv_v_x_i32m1(0, vl); + + vint32m1_t vs1 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul1, vec_zero, vl); + vint32m1_t vs2 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul2, vs1, vl); + + int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); + + sumf += (GGML_FP16_TO_FP32(x[ib].d)*GGML_FP16_TO_FP32(y[ib].d))*sumi + GGML_FP16_TO_FP32(x[ib].m)*GGML_FP16_TO_FP32(y[ib].s); + } + +#elif defined(__POWER9_VECTOR__) + const vector signed char lowMask = vec_splats((signed char)0xF); + const vector signed int v0 = vec_splats((int32_t)0); + const vector unsigned char v4 = vec_splats((unsigned char)0x4); + + vector float vsumf0 = vec_splats(0.0f); + +#pragma GCC unroll 4 + for (; ib < nb; ++ib) { + __builtin_prefetch(x[ib].qs, 0, 1); + __builtin_prefetch(y[ib].qs, 0, 1); + + vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[ib].d)); + vector float vyd = vec_splats(GGML_FP16_TO_FP32(y[ib].d)); + vector float vd = vec_mul(vxd, vyd); + + vector float vxmin = vec_splats(GGML_FP16_TO_FP32(x[ib].m)); + vector float vys = {GGML_FP16_TO_FP32(y[ib].s), 0.f, 0.f, 0.f}; + vsumf0 = vec_madd(vxmin, vys, vsumf0); + + vector unsigned long long aux64x2_0 = {(uint64_t)(table_b2b_0[x[ib].qh[0]]), (uint64_t)(table_b2b_0[x[ib].qh[1]])}; + vector unsigned long long aux64x2_1 = {(uint64_t)(table_b2b_0[x[ib].qh[2]]), (uint64_t)(table_b2b_0[x[ib].qh[3]])}; + + vector signed char qh0 = (vector signed char)aux64x2_0; + vector signed char qh1 = (vector signed char)aux64x2_1; + + vector signed char qxs = (vector signed char)vec_xl( 0, x[ib].qs); + + vector unsigned char q5x0 = (vector unsigned char)vec_or(vec_and(qxs, lowMask), qh0); + vector unsigned char q5x1 = (vector unsigned char)vec_or(vec_sr(qxs, v4), qh1); + + 
vector signed char q8y0 = vec_xl( 0, y[ib].qs); + vector signed char q8y1 = vec_xl( 16, y[ib].qs); + + vector signed int vsumi0 = v0; + + vsumi0 = vec_msum(q8y0, q5x0, vsumi0); + vsumi0 = vec_msum(q8y1, q5x1, vsumi0); + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0); + } + + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8)); + + sumf = vec_extract(vsumf0, 0); + +#elif defined(__loongarch_asx) + // Initialize accumulator with zeros + __m256 acc = (__m256)__lasx_xvldi(0); + + float summs = 0.0f; + + // Main loop + for (; ib < nb; ++ib) { + const __m256 dx = __lasx_xvreplfr2vr_s(GGML_FP16_TO_FP32(x[ib].d)); + + summs += GGML_FP16_TO_FP32(x[ib].m) * GGML_FP16_TO_FP32(y[ib].s); + + __m256i qx = bytes_from_nibbles_32(x[ib].qs); + __m256i bxhi = bytes_from_bits_32(x[ib].qh); + bxhi = __lasx_xvand_v(bxhi, __lasx_xvreplgr2vr_b(0x10)); + qx = __lasx_xvor_v(qx, bxhi); + + const __m256 dy = __lasx_xvreplfr2vr_s(GGML_FP16_TO_FP32(y[ib].d)); + const __m256i qy = __lasx_xvld((const __m256i *)y[ib].qs, 0); + + const __m256 q = mul_sum_us8_pairs_float(qx, qy); + + acc = __lasx_xvfmadd_s(q, __lasx_xvfmul_s(dx, dy), acc); + } + + sumf = hsum_float_8(acc) + summs; +#endif + for (; ib < nb; ++ib) { + uint32_t qh; + memcpy(&qh, x[ib].qh, sizeof(qh)); + + int sumi0 = 0; + int sumi1 = 0; + + for (int j = 0; j < qk/2; ++j) { + const uint8_t xh_0 = ((qh >> (j + 0)) << 4) & 0x10; + const uint8_t xh_1 = ((qh >> (j + 12)) ) & 0x10; + + const int32_t x0 = (x[ib].qs[j] & 0xF) | xh_0; + const int32_t x1 = (x[ib].qs[j] >> 4) | xh_1; + + sumi0 += (x0 * y[ib].qs[j]); + sumi1 += (x1 * y[ib].qs[j + qk/2]); + } + + int sumi = sumi0 + sumi1; + sumf += (GGML_FP16_TO_FP32(x[ib].d)*GGML_FP16_TO_FP32(y[ib].d))*sumi + GGML_FP16_TO_FP32(x[ib].m)*GGML_FP16_TO_FP32(y[ib].s); + } + + *s = sumf; +} + +void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { 
+ const int qk = QK8_0; + const int nb = n / qk; + + assert(n % qk == 0); +#if defined(__ARM_FEATURE_MATMUL_INT8) + assert((nrc == 2) || (nrc == 1)); +#else + assert(nrc == 1); +#endif + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_q8_0 * restrict x = vx; + const block_q8_0 * restrict y = vy; + +#if defined(__ARM_FEATURE_MATMUL_INT8) + if (nrc == 2) { + const block_q8_0 * restrict vx0 = vx; + const block_q8_0 * restrict vx1 = (const block_q8_0 *) ((const uint8_t*)vx + bx); + const block_q8_0 * restrict vy0 = vy; + const block_q8_0 * restrict vy1 = (const block_q8_0 *) ((const uint8_t*)vy + by); + + float32x4_t sumv0 = vdupq_n_f32(0.0f); + + for (int i = 0; i < nb; i++) { + const block_q8_0 * restrict b_x0 = &vx0[i]; + const block_q8_0 * restrict b_y0 = &vy0[i]; + + const block_q8_0 * restrict b_x1 = &vx1[i]; + const block_q8_0 * restrict b_y1 = &vy1[i]; + + const int8x16_t x0_l = vld1q_s8(b_x0->qs); + const int8x16_t x0_h = vld1q_s8(b_x0->qs + 16); + const int8x16_t x1_l = vld1q_s8(b_x1->qs); + const int8x16_t x1_h = vld1q_s8(b_x1->qs + 16); + + // load y + const int8x16_t y0_l = vld1q_s8(b_y0->qs); + const int8x16_t y0_h = vld1q_s8(b_y0->qs + 16); + const int8x16_t y1_l = vld1q_s8(b_y1->qs); + const int8x16_t y1_h = vld1q_s8(b_y1->qs + 16); + + float32_t _scale[4] = {GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y1->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y1->d)}; + float32x4_t scale = vld1q_f32(_scale); + + int8x16_t l0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + int8x16_t l1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + + int8x16_t l2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + int8x16_t l3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_h), 
vreinterpretq_s64_s8(x1_h))); + + int8x16_t r0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + int8x16_t r1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + + int8x16_t r2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + int8x16_t r3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + + sumv0 = vmlaq_f32(sumv0,(vcvtq_f32_s32(vmmlaq_s32((vmmlaq_s32((vmmlaq_s32((vmmlaq_s32(vdupq_n_s32(0), l0, r0)), + l1, r1)), l2, r2)), l3, r3))), scale); + } + float32x4_t sumv1 = vextq_f32(sumv0, sumv0, 2); + float32x4_t sumv2 = vzip1q_f32(sumv0, sumv1); + + vst1_f32(s, vget_low_f32(sumv2)); + vst1_f32(s + bs, vget_high_f32(sumv2)); + return; + } +#endif + + int ib = 0; + float sumf = 0; + +#if defined(__ARM_FEATURE_SVE) + svfloat32_t sumv0 = svdup_n_f32(0.0f); + svfloat32_t sumv1 = svdup_n_f32(0.0f); + + const int vector_length = ggml_cpu_get_sve_cnt()*8; + + //VLA Implemenation for SVE + switch (vector_length) { + case 128: + { + // predicate for activating lanes for 16 Int8 elements + const svbool_t ph16 = svptrue_pat_b8 (SV_VL16); + const svbool_t pl16 = svptrue_pat_b32(SV_VL4); + + for (; ib + 1 < nb; ib += 2) { + const block_q8_0 * restrict x0 = &x[ib + 0]; + const block_q8_0 * restrict x1 = &x[ib + 1]; + const block_q8_0 * restrict y0 = &y[ib + 0]; + const block_q8_0 * restrict y1 = &y[ib + 1]; + + // load x + const svint8_t qx0_0 = svld1_s8(ph16, x0->qs); + const svint8_t qx0_1 = svld1_s8(ph16, x0->qs+16); + const svint8_t qx1_0 = svld1_s8(ph16, x1->qs); + const svint8_t qx1_1 = svld1_s8(ph16, x1->qs+16); + + // load y + const svint8_t qy0_0 = svld1_s8(ph16, y0->qs); + const svint8_t qy0_1 = svld1_s8(ph16, y0->qs+16); + const svint8_t qy1_0 = svld1_s8(ph16, y1->qs); + const svint8_t qy1_1 = svld1_s8(ph16, y1->qs+16); + + sumv0 = svmla_n_f32_x(pl16, sumv0, svcvt_f32_s32_x(pl16, svadd_x(pl16, + 
svdot_s32(svdup_n_s32(0), qx0_0, qy0_0), + svdot_s32(svdup_n_s32(0), qx0_1, qy0_1))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + sumv1 = svmla_n_f32_x(pl16, sumv1, svcvt_f32_s32_x(pl16, svadd_x(pl16, + svdot_s32(svdup_n_s32(0), qx1_0, qy1_0), + svdot_s32(svdup_n_s32(0), qx1_1, qy1_1))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); + } + + sumf = svaddv_f32(pl16, svadd_f32_x(pl16, sumv0, sumv1)); + } break; + case 256: + { + //printf("sve256"); + for (; ib + 1 < nb; ib += 2) { + const block_q8_0 * restrict x0 = &x[ib + 0]; + const block_q8_0 * restrict x1 = &x[ib + 1]; + const block_q8_0 * restrict y0 = &y[ib + 0]; + const block_q8_0 * restrict y1 = &y[ib + 1]; + + // load x + const svint8_t qx0 = svld1_s8(svptrue_b8(), x0->qs); + const svint8_t qx1 = svld1_s8(svptrue_b8(), x1->qs); + + // load y + const svint8_t qy0 = svld1_s8(svptrue_b8(), y0->qs); + const svint8_t qy1 = svld1_s8(svptrue_b8(), y1->qs); + + sumv0 = svmla_n_f32_x(svptrue_b32(), sumv0, svcvt_f32_s32_x(svptrue_b32(), + svdot_s32(svdup_n_s32(0), qx0, qy0)), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + sumv1 = svmla_n_f32_x(svptrue_b32(), sumv1, svcvt_f32_s32_x(svptrue_b32(), + svdot_s32(svdup_n_s32(0), qx1, qy1)), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); + } + + sumf = svaddv_f32(svptrue_b32(), svadd_f32_x(svptrue_b32(), sumv0, sumv1)); + } break; + case 512: + { + // predicate for activating high 256 bit + const svbool_t ph32 = svptrue_pat_b8(SV_VL32); + // predicate for activating low 256 bit + const svbool_t pl32 = svnot_b_z(svptrue_b8(), ph32); + + // predicate for activating high lanes for 8 float32 elements + const svbool_t ph8 = svptrue_pat_b32(SV_VL8); + // predicate for activating low lanes for 8 float32 elements + const svbool_t pl8 = svnot_b_z(svptrue_b32(), ph8); + + svfloat32_t sumv00 = svdup_n_f32(0.0f); + + for (; ib + 1 < nb; ib += 2) { + const block_q8_0 * restrict x0 = &x[ib + 0]; + const block_q8_0 * restrict x1 = &x[ib + 1]; + const block_q8_0 * 
restrict y0 = &y[ib + 0]; + const block_q8_0 * restrict y1 = &y[ib + 1]; + + //load 32 int8_t in first half of vector and put another 32 int8_t in second vector lower bits + // and add them to make one 64 element vector + // load x + const svint8_t qx_32 = svld1_s8(ph32, x0->qs); + svint8_t qx_64 = svld1_s8(pl32, x0->qs + 2); + + qx_64 = svadd_s8_x(svptrue_b8(), qx_32, qx_64); + + // load y + const svint8_t qy_32 = svld1_s8(ph32, y0->qs); + svint8_t qy_64 = svld1_s8(pl32, y0->qs + 2); + + qy_64 = svadd_s8_x(svptrue_b8(), qy_32, qy_64); + + // scale creation + const float32_t deq1 = GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d); + const float32_t deq2 = GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d); + + // duplicate deq1 in first half of vector and deq2 in second half of vector + const svfloat32_t temp = svdup_f32_m(svdup_f32_z(ph8, deq1), pl8, deq2); + + const svfloat32_t sumvt = svcvt_f32_s32_x(svptrue_b32(), svdot_s32(svdup_n_s32(0), qx_64, qy_64)); + + sumv00 = svmla_f32_m(svptrue_b32(), sumv00, sumvt, temp); + } + + sumf = svaddv_f32(svptrue_b32(), sumv00); + break; + } + default: + assert(false && "Unsupported vector length"); + break; + } +#elif defined(__ARM_NEON) + float32x4_t sumv0 = vdupq_n_f32(0.0f); + float32x4_t sumv1 = vdupq_n_f32(0.0f); + + for (; ib + 1 < nb; ib += 2) { + const block_q8_0 * restrict x0 = &x[ib + 0]; + const block_q8_0 * restrict x1 = &x[ib + 1]; + const block_q8_0 * restrict y0 = &y[ib + 0]; + const block_q8_0 * restrict y1 = &y[ib + 1]; + + const int8x16_t x0_0 = vld1q_s8(x0->qs); + const int8x16_t x0_1 = vld1q_s8(x0->qs + 16); + const int8x16_t x1_0 = vld1q_s8(x1->qs); + const int8x16_t x1_1 = vld1q_s8(x1->qs + 16); + + // load y + const int8x16_t y0_0 = vld1q_s8(y0->qs); + const int8x16_t y0_1 = vld1q_s8(y0->qs + 16); + const int8x16_t y1_0 = vld1q_s8(y1->qs); + const int8x16_t y1_1 = vld1q_s8(y1->qs + 16); + + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( + ggml_vdotq_s32(vdupq_n_s32(0), x0_0, y0_0), + 
ggml_vdotq_s32(vdupq_n_s32(0), x0_1, y0_1))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( + ggml_vdotq_s32(vdupq_n_s32(0), x1_0, y1_0), + ggml_vdotq_s32(vdupq_n_s32(0), x1_1, y1_1))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); + } + + sumf = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); +#elif defined(__AVX2__) || defined(__AVX__) + // Initialize accumulator with zeros + __m256 acc = _mm256_setzero_ps(); + + // Main loop + for (; ib < nb; ++ib) { + // Compute combined scale for the block + const __m256 d = _mm256_set1_ps(GGML_FP16_TO_FP32(x[ib].d) * GGML_FP16_TO_FP32(y[ib].d)); + __m256i qx = _mm256_loadu_si256((const __m256i *)x[ib].qs); + __m256i qy = _mm256_loadu_si256((const __m256i *)y[ib].qs); + + const __m256 q = mul_sum_i8_pairs_float(qx, qy); + + // Multiply q with scale and accumulate +#if defined(__AVX2__) + acc = _mm256_fmadd_ps( d, q, acc ); +#else + acc = _mm256_add_ps( _mm256_mul_ps( d, q ), acc ); +#endif + } + + sumf = hsum_float_8(acc); +#elif defined(__riscv_v_intrinsic) + size_t vl = __riscv_vsetvl_e8m1(qk); + + for (; ib < nb; ++ib) { + // load elements + vint8m1_t bx_0 = __riscv_vle8_v_i8m1(x[ib].qs, vl); + vint8m1_t by_0 = __riscv_vle8_v_i8m1(y[ib].qs, vl); + + vint16m2_t vw_mul = __riscv_vwmul_vv_i16m2(bx_0, by_0, vl); + + vint32m1_t v_zero = __riscv_vmv_v_x_i32m1(0, vl); + vint32m1_t v_sum = __riscv_vwredsum_vs_i16m2_i32m1(vw_mul, v_zero, vl); + + int sumi = __riscv_vmv_x_s_i32m1_i32(v_sum); + + sumf += sumi*(GGML_FP16_TO_FP32(x[ib].d)*GGML_FP16_TO_FP32(y[ib].d)); + } +#elif defined(__POWER9_VECTOR__) + const vector signed int v0 = vec_splats((int32_t)0); + vector float vsumf0 = vec_splats(0.0f); + +#pragma GCC unroll 8 + for (; ib < nb; ++ib) { + __builtin_prefetch(x[ib].qs, 0, 1); + __builtin_prefetch(y[ib].qs, 0, 1); + + vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[ib].d)); + vector float vyd = vec_splats(GGML_FP16_TO_FP32(y[ib].d)); + vector float vd = vec_mul(vxd, 
vyd); + + vector signed char q8x0 = vec_xl( 0, x[ib].qs); + vector signed char q8x1 = vec_xl(16, x[ib].qs); + vector signed char q8y0 = vec_xl( 0, y[ib].qs); + vector signed char q8y1 = vec_xl(16, y[ib].qs); + + vector signed short qv0 = vec_mule(q8x0, q8y0); + vector signed short qv1 = vec_mulo(q8x0, q8y0); + vector signed short qv2 = vec_mule(q8x1, q8y1); + vector signed short qv3 = vec_mulo(q8x1, q8y1); + + vector signed int vsumi0 = v0; + vector signed int vsumi1 = v0; + + vsumi0 = vec_sum4s(qv0, vsumi0); + vsumi1 = vec_sum4s(qv1, vsumi1); + vsumi0 = vec_sum4s(qv2, vsumi0); + vsumi1 = vec_sum4s(qv3, vsumi1); + + vsumi0 = vec_add(vsumi0, vsumi1); + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0); + } + + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8)); + + sumf = vec_extract(vsumf0, 0); + +#elif defined(__loongarch_asx) + // Initialize accumulator with zeros + __m256 acc = (__m256)__lasx_xvldi(0); + + // Main loop + for (; ib < nb; ++ib) { + // Compute combined scale for the block + const __m256 d = __lasx_xvreplfr2vr_s(GGML_FP16_TO_FP32(x[ib].d) * GGML_FP16_TO_FP32(y[ib].d)); + __m256i qx = __lasx_xvld((const __m256i *)x[ib].qs, 0); + __m256i qy = __lasx_xvld((const __m256i *)y[ib].qs, 0); + + const __m256 q = mul_sum_i8_pairs_float(qx, qy); + + // Multiply q with scale and accumulate + acc = __lasx_xvfmadd_s( d, q, acc ); + } + + sumf = hsum_float_8(acc); +#endif + for (; ib < nb; ++ib) { + int sumi = 0; + + for (int j = 0; j < qk; j++) { + sumi += x[ib].qs[j]*y[ib].qs[j]; + } + + sumf += sumi*(GGML_FP16_TO_FP32(x[ib].d)*GGML_FP16_TO_FP32(y[ib].d)); + } + + *s = sumf; +} + +void ggml_vec_dot_tq1_0_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_tq1_0 * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n 
/ QK_K; + +#if defined(__ARM_NEON) + float sumf = 0.0f; + + uint8_t k_shift[16] = {1, 1, 1, 1, 3, 3, 3, 3, 9, 9, 9, 9, 27, 27, 27, 27}; + + const uint8x16_t shift = vld1q_u8(k_shift); + + for (int i = 0; i < nb; ++i) { +#if defined(__ARM_FEATURE_DOTPROD) + int32x4_t sumi0 = vdupq_n_s32(0); + int32x4_t sumi1 = vdupq_n_s32(0); +#else + int16x8_t sumi0 = vdupq_n_s16(0); + int16x8_t sumi1 = vdupq_n_s16(0); +#endif + + // first 32 bytes of 5 elements + { + uint8x16_t qx0 = vld1q_u8(x[i].qs + 0); + uint8x16_t qx1 = vld1q_u8(x[i].qs + 16); + uint8x16_t qx2 = vmulq_u8(qx0, vdupq_n_u8(3)); + uint8x16_t qx3 = vmulq_u8(qx1, vdupq_n_u8(3)); + uint8x16_t qx4 = vmulq_u8(qx0, vdupq_n_u8(9)); + uint8x16_t qx5 = vmulq_u8(qx1, vdupq_n_u8(9)); + uint8x16_t qx6 = vmulq_u8(qx0, vdupq_n_u8(27)); + uint8x16_t qx7 = vmulq_u8(qx1, vdupq_n_u8(27)); + uint8x16_t qx8 = vmulq_u8(qx0, vdupq_n_u8(81)); + uint8x16_t qx9 = vmulq_u8(qx1, vdupq_n_u8(81)); + + // multiply by 3 and keep the 2 bits above 8 bits + int8x16_t sqx0 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx0, vshrq_n_u8(qx0, 1)), 6)); + int8x16_t sqx1 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx1, vshrq_n_u8(qx1, 1)), 6)); + int8x16_t sqx2 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx2, vshrq_n_u8(qx2, 1)), 6)); + int8x16_t sqx3 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx3, vshrq_n_u8(qx3, 1)), 6)); + int8x16_t sqx4 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx4, vshrq_n_u8(qx4, 1)), 6)); + int8x16_t sqx5 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx5, vshrq_n_u8(qx5, 1)), 6)); + int8x16_t sqx6 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx6, vshrq_n_u8(qx6, 1)), 6)); + int8x16_t sqx7 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx7, vshrq_n_u8(qx7, 1)), 6)); + int8x16_t sqx8 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx8, vshrq_n_u8(qx8, 1)), 6)); + int8x16_t sqx9 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx9, vshrq_n_u8(qx9, 1)), 6)); + + const int8x16_t qy0 = vld1q_s8(y[i].qs + 0); + const int8x16_t qy1 = vld1q_s8(y[i].qs + 16); + 
const int8x16_t qy2 = vld1q_s8(y[i].qs + 32); + const int8x16_t qy3 = vld1q_s8(y[i].qs + 48); + const int8x16_t qy4 = vld1q_s8(y[i].qs + 64); + const int8x16_t qy5 = vld1q_s8(y[i].qs + 80); + const int8x16_t qy6 = vld1q_s8(y[i].qs + 96); + const int8x16_t qy7 = vld1q_s8(y[i].qs + 112); + const int8x16_t qy8 = vld1q_s8(y[i].qs + 128); + const int8x16_t qy9 = vld1q_s8(y[i].qs + 144); + +#if defined(__ARM_FEATURE_DOTPROD) + sumi0 = vdotq_s32(sumi0, sqx0, qy0); + sumi1 = vdotq_s32(sumi1, sqx1, qy1); + sumi0 = vdotq_s32(sumi0, sqx2, qy2); + sumi1 = vdotq_s32(sumi1, sqx3, qy3); + sumi0 = vdotq_s32(sumi0, sqx4, qy4); + sumi1 = vdotq_s32(sumi1, sqx5, qy5); + sumi0 = vdotq_s32(sumi0, sqx6, qy6); + sumi1 = vdotq_s32(sumi1, sqx7, qy7); + sumi0 = vdotq_s32(sumi0, sqx8, qy8); + sumi1 = vdotq_s32(sumi1, sqx9, qy9); +#else + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx0), vget_low_s8(qy0)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx0), vget_high_s8(qy0)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx1), vget_low_s8(qy1)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx1), vget_high_s8(qy1)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx2), vget_low_s8(qy2)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx2), vget_high_s8(qy2)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx3), vget_low_s8(qy3)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx3), vget_high_s8(qy3)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx4), vget_low_s8(qy4)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx4), vget_high_s8(qy4)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx5), vget_low_s8(qy5)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx5), vget_high_s8(qy5)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx6), vget_low_s8(qy6)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx6), vget_high_s8(qy6)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx7), vget_low_s8(qy7)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx7), vget_high_s8(qy7)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx8), vget_low_s8(qy8)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx8), vget_high_s8(qy8)); + sumi0 = 
vmlal_s8(sumi0, vget_low_s8(sqx9), vget_low_s8(qy9)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx9), vget_high_s8(qy9)); +#endif + } + + // last 16 bytes of 5-element, along with the 4 bytes of 4 elements + { + uint8x16_t qx0 = vld1q_u8(x[i].qs + 32); + uint8x16_t qx1 = vmulq_u8(qx0, vdupq_n_u8(3)); + uint8x16_t qx2 = vmulq_u8(qx0, vdupq_n_u8(9)); + uint8x16_t qx3 = vmulq_u8(qx0, vdupq_n_u8(27)); + uint8x16_t qx4 = vmulq_u8(qx0, vdupq_n_u8(81)); + uint32_t qh; + memcpy(&qh, x[i].qh, sizeof(qh)); // potentially unaligned + uint8x16_t qx5 = vreinterpretq_u8_u32(vdupq_n_u32(qh)); + qx5 = vmulq_u8(qx5, shift); + + // multiply by 3 and keep the 2 bits above 8 bits + int8x16_t sqx0 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx0, vshrq_n_u8(qx0, 1)), 6)); + int8x16_t sqx1 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx1, vshrq_n_u8(qx1, 1)), 6)); + int8x16_t sqx2 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx2, vshrq_n_u8(qx2, 1)), 6)); + int8x16_t sqx3 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx3, vshrq_n_u8(qx3, 1)), 6)); + int8x16_t sqx4 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx4, vshrq_n_u8(qx4, 1)), 6)); + int8x16_t sqx5 = vreinterpretq_s8_u8(vshrq_n_u8(vhaddq_u8(qx5, vshrq_n_u8(qx5, 1)), 6)); + + const int8x16_t qy0 = vld1q_s8(y[i].qs + 160); + const int8x16_t qy1 = vld1q_s8(y[i].qs + 176); + const int8x16_t qy2 = vld1q_s8(y[i].qs + 192); + const int8x16_t qy3 = vld1q_s8(y[i].qs + 208); + const int8x16_t qy4 = vld1q_s8(y[i].qs + 224); + const int8x16_t qy5 = vld1q_s8(y[i].qs + 240); + +#if defined(__ARM_FEATURE_DOTPROD) + sumi0 = vdotq_s32(sumi0, sqx0, qy0); + sumi1 = vdotq_s32(sumi1, sqx1, qy1); + sumi0 = vdotq_s32(sumi0, sqx2, qy2); + sumi1 = vdotq_s32(sumi1, sqx3, qy3); + sumi0 = vdotq_s32(sumi0, sqx4, qy4); + sumi1 = vdotq_s32(sumi1, sqx5, qy5); +#else + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx0), vget_low_s8(qy0)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx0), vget_high_s8(qy0)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx1), vget_low_s8(qy1)); + sumi1 = 
vmlal_s8(sumi1, vget_high_s8(sqx1), vget_high_s8(qy1)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx2), vget_low_s8(qy2)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx2), vget_high_s8(qy2)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx3), vget_low_s8(qy3)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx3), vget_high_s8(qy3)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx4), vget_low_s8(qy4)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx4), vget_high_s8(qy4)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx5), vget_low_s8(qy5)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx5), vget_high_s8(qy5)); +#endif + } + + const int16x8_t ysum0 = vld1q_s16(y[i].bsums); + const int16x8_t ysum1 = vld1q_s16(y[i].bsums + 8); + + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + +#if defined(__ARM_FEATURE_DOTPROD) + sumi0 = vaddq_s32(sumi0, sumi1); + sumi0 = vsubq_s32(sumi0, vpaddlq_s16(vaddq_s16(ysum0, ysum1))); + + sumf += d * (float) vaddvq_s32(sumi0); +#else + sumi0 = vaddq_s16(sumi0, sumi1); + sumi0 = vsubq_s16(sumi0, vaddq_s16(ysum0, ysum1)); + + sumf += d * (float) vaddlvq_s16(sumi0); +#endif + } + + *s = sumf; + +#elif defined(__AVX2__) + __m256 sumf = _mm256_setzero_ps(); + + for (int i = 0; i < nb; ++i) { + // 16-bit sums + __m256i sumi0 = _mm256_setzero_si256(); + __m256i sumi1 = _mm256_setzero_si256(); + __m256i sumi2 = _mm256_setzero_si256(); + + // first 32 bytes of 5 elements + { + __m256i qx0 = _mm256_loadu_si256((const __m256i *) (x[i].qs)); + // 8-bit multiplies with shifts, masks and adds + __m256i qx1 = _mm256_add_epi8(qx0, _mm256_add_epi8(qx0, qx0)); // 1 * 3 + __m256i qx2 = _mm256_add_epi8(_mm256_and_si256(_mm256_slli_epi16(qx0, 3), _mm256_set1_epi8(-8)), qx0); // 1 * 9 + __m256i qx3 = _mm256_add_epi8(_mm256_and_si256(_mm256_slli_epi16(qx1, 3), _mm256_set1_epi8(-8)), qx1); // 3 * 9 + __m256i qx4 = _mm256_add_epi8(_mm256_and_si256(_mm256_slli_epi16(qx2, 3), _mm256_set1_epi8(-8)), qx2); // 9 * 9 + + // TODO: can _mm256_mulhi_epu16 be faster even if 16-bits? 
+ + // Cancel the +1 from avg so that it behaves like a halving add + qx0 = _mm256_subs_epu8(qx0, _mm256_set1_epi8(1)); + qx1 = _mm256_subs_epu8(qx1, _mm256_set1_epi8(1)); + qx2 = _mm256_subs_epu8(qx2, _mm256_set1_epi8(1)); + qx3 = _mm256_subs_epu8(qx3, _mm256_set1_epi8(1)); + qx4 = _mm256_subs_epu8(qx4, _mm256_set1_epi8(1)); + // Multiply by 3 and get the top 2 bits + qx0 = _mm256_avg_epu8(qx0, _mm256_avg_epu8(qx0, _mm256_setzero_si256())); + qx1 = _mm256_avg_epu8(qx1, _mm256_avg_epu8(qx1, _mm256_setzero_si256())); + qx2 = _mm256_avg_epu8(qx2, _mm256_avg_epu8(qx2, _mm256_setzero_si256())); + qx3 = _mm256_avg_epu8(qx3, _mm256_avg_epu8(qx3, _mm256_setzero_si256())); + qx4 = _mm256_avg_epu8(qx4, _mm256_avg_epu8(qx4, _mm256_setzero_si256())); + qx0 = _mm256_and_si256(_mm256_srli_epi16(qx0, 6), _mm256_set1_epi8(3)); + qx1 = _mm256_and_si256(_mm256_srli_epi16(qx1, 6), _mm256_set1_epi8(3)); + qx2 = _mm256_and_si256(_mm256_srli_epi16(qx2, 6), _mm256_set1_epi8(3)); + qx3 = _mm256_and_si256(_mm256_srli_epi16(qx3, 6), _mm256_set1_epi8(3)); + qx4 = _mm256_and_si256(_mm256_srli_epi16(qx4, 6), _mm256_set1_epi8(3)); + + const __m256i qy0 = _mm256_loadu_si256((const __m256i *) (y[i].qs + 0)); + const __m256i qy1 = _mm256_loadu_si256((const __m256i *) (y[i].qs + 32)); + const __m256i qy2 = _mm256_loadu_si256((const __m256i *) (y[i].qs + 64)); + const __m256i qy3 = _mm256_loadu_si256((const __m256i *) (y[i].qs + 96)); + const __m256i qy4 = _mm256_loadu_si256((const __m256i *) (y[i].qs + 128)); + + qx0 = _mm256_maddubs_epi16(qx0, qy0); + qx1 = _mm256_maddubs_epi16(qx1, qy1); + qx2 = _mm256_maddubs_epi16(qx2, qy2); + qx3 = _mm256_maddubs_epi16(qx3, qy3); + qx4 = _mm256_maddubs_epi16(qx4, qy4); + + sumi0 = _mm256_add_epi16(sumi0, _mm256_add_epi16(qx0, qx1)); + sumi1 = _mm256_add_epi16(sumi1, _mm256_add_epi16(qx2, qx3)); + sumi2 = _mm256_add_epi16(sumi2, qx4); + } + + // last 16 bytes of 5-element, along with the 4 bytes of 4 elements + { + __m128i qx0 = _mm_loadu_si128((const __m128i 
*) (x[i].qs + 32)); + uint32_t qh; + memcpy(&qh, x[i].qh, sizeof(qh)); // potentially unaligned + __m256i qx5_l = _mm256_cvtepu8_epi16(_mm_set1_epi32(qh)); + __m128i qx1 = _mm_add_epi8(qx0, _mm_add_epi8(qx0, qx0)); // 1 * 3 + __m128i qx2 = _mm_add_epi8(_mm_and_si128(_mm_slli_epi16(qx0, 3), _mm_set1_epi8(-8)), qx0); // 1 * 9 + __m128i qx3 = _mm_add_epi8(_mm_and_si128(_mm_slli_epi16(qx1, 3), _mm_set1_epi8(-8)), qx1); // 3 * 9 + __m128i qx4 = _mm_add_epi8(_mm_and_si128(_mm_slli_epi16(qx2, 3), _mm_set1_epi8(-8)), qx2); // 9 * 9 + __m256i qx01 = MM256_SET_M128I(qx1, qx0); + __m256i qx23 = MM256_SET_M128I(qx3, qx2); + + // avx2 does not have 8-bit multiplies, so 16-bit it is. + qx5_l = _mm256_mullo_epi16(qx5_l, _mm256_set_epi16(27, 27, 27, 27, 9, 9, 9, 9, 3, 3, 3, 3, 1, 1, 1, 1)); + qx5_l = _mm256_and_si256(qx5_l, _mm256_set1_epi16(0xFF)); + __m128i qx5 = _mm_packus_epi16(_mm256_castsi256_si128(qx5_l), _mm256_extracti128_si256(qx5_l, 1)); + + __m256i qx45 = MM256_SET_M128I(qx5, qx4); + + // Cancel the +1 from avg so that it behaves like a halving add + qx01 = _mm256_subs_epu8(qx01, _mm256_set1_epi8(1)); + qx23 = _mm256_subs_epu8(qx23, _mm256_set1_epi8(1)); + qx45 = _mm256_subs_epu8(qx45, _mm256_set1_epi8(1)); + // Multiply by 3 and get the top 2 bits + qx01 = _mm256_avg_epu8(qx01, _mm256_avg_epu8(qx01, _mm256_setzero_si256())); + qx23 = _mm256_avg_epu8(qx23, _mm256_avg_epu8(qx23, _mm256_setzero_si256())); + qx45 = _mm256_avg_epu8(qx45, _mm256_avg_epu8(qx45, _mm256_setzero_si256())); + qx01 = _mm256_and_si256(_mm256_srli_epi16(qx01, 6), _mm256_set1_epi8(3)); + qx23 = _mm256_and_si256(_mm256_srli_epi16(qx23, 6), _mm256_set1_epi8(3)); + qx45 = _mm256_and_si256(_mm256_srli_epi16(qx45, 6), _mm256_set1_epi8(3)); + + const __m256i qy01 = _mm256_loadu_si256((const __m256i *) (y[i].qs + 160)); + const __m256i qy23 = _mm256_loadu_si256((const __m256i *) (y[i].qs + 192)); + const __m256i qy45 = _mm256_loadu_si256((const __m256i *) (y[i].qs + 224)); + + qx01 = 
_mm256_maddubs_epi16(qx01, qy01); + qx23 = _mm256_maddubs_epi16(qx23, qy23); + qx45 = _mm256_maddubs_epi16(qx45, qy45); + + sumi0 = _mm256_add_epi16(sumi0, qx01); + sumi1 = _mm256_add_epi16(sumi1, qx23); + sumi2 = _mm256_add_epi16(sumi2, qx45); + } + + const __m256i ysum = _mm256_loadu_si256((const __m256i *) y[i].bsums); + const __m256 d = _mm256_set1_ps(y[i].d * GGML_FP16_TO_FP32(x[i].d)); + + sumi0 = _mm256_sub_epi16(sumi0, ysum); + sumi0 = _mm256_add_epi16(sumi0, _mm256_add_epi16(sumi1, sumi2)); + sumi0 = _mm256_madd_epi16(sumi0, _mm256_set1_epi16(1)); + + sumf = _mm256_add_ps(_mm256_mul_ps(_mm256_cvtepi32_ps(sumi0), d), sumf); + } + + *s = hsum_float_8(sumf); + +#else + const uint8_t pow3[6] = {1, 3, 9, 27, 81, 243}; + + float sumf = 0.0f; + + for (int i = 0; i < nb; ++i) { + int sum = 0; + + for (size_t j = 0; j < sizeof(x->qs) - sizeof(x->qs) % 32; j += 32) { + for (size_t l = 0; l < 5; ++l) { + for (size_t m = 0; m < 32; ++m) { + uint8_t q = x[i].qs[j + m] * pow3[l]; + uint16_t xi = ((uint16_t) q * 3) >> 8; + sum += (xi - 1) * y[i].qs[j*5 + l*32 + m]; + } + } + } + for (size_t j = sizeof(x->qs) - sizeof(x->qs) % 32; j < sizeof(x->qs); j += 16) { + for (size_t l = 0; l < 5; ++l) { + for (size_t m = 0; m < 16; ++m) { + uint8_t q = x[i].qs[j + m] * pow3[l]; + uint16_t xi = ((uint16_t) q * 3) >> 8; + sum += (xi - 1) * y[i].qs[j*5 + l*16 + m]; + } + } + } + + for (size_t l = 0; l < 4; ++l) { + for (size_t j = 0; j < sizeof(x->qh); ++j) { + uint8_t q = x[i].qh[j] * pow3[l]; + uint16_t xi = ((uint16_t) q * 3) >> 8; + sum += (xi - 1) * y[i].qs[sizeof(x->qs)*5 + l*sizeof(x->qh) + j]; + } + } + + sumf += (float) sum * (GGML_FP16_TO_FP32(x[i].d) * y[i].d); + } + + *s = sumf; +#endif +} + +void ggml_vec_dot_tq2_0_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_tq2_0 * restrict x = vx; + const 
block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined(__ARM_NEON) + float sumf = 0.0f; + + const uint8x16_t m3 = vdupq_n_u8(3); + + for (int i = 0; i < nb; ++i) { +#if defined(__ARM_FEATURE_DOTPROD) + int32x4_t sumi0 = vdupq_n_s32(0); + int32x4_t sumi1 = vdupq_n_s32(0); +#else + int16x8_t sumi0 = vdupq_n_s16(0); + int16x8_t sumi1 = vdupq_n_s16(0); +#endif + + for (size_t j = 0; j < sizeof(x->qs); j += 32) { + uint8x16_t qx0 = vld1q_u8(x[i].qs + j); + uint8x16_t qx1 = vld1q_u8(x[i].qs + j + 16); + uint8x16_t qx2 = vshrq_n_u8(qx0, 2); + uint8x16_t qx3 = vshrq_n_u8(qx1, 2); + uint8x16_t qx4 = vshrq_n_u8(qx0, 4); + uint8x16_t qx5 = vshrq_n_u8(qx1, 4); + uint8x16_t qx6 = vshrq_n_u8(qx0, 6); + uint8x16_t qx7 = vshrq_n_u8(qx1, 6); + + int8x16_t sqx0 = vreinterpretq_s8_u8(vandq_u8(qx0, m3)); + int8x16_t sqx1 = vreinterpretq_s8_u8(vandq_u8(qx1, m3)); + int8x16_t sqx2 = vreinterpretq_s8_u8(vandq_u8(qx2, m3)); + int8x16_t sqx3 = vreinterpretq_s8_u8(vandq_u8(qx3, m3)); + int8x16_t sqx4 = vreinterpretq_s8_u8(vandq_u8(qx4, m3)); + int8x16_t sqx5 = vreinterpretq_s8_u8(vandq_u8(qx5, m3)); + int8x16_t sqx6 = vreinterpretq_s8_u8(vandq_u8(qx6, m3)); + int8x16_t sqx7 = vreinterpretq_s8_u8(vandq_u8(qx7, m3)); + + const int8x16_t qy0 = vld1q_s8(y[i].qs + j*4 + 0); + const int8x16_t qy1 = vld1q_s8(y[i].qs + j*4 + 16); + const int8x16_t qy2 = vld1q_s8(y[i].qs + j*4 + 32); + const int8x16_t qy3 = vld1q_s8(y[i].qs + j*4 + 48); + const int8x16_t qy4 = vld1q_s8(y[i].qs + j*4 + 64); + const int8x16_t qy5 = vld1q_s8(y[i].qs + j*4 + 80); + const int8x16_t qy6 = vld1q_s8(y[i].qs + j*4 + 96); + const int8x16_t qy7 = vld1q_s8(y[i].qs + j*4 + 112); + +#if defined(__ARM_FEATURE_DOTPROD) + sumi0 = vdotq_s32(sumi0, sqx0, qy0); + sumi1 = vdotq_s32(sumi1, sqx1, qy1); + sumi0 = vdotq_s32(sumi0, sqx2, qy2); + sumi1 = vdotq_s32(sumi1, sqx3, qy3); + sumi0 = vdotq_s32(sumi0, sqx4, qy4); + sumi1 = vdotq_s32(sumi1, sqx5, qy5); + sumi0 = vdotq_s32(sumi0, sqx6, qy6); + sumi1 = vdotq_s32(sumi1, 
sqx7, qy7); +#else + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx0), vget_low_s8(qy0)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx0), vget_high_s8(qy0)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx1), vget_low_s8(qy1)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx1), vget_high_s8(qy1)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx2), vget_low_s8(qy2)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx2), vget_high_s8(qy2)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx3), vget_low_s8(qy3)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx3), vget_high_s8(qy3)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx4), vget_low_s8(qy4)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx4), vget_high_s8(qy4)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx5), vget_low_s8(qy5)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx5), vget_high_s8(qy5)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx6), vget_low_s8(qy6)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx6), vget_high_s8(qy6)); + sumi0 = vmlal_s8(sumi0, vget_low_s8(sqx7), vget_low_s8(qy7)); + sumi1 = vmlal_s8(sumi1, vget_high_s8(sqx7), vget_high_s8(qy7)); +#endif + } + + const int16x8_t ysum0 = vld1q_s16(y[i].bsums); + const int16x8_t ysum1 = vld1q_s16(y[i].bsums + 8); + + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + +#if defined(__ARM_FEATURE_DOTPROD) + sumi0 = vaddq_s32(sumi0, sumi1); + sumi0 = vsubq_s32(sumi0, vpaddlq_s16(vaddq_s16(ysum0, ysum1))); + + sumf += d * (float) vaddvq_s32(sumi0); +#else + sumi0 = vaddq_s16(sumi0, sumi1); + sumi0 = vsubq_s16(sumi0, vaddq_s16(ysum0, ysum1)); + + sumf += d * (float) vaddlvq_s16(sumi0); +#endif + } + + *s = sumf; + +#elif defined(__AVX2__) + __m256 sumf = _mm256_setzero_ps(); + + for (int i = 0; i < nb; ++i) { + // 16-bit sums, because 256*127 still fits + __m256i sumi0 = _mm256_setzero_si256(); + __m256i sumi1 = _mm256_setzero_si256(); + + for (size_t j = 0; j < sizeof(x->qs); j += 32) { + __m256i qx0 = _mm256_loadu_si256((const __m256i *) (x[i].qs + j)); + __m256i qx1 = _mm256_srli_epi16(qx0, 2); + __m256i qx2 = 
_mm256_srli_epi16(qx0, 4);
            __m256i qx3 = _mm256_srli_epi16(qx0, 6);

            // 0, 1, 2 (should not be 3)
            qx0 = _mm256_and_si256(qx0, _mm256_set1_epi8(3));
            qx1 = _mm256_and_si256(qx1, _mm256_set1_epi8(3));
            qx2 = _mm256_and_si256(qx2, _mm256_set1_epi8(3));
            qx3 = _mm256_and_si256(qx3, _mm256_set1_epi8(3));

            const __m256i qy0 = _mm256_loadu_si256((const __m256i *) (y[i].qs + j*4 +  0));
            const __m256i qy1 = _mm256_loadu_si256((const __m256i *) (y[i].qs + j*4 + 32));
            const __m256i qy2 = _mm256_loadu_si256((const __m256i *) (y[i].qs + j*4 + 64));
            const __m256i qy3 = _mm256_loadu_si256((const __m256i *) (y[i].qs + j*4 + 96));

            qx0 = _mm256_maddubs_epi16(qx0, qy0);
            qx1 = _mm256_maddubs_epi16(qx1, qy1);
            qx2 = _mm256_maddubs_epi16(qx2, qy2);
            qx3 = _mm256_maddubs_epi16(qx3, qy3);

            sumi0 = _mm256_add_epi16(sumi0, _mm256_add_epi16(qx0, qx1));
            sumi1 = _mm256_add_epi16(sumi1, _mm256_add_epi16(qx2, qx3));
        }

        const __m256i ysum = _mm256_loadu_si256((const __m256i *) y[i].bsums);
        const __m256 d = _mm256_set1_ps(y[i].d * GGML_FP16_TO_FP32(x[i].d));

        // subtract the per-sub-block q8 sums to shift the unsigned {0,1,2}
        // quants down to the signed {-1,0,1} values before scaling
        sumi0 = _mm256_add_epi16(sumi0, sumi1);
        sumi0 = _mm256_sub_epi16(sumi0, ysum);
        sumi0 = _mm256_madd_epi16(sumi0, _mm256_set1_epi16(1));

        sumf = _mm256_add_ps(_mm256_mul_ps(_mm256_cvtepi32_ps(sumi0), d), sumf);
    }

    *s = hsum_float_8(sumf);

#else
    // scalar reference path
    float sumf = 0.0f;

    for (int i = 0; i < nb; ++i) {
        int32_t sumi = 0;

        for (size_t j = 0; j < sizeof(x->qs); j += 32) {
            for (size_t l = 0; l < 4; ++l) {
                for (size_t k = 0; k < 32; ++k) {
                    // unpack 2-bit quants and re-center to {-1, 0, 1}
                    sumi += y[i].qs[j*4 + l*32 + k] * (((x[i].qs[j + k] >> (l*2)) & 3) - 1);
                }
            }
        }

        const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d);

        sumf += (float) sumi * d;
    }

    *s = sumf;
#endif
}

// Dot product of one row of Q2_K-quantized weights (vx) with one row of
// Q8_K-quantized activations (vy), writing the scalar result to *s.
// n is the number of values (nb = n/QK_K super-blocks are processed).
// nrc must be 1 (asserted below); bs/bx/by are unused in this variant.
// Each Q2_K super-block stores 2-bit quants (qs), packed 4-bit
// scales/mins per 16-value sub-block (scales), and fp16 super-block
// scales d/dmin; the q8_K side provides per-sub-block sums in bsums so
// the "mins" contribution can be folded in without unpacking q8.
// One SIMD path is compiled per architecture; the final #else branch is
// the scalar reference implementation.
void ggml_vec_dot_q2_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) {
    assert(nrc == 1);
    UNUSED(nrc);
    UNUSED(bx);
    UNUSED(by);
    UNUSED(bs);

    const block_q2_K * restrict x = vx;
    const block_q8_K * restrict y = vy;

    const int nb = n / QK_K;

#ifdef __ARM_NEON
    // ARM NEON path
    const uint8x16_t m3 = vdupq_n_u8(0x3);
    const uint8x16_t m4 = vdupq_n_u8(0xF);

    const int32x4_t vzero = vdupq_n_s32(0);

    ggml_int8x16x2_t q2bytes;
    uint8_t aux[16];

    float sum = 0;

    for (int i = 0; i < nb; ++i) {
        const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d);
        // negated so the mins contribution can be added below
        const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin);

        const uint8_t * restrict q2 = x[i].qs;
        const int8_t  * restrict q8 = y[i].qs;
        const uint8_t * restrict sc = x[i].scales;

        // low nibbles = scales, high nibbles = mins
        const uint8x16_t mins_and_scales = vld1q_u8(sc);
        const uint8x16_t scales = vandq_u8(mins_and_scales, m4);
        vst1q_u8(aux, scales);

        // mins * bsums gives the constant offset of each sub-block
        const uint8x16_t mins = vshrq_n_u8(mins_and_scales, 4);
        const ggml_int16x8x2_t q8sums = ggml_vld1q_s16_x2(y[i].bsums);
        const ggml_int16x8x2_t mins16 = {{vreinterpretq_s16_u16(vmovl_u8(vget_low_u8(mins))), vreinterpretq_s16_u16(vmovl_u8(vget_high_u8(mins)))}};
        const int32x4_t s0 = vaddq_s32(vmull_s16(vget_low_s16 (mins16.val[0]), vget_low_s16 (q8sums.val[0])),
                                       vmull_s16(vget_high_s16(mins16.val[0]), vget_high_s16(q8sums.val[0])));
        const int32x4_t s1 = vaddq_s32(vmull_s16(vget_low_s16 (mins16.val[1]), vget_low_s16 (q8sums.val[1])),
                                       vmull_s16(vget_high_s16(mins16.val[1]), vget_high_s16(q8sums.val[1])));
        sum += dmin * vaddvq_s32(vaddq_s32(s0, s1));

        int isum = 0;
        int is = 0;

// We use this macro instead of a function call because for some reason
// the code runs 2-3% slower, even if the function is declared inline
#define MULTIPLY_ACCUM_WITH_SCALE(index)\
        isum += vaddvq_s32(ggml_vdotq_s32(vzero, q2bytes.val[0], q8bytes.val[0])) * aux[is+(index)];\
        isum += vaddvq_s32(ggml_vdotq_s32(vzero, q2bytes.val[1], q8bytes.val[1])) * aux[is+1+(index)];

#define SHIFT_MULTIPLY_ACCUM_WITH_SCALE(shift, index)\
        q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32;\
        q2bytes.val[0] = vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q2bits.val[0], (shift)), m3));\
        q2bytes.val[1] = vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q2bits.val[1], (shift)), m3));\
        MULTIPLY_ACCUM_WITH_SCALE((index));

        for (int j = 0; j < QK_K/128; ++j) {
            const ggml_uint8x16x2_t q2bits = ggml_vld1q_u8_x2(q2); q2 += 32;

            ggml_int8x16x2_t q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32;
            q2bytes.val[0] = vreinterpretq_s8_u8(vandq_u8(q2bits.val[0], m3));
            q2bytes.val[1] = vreinterpretq_s8_u8(vandq_u8(q2bits.val[1], m3));

            // four 2-bit planes per 32 q2 bytes: shifts 0, 2, 4, 6
            MULTIPLY_ACCUM_WITH_SCALE(0);

            SHIFT_MULTIPLY_ACCUM_WITH_SCALE(2, 2);
            SHIFT_MULTIPLY_ACCUM_WITH_SCALE(4, 4);
            SHIFT_MULTIPLY_ACCUM_WITH_SCALE(6, 6);

            is += 8;
        }

        sum += d * isum;
    }

    *s = sum;

#elif defined __AVX2__
    // AVX2 path
    const __m256i m3 = _mm256_set1_epi8(3);
    const __m128i m4 = _mm_set1_epi8(0xF);

    __m256 acc = _mm256_setzero_ps();

    for (int i = 0; i < nb; ++i) {

        const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d);
        const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin);

        const uint8_t * restrict q2 = x[i].qs;
        const int8_t  * restrict q8 = y[i].qs;

        // low nibbles = scales, high nibbles = mins
        const __m128i mins_and_scales = _mm_loadu_si128((const __m128i*)x[i].scales);
        const __m128i scales8 = _mm_and_si128(mins_and_scales, m4);
        const __m128i mins8 = _mm_and_si128(_mm_srli_epi16(mins_and_scales, 4), m4);
        const __m256i mins = _mm256_cvtepi8_epi16(mins8);
        const __m256i prod = _mm256_madd_epi16(mins, _mm256_loadu_si256((const __m256i*)y[i].bsums));

        // dmin is already negated above, so this fma subtracts the mins term
        acc = _mm256_fmadd_ps(_mm256_broadcast_ss(&dmin), _mm256_cvtepi32_ps(prod), acc);

        const __m256i all_scales = _mm256_cvtepi8_epi16(scales8);
        const __m128i l_scales = _mm256_extracti128_si256(all_scales, 0);
        const __m128i h_scales = _mm256_extracti128_si256(all_scales, 1);
        const __m256i scales[2] = {MM256_SET_M128I(l_scales, l_scales), MM256_SET_M128I(h_scales, h_scales)};

        __m256i sumi = _mm256_setzero_si256();

        for (int j = 0; j < QK_K/128; ++j) {

            const __m256i q2bits = _mm256_loadu_si256((const __m256i*)q2); q2 += 32;

            const __m256i q8_0 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32;
            const __m256i q8_1 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32;
            const __m256i q8_2 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32;
            const __m256i q8_3 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32;

            // unpack the four 2-bit planes
            const __m256i q2_0 = _mm256_and_si256(q2bits, m3);
            const __m256i q2_1 = _mm256_and_si256(_mm256_srli_epi16(q2bits, 2), m3);
            const __m256i q2_2 = _mm256_and_si256(_mm256_srli_epi16(q2bits, 4), m3);
            const __m256i q2_3 = _mm256_and_si256(_mm256_srli_epi16(q2bits, 6), m3);

            __m256i p0 = _mm256_maddubs_epi16(q2_0, q8_0);
            __m256i p1 = _mm256_maddubs_epi16(q2_1, q8_1);
            __m256i p2 = _mm256_maddubs_epi16(q2_2, q8_2);
            __m256i p3 = _mm256_maddubs_epi16(q2_3, q8_3);

            // apply per-sub-block scales while widening to 32 bits
            p0 = _mm256_madd_epi16(_mm256_shuffle_epi8(scales[j], get_scale_shuffle_q3k(0)), p0);
            p1 = _mm256_madd_epi16(_mm256_shuffle_epi8(scales[j], get_scale_shuffle_q3k(1)), p1);
            p2 = _mm256_madd_epi16(_mm256_shuffle_epi8(scales[j], get_scale_shuffle_q3k(2)), p2);
            p3 = _mm256_madd_epi16(_mm256_shuffle_epi8(scales[j], get_scale_shuffle_q3k(3)), p3);

            p0 = _mm256_add_epi32(p0, p1);
            p2 = _mm256_add_epi32(p2, p3);

            sumi = _mm256_add_epi32(sumi, _mm256_add_epi32(p0, p2));
        }

        acc = _mm256_fmadd_ps(_mm256_broadcast_ss(&d), _mm256_cvtepi32_ps(sumi), acc);

    }

    *s = hsum_float_8(acc);

#elif defined __AVX__
    // AVX (128-bit) path
    const __m128i m3 = _mm_set1_epi8(0x3);
    const __m128i m4 = _mm_set1_epi8(0xF);
    const __m128i m2 = _mm_set1_epi8(0x2);

    __m256 acc = _mm256_setzero_ps();

    for (int i = 0; i < nb; ++i) {

        const float dall = y[i].d * GGML_FP16_TO_FP32(x[i].d);
        const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin);

        const uint8_t * restrict q2 = x[i].qs;
        const int8_t  * restrict q8 = y[i].qs;

        // load mins and scales from block_q2_K.scales[QK_K/16]
        const __m128i mins_and_scales = _mm_loadu_si128((const __m128i*)x[i].scales);
        const __m128i scales16 = _mm_and_si128(mins_and_scales, m4);
        const __m128i mins16 = _mm_and_si128(_mm_srli_epi16(mins_and_scales, 4), m4);
        const __m128i mins_0 = _mm_cvtepi8_epi16(mins16);
        const __m128i mins_1 = _mm_cvtepi8_epi16(_mm_unpackhi_epi64(mins16, mins16));

        // summs = y[i].bsums * (x[i].scales >> 4) in 16bits*8*2 to 32bits*4*2
        const __m128i summs_0 = _mm_madd_epi16(mins_0, _mm_loadu_si128((const __m128i*)&y[i].bsums[0]));
        const __m128i summs_1 = _mm_madd_epi16(mins_1, _mm_loadu_si128((const __m128i*)&y[i].bsums[8]));

        // sumf += -dmin * summs in 32bits*8
        acc = _mm256_add_ps(_mm256_mul_ps(_mm256_broadcast_ss(&dmin), _mm256_cvtepi32_ps(MM256_SET_M128I(summs_1, summs_0))), acc);

        const __m128i scales_0 = _mm_cvtepi8_epi16(scales16);
        const __m128i scales_1 = _mm_cvtepi8_epi16(_mm_unpackhi_epi64(scales16, scales16));
        const __m128i scales[2] = { scales_0, scales_1 };

        __m128i sumi_0 = _mm_setzero_si128();
        __m128i sumi_1 = _mm_setzero_si128();

        for (int j = 0; j < QK_K/128; ++j) {

            // load Q8 quants int8*16*8 from block_q8_K.qs[QK_K]
            const __m128i q8_0 = _mm_loadu_si128((const __m128i*)q8); q8 += 16;
            const __m128i q8_1 = _mm_loadu_si128((const __m128i*)q8); q8 += 16;
            const __m128i q8_2 = _mm_loadu_si128((const __m128i*)q8); q8 += 16;
            const __m128i q8_3 = _mm_loadu_si128((const __m128i*)q8); q8 += 16;
            const __m128i q8_4 = _mm_loadu_si128((const __m128i*)q8); q8 += 16;
            const __m128i q8_5 = _mm_loadu_si128((const __m128i*)q8); q8 += 16;
            const __m128i q8_6 = _mm_loadu_si128((const __m128i*)q8); q8 += 16;
            const __m128i q8_7 = _mm_loadu_si128((const __m128i*)q8); q8 += 16;

            // load 2bits*16*8 from block_q2_K.qs[QK_K/4]
            __m128i q2bits = _mm_loadu_si128((const __m128i*)q2); q2 += 16;
            const __m128i q2_0 = _mm_and_si128(q2bits, m3);
            const __m128i q2_2 = _mm_and_si128(_mm_srli_epi16(q2bits, 2), m3);
            const __m128i q2_4 = _mm_and_si128(_mm_srli_epi16(q2bits, 4), m3);
            const __m128i q2_6 = _mm_and_si128(_mm_srli_epi16(q2bits, 6), m3);
            q2bits = _mm_loadu_si128((const __m128i*)q2); q2 += 16;
            const __m128i q2_1 = _mm_and_si128(q2bits, m3);
            const __m128i q2_3 = _mm_and_si128(_mm_srli_epi16(q2bits, 2), m3);
            const __m128i q2_5 = _mm_and_si128(_mm_srli_epi16(q2bits, 4), m3);
            const __m128i q2_7 = _mm_and_si128(_mm_srli_epi16(q2bits, 6), m3);

            // isuml = q8[l] * ((q2[l] >> shift) & 3) in 8bits*16*8 to 16bits*8*8
            __m128i p0 = _mm_maddubs_epi16(q2_0, q8_0);
            __m128i p1 = _mm_maddubs_epi16(q2_1, q8_1);
            __m128i p2 = _mm_maddubs_epi16(q2_2, q8_2);
            __m128i p3 = _mm_maddubs_epi16(q2_3, q8_3);
            __m128i p4 = _mm_maddubs_epi16(q2_4, q8_4);
            __m128i p5 = _mm_maddubs_epi16(q2_5, q8_5);
            __m128i p6 = _mm_maddubs_epi16(q2_6, q8_6);
            __m128i p7 = _mm_maddubs_epi16(q2_7, q8_7);

            // isum += (x[i].scales[is++] & 0xF) * isuml in 16bits*8*8 to 32bits*4*8
            __m128i shuffle = _mm_set1_epi16(0x0100);
            p0 = _mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p0);
            shuffle = _mm_add_epi16(shuffle, m2);
            p1 = _mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p1);
            shuffle = _mm_add_epi16(shuffle, m2);
            p2 = _mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p2);
            shuffle = _mm_add_epi16(shuffle, m2);
            p3 = _mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p3);
            shuffle = _mm_add_epi16(shuffle, m2);
            p4 = _mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p4);
            shuffle = _mm_add_epi16(shuffle, m2);
            p5 = _mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p5);
            shuffle = _mm_add_epi16(shuffle, m2);
            p6 = _mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p6);
            shuffle = _mm_add_epi16(shuffle, m2);
            p7 = _mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p7);

            p0 = _mm_add_epi32(p0, p1);
            p2 = _mm_add_epi32(p2, p3);
            p4 = _mm_add_epi32(p4, p5);
            p6 = _mm_add_epi32(p6, p7);

            // isum in 32bits*4*2
            sumi_0 = _mm_add_epi32(sumi_0, _mm_add_epi32(p0, p2));
            sumi_1 = _mm_add_epi32(sumi_1, _mm_add_epi32(p4, p6));
        }

        // sumf += dall * isum - dmin * summs in 32bits
        __m256i sumi = MM256_SET_M128I(sumi_1, sumi_0);
        acc = _mm256_add_ps(_mm256_mul_ps(_mm256_broadcast_ss(&dall), _mm256_cvtepi32_ps(sumi)), acc);
    }

    *s = hsum_float_8(acc);

#elif defined __riscv_v_intrinsic
    // RISC-V vector-intrinsics path
    float sumf = 0;
    // gather indices: first 16 lanes pick scale element 0+is, next 16 pick 1+is
    uint8_t temp_01[32] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1};

    for (int i = 0; i < nb; ++i) {

        const uint8_t * q2 = x[i].qs;
        const int8_t  * q8 = y[i].qs;
        const uint8_t * sc = x[i].scales;

        const float dall = y[i].d * GGML_FP16_TO_FP32(x[i].d);
        const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin);

        size_t vl = 16;

        vuint8m1_t scales = __riscv_vle8_v_u8m1(sc, vl);
        vuint8m1_t aux = __riscv_vand_vx_u8m1(scales, 0x0F, vl);

        vint16m1_t q8sums = __riscv_vle16_v_i16m1(y[i].bsums, vl);

        // mins (high nibbles) * bsums, reduced to a single scalar
        vuint8mf2_t scales_2 = __riscv_vle8_v_u8mf2(sc, vl);
        vuint8mf2_t mins8 = __riscv_vsrl_vx_u8mf2(scales_2, 0x4, vl);
        vint16m1_t mins = __riscv_vreinterpret_v_u16m1_i16m1(__riscv_vzext_vf2_u16m1(mins8, vl));
        vint32m2_t prod = __riscv_vwmul_vv_i32m2(q8sums, mins, vl);
        vint32m1_t vsums = __riscv_vredsum_vs_i32m2_i32m1(prod, __riscv_vmv_v_x_i32m1(0, 1), vl);

        sumf += dmin * __riscv_vmv_x_s_i32m1_i32(vsums);

        vl = 32;

        vint32m1_t vzero = __riscv_vmv_v_x_i32m1(0, 1);
        vuint8m1_t v_b = __riscv_vle8_v_u8m1(temp_01, vl);

        uint8_t is=0;
        int isum=0;

        for (int j = 0; j < QK_K/128; ++j) {
            // load Q2
            vuint8m1_t q2_x = __riscv_vle8_v_u8m1(q2, vl);

            vuint8m1_t q2_0 = __riscv_vand_vx_u8m1(q2_x, 0x03, vl);
            vuint8m1_t q2_1 = __riscv_vand_vx_u8m1(__riscv_vsrl_vx_u8m1(q2_x, 0x2, vl), 0x03 , vl);
            vuint8m1_t q2_2 = __riscv_vand_vx_u8m1(__riscv_vsrl_vx_u8m1(q2_x, 0x4, vl), 0x03 , vl);
            vuint8m1_t q2_3 = __riscv_vand_vx_u8m1(__riscv_vsrl_vx_u8m1(q2_x, 0x6, vl), 0x03 , vl);

            // duplicate scale elements for product
            vuint8m1_t sc0 = __riscv_vrgather_vv_u8m1(aux, __riscv_vadd_vx_u8m1(v_b, 0+is, vl), vl);
            vuint8m1_t sc1 = __riscv_vrgather_vv_u8m1(aux, __riscv_vadd_vx_u8m1(v_b, 2+is, vl), vl);
            vuint8m1_t sc2 = __riscv_vrgather_vv_u8m1(aux, __riscv_vadd_vx_u8m1(v_b, 4+is, vl), vl);
            vuint8m1_t sc3 = __riscv_vrgather_vv_u8m1(aux, __riscv_vadd_vx_u8m1(v_b, 6+is, vl), vl);

            vint16m2_t p0 = __riscv_vreinterpret_v_u16m2_i16m2(__riscv_vwmulu_vv_u16m2(q2_0, sc0, vl));
            vint16m2_t p1 = __riscv_vreinterpret_v_u16m2_i16m2(__riscv_vwmulu_vv_u16m2(q2_1, sc1, vl));
            vint16m2_t p2 = __riscv_vreinterpret_v_u16m2_i16m2(__riscv_vwmulu_vv_u16m2(q2_2, sc2, vl));
            vint16m2_t p3 = __riscv_vreinterpret_v_u16m2_i16m2(__riscv_vwmulu_vv_u16m2(q2_3, sc3, vl));

            // load Q8
            vint8m1_t q8_0 = __riscv_vle8_v_i8m1(q8, vl);
            vint8m1_t q8_1 = __riscv_vle8_v_i8m1(q8+32, vl);
            vint8m1_t q8_2 = __riscv_vle8_v_i8m1(q8+64, vl);
            vint8m1_t q8_3 = __riscv_vle8_v_i8m1(q8+96, vl);

            vint32m4_t s0 = __riscv_vwmul_vv_i32m4(p0, __riscv_vwcvt_x_x_v_i16m2(q8_0, vl), vl);
            vint32m4_t s1 = __riscv_vwmul_vv_i32m4(p1, __riscv_vwcvt_x_x_v_i16m2(q8_1, vl), vl);
            vint32m4_t s2 = __riscv_vwmul_vv_i32m4(p2, __riscv_vwcvt_x_x_v_i16m2(q8_2, vl), vl);
            vint32m4_t s3 = __riscv_vwmul_vv_i32m4(p3, __riscv_vwcvt_x_x_v_i16m2(q8_3, vl), vl);

            vint32m1_t isum0 = __riscv_vredsum_vs_i32m4_i32m1(__riscv_vadd_vv_i32m4(s0, s1, vl), vzero, vl);
            vint32m1_t isum1 = __riscv_vredsum_vs_i32m4_i32m1(__riscv_vadd_vv_i32m4(s2, s3, vl), isum0, vl);

            isum += __riscv_vmv_x_s_i32m1_i32(isum1);

            q2+=32;  q8+=128;  is=8;

        }

        sumf += dall * isum;

    }

    *s = sumf;

#elif defined(__POWER9_VECTOR__)
    // POWER9 VSX path
    const vector signed char lowMask = vec_splats((signed char)0x3);
    const vector signed char lowScaleMask = vec_splats((signed char)0xF);
    const vector int v0 = vec_splats((int32_t)0);
    const vector unsigned char v2 = vec_splats((unsigned char)0x2);
    const vector unsigned char v6 = vec_splats((unsigned char)0x6);
    const vector unsigned char v4 = vec_splats((unsigned char)0x4);

    vector float vsumf0 = vec_splats(0.0f);
    vector float vsumf1 = vec_splats(0.0f);
    vector float vsumf2 = vec_splats(0.0f);
    vector float vsumf3 = vec_splats(0.0f);

    for (int i = 0; i < nb; ++i) {
        vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[i].d));
        vector float vyd = vec_splats(y[i].d);
        vector float vd = vec_mul(vxd, vyd);

        vector float vxmin = vec_splats(GGML_FP16_TO_FP32(x[i].dmin));
        vector float vdmin = vec_mul(vxmin, vyd);

        vector signed short q8ysums0 = vec_xl( 0, y[i].bsums);
        vector signed short q8ysums1 = vec_xl(16, y[i].bsums);

        vector signed char q2xmins = (vector signed char)vec_xl( 0, x[i].scales);
        vector signed char vscales = vec_and(q2xmins, lowScaleMask);

        // high nibbles are the mins; subtract mins*bsums via vec_nmsub
        q2xmins = vec_sr(q2xmins, v4);
        vector signed short q2xmins0 = vec_unpackh(q2xmins);
        vector signed short q2xmins1 = vec_unpackl(q2xmins);

        vector signed int prod0 = vec_mule(q2xmins0, q8ysums0);
        vector signed int prod1 = vec_mulo(q2xmins0, q8ysums0);
        vector signed int prod2 = vec_mule(q2xmins1, q8ysums1);
        vector signed int prod3 = vec_mulo(q2xmins1, q8ysums1);

        vsumf0 = vec_nmsub(vec_ctf(prod0, 0), vdmin, vsumf0);
        vsumf1 = vec_nmsub(vec_ctf(prod1, 0), vdmin, vsumf1);
        vsumf2 = vec_nmsub(vec_ctf(prod2, 0), vdmin, vsumf2);
        vsumf3 = vec_nmsub(vec_ctf(prod3, 0), vdmin, vsumf3);

        vector signed int vsumi0 = v0;
        vector signed int vsumi1 = v0;
        vector signed int vsumi2 = v0;
        vector signed int vsumi3 = v0;
        vector signed int vsumi4 = v0;
        vector signed int vsumi5 = v0;
        vector signed int vsumi6 = v0;
        vector signed int vsumi7 = v0;

        const uint8_t * restrict q2 = x[i].qs;
        const int8_t  * restrict q8 = y[i].qs;

        for (int j = 0; j < QK_K/128; ++j) {
            __builtin_prefetch(q2, 0, 1);
            __builtin_prefetch(q8, 0, 1);

            vector signed char qxs0 = (vector signed char)vec_xl( 0, q2);
            vector signed char qxs1 = (vector signed char)vec_xl(16, q2);
            q2 += 32;

            // unpack the four 2-bit planes from each 16-byte group
            vector unsigned char q2x00 = (vector unsigned char)vec_and(qxs0, lowMask);
            vector unsigned char q2x01 = (vector unsigned char)vec_and(vec_sr(qxs0, v2), lowMask);
            vector unsigned char q2x02 = (vector unsigned char)vec_and(vec_sr(qxs0, v4), lowMask);
            vector unsigned char q2x03 = (vector unsigned char)vec_and(vec_sr(qxs0, v6), lowMask);
            vector unsigned char q2x10 = (vector unsigned char)vec_and(qxs1, lowMask);
            vector unsigned char q2x11 = (vector unsigned char)vec_and(vec_sr(qxs1, v2), lowMask);
            vector unsigned char q2x12 = (vector unsigned char)vec_and(vec_sr(qxs1, v4), lowMask);
            vector unsigned char q2x13 = (vector unsigned char)vec_and(vec_sr(qxs1, v6), lowMask);

            vector signed char q8y00 = vec_xl(  0, q8);
            vector signed char q8y10 = vec_xl( 16, q8);
            vector signed char q8y01 = vec_xl( 32, q8);
            vector signed char q8y11 = vec_xl( 48, q8);
            vector signed char q8y02 = vec_xl( 64, q8);
            vector signed char q8y12 = vec_xl( 80, q8);
            vector signed char q8y03 = vec_xl( 96, q8);
            vector signed char q8y13 = vec_xl(112, q8);
            q8 += 128;

            vector signed int qv0 = vec_msum(q8y00, q2x00, v0);
            vector signed int qv1 = vec_msum(q8y01, q2x01, v0);
            vector signed int qv2 = vec_msum(q8y02, q2x02, v0);
            vector signed int qv3 = vec_msum(q8y03, q2x03, v0);
            vector signed int qv4 = vec_msum(q8y10, q2x10, v0);
            vector signed int qv5 = vec_msum(q8y11, q2x11, v0);
            vector signed int qv6 = vec_msum(q8y12, q2x12, v0);
            vector signed int qv7 = vec_msum(q8y13, q2x13, v0);

            // broadcast each sub-block scale to a full vector
            vector signed short vscales_07 = vec_unpackh(vscales);
            vector signed int vscales_03 = vec_unpackh(vscales_07);
            vector signed int vscales_47 = vec_unpackl(vscales_07);
            vector signed int vs0 = vec_splat(vscales_03, 0);
            vector signed int vs1 = vec_splat(vscales_03, 1);
            vector signed int vs2 = vec_splat(vscales_03, 2);
            vector signed int vs3 = vec_splat(vscales_03, 3);
            vector signed int vs4 = vec_splat(vscales_47, 0);
            vector signed int vs5 = vec_splat(vscales_47, 1);
            vector signed int vs6 = vec_splat(vscales_47, 2);
            vector signed int vs7 = vec_splat(vscales_47, 3);
            vscales = vec_sld(vscales, vscales, 8);

            vsumi0 = vec_add(vec_mul(qv0, vs0), vsumi0);
            vsumi1 = vec_add(vec_mul(qv1, vs2), vsumi1);
            vsumi2 = vec_add(vec_mul(qv2, vs4), vsumi2);
            vsumi3 = vec_add(vec_mul(qv3, vs6), vsumi3);
            vsumi4 = vec_add(vec_mul(qv4, vs1), vsumi4);
            vsumi5 = vec_add(vec_mul(qv5, vs3), vsumi5);
            vsumi6 = vec_add(vec_mul(qv6, vs5), vsumi6);
            vsumi7 = vec_add(vec_mul(qv7, vs7), vsumi7);
        }

        vsumi0 = vec_add(vsumi0, vsumi4);
        vsumi1 = vec_add(vsumi1, vsumi5);
        vsumi2 = vec_add(vsumi2, vsumi6);
        vsumi3 = vec_add(vsumi3, vsumi7);

        vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0);
        vsumf1 = vec_madd(vec_ctf(vsumi1, 0), vd, vsumf1);
        vsumf2 = vec_madd(vec_ctf(vsumi2, 0), vd, vsumf2);
        vsumf3 = vec_madd(vec_ctf(vsumi3, 0), vd, vsumf3);
    }

    // horizontal reduction of the four float accumulators
    vsumf0 = vec_add(vsumf0, vsumf2);
    vsumf1 = vec_add(vsumf1, vsumf3);

    vsumf0 = vec_add(vsumf0, vsumf1);

    vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4));
    vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8));

    *s = vec_extract(vsumf0, 0);

#elif defined __loongarch_asx
    // LoongArch LASX path (mirrors the AVX2 structure)
    const __m256i m3 = __lasx_xvreplgr2vr_b(3);
    const __m128i m4 = __lsx_vreplgr2vr_b(0xF);

    __m256 acc = (__m256)__lasx_xvldi(0);

    for (int i = 0; i < nb; ++i) {

        const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d);
        const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin);

        const uint8_t * restrict q2 = x[i].qs;
        const int8_t  * restrict q8 = y[i].qs;

        const __m128i mins_and_scales = __lsx_vld((const __m128i*)x[i].scales, 0);
        const __m128i scales8 = __lsx_vand_v(mins_and_scales, m4);
        const __m128i mins8 = __lsx_vand_v(__lsx_vsrli_h(mins_and_scales, 4), m4);
        const __m256i mins = lasx_ext8_16(mins8);
        const __m256i prod = lasx_madd_h(mins, __lasx_xvld((const __m256i*)y[i].bsums, 0));

        acc = __lasx_xvfmadd_s(__lasx_xvreplfr2vr_s(dmin), __lasx_xvffint_s_w(prod), acc);

        const __m256i all_scales = lasx_ext8_16(scales8);
        const __m128i l_scales = lasx_extracti128(all_scales, 0);
        const __m128i h_scales = lasx_extracti128(all_scales, 1);
        const __m256i scales[2] = {lasx_insertf128(l_scales, l_scales), lasx_insertf128(h_scales, h_scales)};

        __m256i sumi = __lasx_xvldi(0);

        for (int j = 0; j < QK_K/128; ++j) {

            const __m256i q2bits = __lasx_xvld((const __m256i*)q2, 0); q2 += 32;

            const __m256i q8_0 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32;
            const __m256i q8_1 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32;
            const __m256i q8_2 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32;
            const __m256i q8_3 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32;

            const __m256i q2_0 = __lasx_xvand_v(q2bits, m3);
            const __m256i q2_1 = __lasx_xvand_v(__lasx_xvsrli_h(q2bits, 2), m3);
            const __m256i q2_2 = __lasx_xvand_v(__lasx_xvsrli_h(q2bits, 4), m3);
            const __m256i q2_3 = __lasx_xvand_v(__lasx_xvsrli_h(q2bits, 6), m3);

            __m256i p0 = lasx_maddubs_h(q2_0, q8_0);
            __m256i p1 = lasx_maddubs_h(q2_1, q8_1);
            __m256i p2 = lasx_maddubs_h(q2_2, q8_2);
            __m256i p3 = lasx_maddubs_h(q2_3, q8_3);

            p0 = lasx_madd_h(lasx_shuffle_b(scales[j], get_scale_shuffle_q3k(0)), p0);
            p1 = lasx_madd_h(lasx_shuffle_b(scales[j], get_scale_shuffle_q3k(1)), p1);
            p2 = lasx_madd_h(lasx_shuffle_b(scales[j], get_scale_shuffle_q3k(2)), p2);
            p3 = lasx_madd_h(lasx_shuffle_b(scales[j], get_scale_shuffle_q3k(3)), p3);

            p0 = __lasx_xvadd_w(p0, p1);
            p2 = __lasx_xvadd_w(p2, p3);

            sumi = __lasx_xvadd_w(sumi, __lasx_xvadd_w(p0, p2));
        }

        acc = __lasx_xvfmadd_s(__lasx_xvreplfr2vr_s(d), __lasx_xvffint_s_w(sumi), acc);

    }

    *s = hsum_float_8(acc);

#else
    // scalar reference path
    float sumf = 0;

    for (int i = 0; i < nb; ++i) {

        const uint8_t * q2 = x[i].qs;
        const int8_t  * q8 = y[i].qs;
        const uint8_t * sc = x[i].scales;

        // mins (high nibbles) times per-sub-block q8 sums
        int summs = 0;
        for (int j = 0; j < 16; ++j) {
            summs += y[i].bsums[j] * (sc[j] >> 4);
        }

        const float dall = y[i].d * GGML_FP16_TO_FP32(x[i].d);
        const float dmin = y[i].d * GGML_FP16_TO_FP32(x[i].dmin);

        int isum = 0;
        int is = 0;
        int d;
        for (int k = 0; k < QK_K/128; ++k) {
            int shift = 0;
            for (int j = 0; j < 4; ++j) {
                // two 16-value sub-blocks per 2-bit plane, each with its own scale
                d = sc[is++] & 0xF;
                int isuml = 0;
                for (int l =  0; l < 16; ++l) isuml += q8[l] * ((q2[l] >> shift) & 3);
                isum += d * isuml;
                d = sc[is++] & 0xF;
                isuml = 0;
                for (int l = 16; l < 32; ++l) isuml += q8[l] * ((q2[l] >> shift) & 3);
                isum += d * isuml;
                shift += 2;
                q8 += 32;
            }
            q2 += 32;
        }
        sumf += dall * isum - dmin * summs;
    }
    *s = sumf;
#endif
}

void ggml_vec_dot_q3_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) {
    assert(n % QK_K == 0);
    assert(nrc == 1);
    UNUSED(nrc);
    UNUSED(bx);
    UNUSED(by);
    UNUSED(bs);

    const uint32_t kmask1 = 0x03030303;
    const uint32_t kmask2 = 0x0f0f0f0f;

    const block_q3_K * restrict x = vx;
    const block_q8_K * restrict y = vy;

    const int nb = n / QK_K;

#ifdef __ARM_NEON

    uint32_t aux[3];
    uint32_t utmp[4];

    const uint8x16_t m3b = vdupq_n_u8(0x3);
    const int32x4_t vzero = vdupq_n_s32(0);

    const uint8x16_t m0 = vdupq_n_u8(1);
    const uint8x16_t m1 = vshlq_n_u8(m0, 1);
    const uint8x16_t m2 = vshlq_n_u8(m0, 2);
    const uint8x16_t m3 = vshlq_n_u8(m0, 3);
    const int8_t m32 = 32;

    ggml_int8x16x4_t q3bytes;

    float sum = 0;

    for (int i = 0; i < nb; ++i) {

        const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d);

        const uint8_t * restrict q3 = x[i].qs;
        const uint8_t * restrict qh = x[i].hmask;
        const int8_t  * restrict q8 = y[i].qs;

        ggml_uint8x16x2_t qhbits = ggml_vld1q_u8_x2(qh);

        ggml_uint8x16x4_t q3h;

        int32_t isum = 0;

        // Set up scales
        memcpy(aux, x[i].scales, 12);
        utmp[3] = ((aux[1] >> 4) & kmask2) | (((aux[2] >> 6) & kmask1) << 4);
        utmp[2] = ((aux[0] >> 4) & kmask2) | (((aux[2] >> 4) & kmask1) << 4);
        utmp[1] = (aux[1] & kmask2) | (((aux[2] >> 2) & kmask1) << 4);
        utmp[0] = (aux[0] & kmask2) | (((aux[2] >> 0) & kmask1) << 4);

        int8_t * scale = (int8_t *)utmp;
        for (int j = 0; j < 16; ++j) scale[j] -= m32;

        for (int j = 0; j < QK_K/128; ++j) {

            const
ggml_uint8x16x2_t q3bits = ggml_vld1q_u8_x2(q3); q3 += 32; + const ggml_int8x16x4_t q8bytes_1 = ggml_vld1q_s8_x4(q8); q8 += 64; + const ggml_int8x16x4_t q8bytes_2 = ggml_vld1q_s8_x4(q8); q8 += 64; + + q3h.val[0] = vshlq_n_u8(vbicq_u8(m0, qhbits.val[0]), 2); + q3h.val[1] = vshlq_n_u8(vbicq_u8(m0, qhbits.val[1]), 2); + q3h.val[2] = vshlq_n_u8(vbicq_u8(m1, qhbits.val[0]), 1); + q3h.val[3] = vshlq_n_u8(vbicq_u8(m1, qhbits.val[1]), 1); + + q3bytes.val[0] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(q3bits.val[0], m3b)), vreinterpretq_s8_u8(q3h.val[0])); + q3bytes.val[1] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(q3bits.val[1], m3b)), vreinterpretq_s8_u8(q3h.val[1])); + q3bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[0], 2), m3b)), vreinterpretq_s8_u8(q3h.val[2])); + q3bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[1], 2), m3b)), vreinterpretq_s8_u8(q3h.val[3])); + + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[0], q8bytes_1.val[0])) * scale[0]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[1], q8bytes_1.val[1])) * scale[1]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[2], q8bytes_1.val[2])) * scale[2]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[3], q8bytes_1.val[3])) * scale[3]; + + scale += 4; + + q3h.val[0] = vbicq_u8(m2, qhbits.val[0]); + q3h.val[1] = vbicq_u8(m2, qhbits.val[1]); + q3h.val[2] = vshrq_n_u8(vbicq_u8(m3, qhbits.val[0]), 1); + q3h.val[3] = vshrq_n_u8(vbicq_u8(m3, qhbits.val[1]), 1); + + q3bytes.val[0] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[0], 4), m3b)), vreinterpretq_s8_u8(q3h.val[0])); + q3bytes.val[1] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[1], 4), m3b)), vreinterpretq_s8_u8(q3h.val[1])); + q3bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[0], 6), m3b)), vreinterpretq_s8_u8(q3h.val[2])); + q3bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[1], 6), m3b)), 
vreinterpretq_s8_u8(q3h.val[3])); + + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[0], q8bytes_2.val[0])) * scale[0]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[1], q8bytes_2.val[1])) * scale[1]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[2], q8bytes_2.val[2])) * scale[2]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[3], q8bytes_2.val[3])) * scale[3]; + + scale += 4; + + if (j == 0) { + qhbits.val[0] = vshrq_n_u8(qhbits.val[0], 4); + qhbits.val[1] = vshrq_n_u8(qhbits.val[1], 4); + } + + } + sum += d * isum; + + } + + *s = sum; + +#elif defined __AVX2__ + + const __m256i m3 = _mm256_set1_epi8(3); + const __m256i mone = _mm256_set1_epi8(1); + const __m128i m32 = _mm_set1_epi8(32); + + __m256 acc = _mm256_setzero_ps(); + + uint32_t aux[3]; + + for (int i = 0; i < nb; ++i) { + + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + + const uint8_t * restrict q3 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + + // Set up scales + memcpy(aux, x[i].scales, 12); + __m128i scales128 = _mm_set_epi32( + ((aux[1] >> 4) & kmask2) | (((aux[2] >> 6) & kmask1) << 4), + ((aux[0] >> 4) & kmask2) | (((aux[2] >> 4) & kmask1) << 4), + (aux[1] & kmask2) | (((aux[2] >> 2) & kmask1) << 4), + (aux[0] & kmask2) | (((aux[2] >> 0) & kmask1) << 4)); + scales128 = _mm_sub_epi8(scales128, m32); + const __m256i all_scales = _mm256_cvtepi8_epi16(scales128); + const __m128i l_scales = _mm256_extracti128_si256(all_scales, 0); + const __m128i h_scales = _mm256_extracti128_si256(all_scales, 1); + const __m256i scales[2] = {MM256_SET_M128I(l_scales, l_scales), MM256_SET_M128I(h_scales, h_scales)}; + + // high bit + const __m256i hbits = _mm256_loadu_si256((const __m256i*)x[i].hmask); + + // integer accumulator + __m256i sumi = _mm256_setzero_si256(); + + int bit = 0; + int is = 0; + + for (int j = 0; j < QK_K/128; ++j) { + // load low 2 bits + const __m256i q3bits = _mm256_loadu_si256((const __m256i*)q3); q3 += 32; + + // prepare low and high bits + const 
__m256i q3l_0 = _mm256_and_si256(q3bits, m3); + const __m256i q3h_0 = _mm256_slli_epi16(_mm256_srli_epi16(_mm256_andnot_si256(hbits, _mm256_slli_epi16(mone, bit)), bit), 2); + ++bit; + + const __m256i q3l_1 = _mm256_and_si256(_mm256_srli_epi16(q3bits, 2), m3); + const __m256i q3h_1 = _mm256_slli_epi16(_mm256_srli_epi16(_mm256_andnot_si256(hbits, _mm256_slli_epi16(mone, bit)), bit), 2); + ++bit; + + const __m256i q3l_2 = _mm256_and_si256(_mm256_srli_epi16(q3bits, 4), m3); + const __m256i q3h_2 = _mm256_slli_epi16(_mm256_srli_epi16(_mm256_andnot_si256(hbits, _mm256_slli_epi16(mone, bit)), bit), 2); + ++bit; + + const __m256i q3l_3 = _mm256_and_si256(_mm256_srli_epi16(q3bits, 6), m3); + const __m256i q3h_3 = _mm256_slli_epi16(_mm256_srli_epi16(_mm256_andnot_si256(hbits, _mm256_slli_epi16(mone, bit)), bit), 2); + ++bit; + + // load Q8 quants + const __m256i q8_0 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + const __m256i q8_1 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + const __m256i q8_2 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + const __m256i q8_3 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + + // Dot product: we multiply the 2 low bits and 1 high bit part separately, so we can use _mm256_maddubs_epi16, + // and then subtract. 
The high bit part has the 2 already subtracted (and so, it is zero if the high bit was not set, + // and 2 if the high bit was set) + __m256i q8s_0 = _mm256_maddubs_epi16(q3h_0, q8_0); + __m256i q8s_1 = _mm256_maddubs_epi16(q3h_1, q8_1); + __m256i q8s_2 = _mm256_maddubs_epi16(q3h_2, q8_2); + __m256i q8s_3 = _mm256_maddubs_epi16(q3h_3, q8_3); + + __m256i p16_0 = _mm256_maddubs_epi16(q3l_0, q8_0); + __m256i p16_1 = _mm256_maddubs_epi16(q3l_1, q8_1); + __m256i p16_2 = _mm256_maddubs_epi16(q3l_2, q8_2); + __m256i p16_3 = _mm256_maddubs_epi16(q3l_3, q8_3); + + p16_0 = _mm256_sub_epi16(p16_0, q8s_0); + p16_1 = _mm256_sub_epi16(p16_1, q8s_1); + p16_2 = _mm256_sub_epi16(p16_2, q8s_2); + p16_3 = _mm256_sub_epi16(p16_3, q8s_3); + + // multiply with scales + p16_0 = _mm256_madd_epi16(_mm256_shuffle_epi8(scales[j], get_scale_shuffle_q3k(is + 0)), p16_0); + p16_1 = _mm256_madd_epi16(_mm256_shuffle_epi8(scales[j], get_scale_shuffle_q3k(is + 1)), p16_1); + p16_2 = _mm256_madd_epi16(_mm256_shuffle_epi8(scales[j], get_scale_shuffle_q3k(is + 2)), p16_2); + p16_3 = _mm256_madd_epi16(_mm256_shuffle_epi8(scales[j], get_scale_shuffle_q3k(is + 3)), p16_3); + + // accumulate + p16_0 = _mm256_add_epi32(p16_0, p16_1); + p16_2 = _mm256_add_epi32(p16_2, p16_3); + sumi = _mm256_add_epi32(sumi, _mm256_add_epi32(p16_0, p16_2)); + + } + + // multiply with block scale and accumulate + acc = _mm256_fmadd_ps(_mm256_broadcast_ss(&d), _mm256_cvtepi32_ps(sumi), acc); + + } + + *s = hsum_float_8(acc); + +#elif defined __AVX__ + + const __m128i m3 = _mm_set1_epi8(3); + const __m128i mone = _mm_set1_epi8(1); + const __m128i m32 = _mm_set1_epi8(32); + const __m128i m2 = _mm_set1_epi8(2); + + __m256 acc = _mm256_setzero_ps(); + + const uint32_t *aux; + + for (int i = 0; i < nb; ++i) { + + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + + const uint8_t * restrict q3 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + + // Set up scales + aux = (const uint32_t *)x[i].scales; + __m128i scales128 = 
_mm_set_epi32( + ((aux[1] >> 4) & kmask2) | (((aux[2] >> 6) & kmask1) << 4), + ((aux[0] >> 4) & kmask2) | (((aux[2] >> 4) & kmask1) << 4), + (aux[1] & kmask2) | (((aux[2] >> 2) & kmask1) << 4), + (aux[0] & kmask2) | (((aux[2] >> 0) & kmask1) << 4)); + scales128 = _mm_sub_epi8(scales128, m32); + const __m128i scales_0 = _mm_cvtepi8_epi16(scales128); + const __m128i scales_1 = _mm_cvtepi8_epi16(_mm_unpackhi_epi64(scales128, scales128)); + const __m128i scales[2] = { scales_0, scales_1 }; + + // high bit *128*2 from block_q3_K.hmask[QK_K/8] + const __m128i hbits_0 = _mm_loadu_si128((const __m128i*)&x[i].hmask[0]); + const __m128i hbits_1 = _mm_loadu_si128((const __m128i*)&x[i].hmask[16]); + + // integer accumulator + __m128i sumi_0 = _mm_setzero_si128(); + __m128i sumi_1 = _mm_setzero_si128(); + + for (int j = 0; j < QK_K/128; ++j) { + // load low 2 bits *64*2 from block_q3_K.qs[QK_K/4] + const __m128i q3bits_0 = _mm_loadu_si128((const __m128i*)q3); q3 += 16; + const __m128i q3bits_1 = _mm_loadu_si128((const __m128i*)q3); q3 += 16; + + // prepare low and high bits + const int bit = j << 2; + + const __m128i q3l_0 = _mm_and_si128(q3bits_0, m3); + const __m128i q3l_1 = _mm_and_si128(q3bits_1, m3); + const __m128i q3h_0 = _mm_slli_epi16(_mm_srli_epi16(_mm_andnot_si128(hbits_0, _mm_slli_epi16(mone, bit)), bit), 2); + const __m128i q3h_1 = _mm_slli_epi16(_mm_srli_epi16(_mm_andnot_si128(hbits_1, _mm_slli_epi16(mone, bit)), bit), 2); + + const __m128i q3l_2 = _mm_and_si128(_mm_srli_epi16(q3bits_0, 2), m3); + const __m128i q3l_3 = _mm_and_si128(_mm_srli_epi16(q3bits_1, 2), m3); + const __m128i q3h_2 = _mm_slli_epi16(_mm_srli_epi16(_mm_andnot_si128(hbits_0, _mm_slli_epi16(mone, bit+1)), bit+1), 2); + const __m128i q3h_3 = _mm_slli_epi16(_mm_srli_epi16(_mm_andnot_si128(hbits_1, _mm_slli_epi16(mone, bit+1)), bit+1), 2); + + const __m128i q3l_4 = _mm_and_si128(_mm_srli_epi16(q3bits_0, 4), m3); + const __m128i q3l_5 = _mm_and_si128(_mm_srli_epi16(q3bits_1, 4), m3); + const __m128i 
q3h_4 = _mm_slli_epi16(_mm_srli_epi16(_mm_andnot_si128(hbits_0, _mm_slli_epi16(mone, bit+2)), bit+2), 2); + const __m128i q3h_5 = _mm_slli_epi16(_mm_srli_epi16(_mm_andnot_si128(hbits_1, _mm_slli_epi16(mone, bit+2)), bit+2), 2); + + const __m128i q3l_6 = _mm_and_si128(_mm_srli_epi16(q3bits_0, 6), m3); + const __m128i q3l_7 = _mm_and_si128(_mm_srli_epi16(q3bits_1, 6), m3); + const __m128i q3h_6 = _mm_slli_epi16(_mm_srli_epi16(_mm_andnot_si128(hbits_0, _mm_slli_epi16(mone, bit+3)), bit+3), 2); + const __m128i q3h_7 = _mm_slli_epi16(_mm_srli_epi16(_mm_andnot_si128(hbits_1, _mm_slli_epi16(mone, bit+3)), bit+3), 2); + + // load Q8 quants from block_q8_K.qs[QK_K] + const __m128i q8_0 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + const __m128i q8_1 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + const __m128i q8_2 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + const __m128i q8_3 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + const __m128i q8_4 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + const __m128i q8_5 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + const __m128i q8_6 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + const __m128i q8_7 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + + // Dot product: we multiply the 2 low bits and 1 high bit part separately, so we can use _mm256_maddubs_epi16, + // and then subtract. 
The high bit part has the 2 already subtracted (and so, it is zero if the high bit was not set, + // and 2 if the high bit was set) + __m128i q8s_0 = _mm_maddubs_epi16(q3h_0, q8_0); + __m128i q8s_1 = _mm_maddubs_epi16(q3h_1, q8_1); + __m128i q8s_2 = _mm_maddubs_epi16(q3h_2, q8_2); + __m128i q8s_3 = _mm_maddubs_epi16(q3h_3, q8_3); + __m128i q8s_4 = _mm_maddubs_epi16(q3h_4, q8_4); + __m128i q8s_5 = _mm_maddubs_epi16(q3h_5, q8_5); + __m128i q8s_6 = _mm_maddubs_epi16(q3h_6, q8_6); + __m128i q8s_7 = _mm_maddubs_epi16(q3h_7, q8_7); + + __m128i p16_0 = _mm_maddubs_epi16(q3l_0, q8_0); + __m128i p16_1 = _mm_maddubs_epi16(q3l_1, q8_1); + __m128i p16_2 = _mm_maddubs_epi16(q3l_2, q8_2); + __m128i p16_3 = _mm_maddubs_epi16(q3l_3, q8_3); + __m128i p16_4 = _mm_maddubs_epi16(q3l_4, q8_4); + __m128i p16_5 = _mm_maddubs_epi16(q3l_5, q8_5); + __m128i p16_6 = _mm_maddubs_epi16(q3l_6, q8_6); + __m128i p16_7 = _mm_maddubs_epi16(q3l_7, q8_7); + + p16_0 = _mm_sub_epi16(p16_0, q8s_0); + p16_1 = _mm_sub_epi16(p16_1, q8s_1); + p16_2 = _mm_sub_epi16(p16_2, q8s_2); + p16_3 = _mm_sub_epi16(p16_3, q8s_3); + p16_4 = _mm_sub_epi16(p16_4, q8s_4); + p16_5 = _mm_sub_epi16(p16_5, q8s_5); + p16_6 = _mm_sub_epi16(p16_6, q8s_6); + p16_7 = _mm_sub_epi16(p16_7, q8s_7); + + // multiply with scales + __m128i shuffle = _mm_set1_epi16(0x0100); + p16_0 = _mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p16_0); + shuffle = _mm_add_epi16(shuffle, m2); + p16_1 = _mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p16_1); + shuffle = _mm_add_epi16(shuffle, m2); + p16_2 = _mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p16_2); + shuffle = _mm_add_epi16(shuffle, m2); + p16_3 = _mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p16_3); + shuffle = _mm_add_epi16(shuffle, m2); + p16_4 = _mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p16_4); + shuffle = _mm_add_epi16(shuffle, m2); + p16_5 = _mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p16_5); + shuffle = _mm_add_epi16(shuffle, m2); + p16_6 = 
_mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p16_6); + shuffle = _mm_add_epi16(shuffle, m2); + p16_7 = _mm_madd_epi16(_mm_shuffle_epi8(scales[j], shuffle), p16_7); + + // accumulate + p16_0 = _mm_add_epi32(p16_0, p16_1); + p16_2 = _mm_add_epi32(p16_2, p16_3); + p16_4 = _mm_add_epi32(p16_4, p16_5); + p16_6 = _mm_add_epi32(p16_6, p16_7); + sumi_0 = _mm_add_epi32(sumi_0, _mm_add_epi32(p16_0, p16_2)); + sumi_1 = _mm_add_epi32(sumi_1, _mm_add_epi32(p16_4, p16_6)); + + } + + // multiply with block scale and accumulate + __m256i sumi = MM256_SET_M128I(sumi_1, sumi_0); + acc = _mm256_add_ps(_mm256_mul_ps(_mm256_broadcast_ss(&d), _mm256_cvtepi32_ps(sumi)), acc); + + } + + *s = hsum_float_8(acc); + +#elif defined __riscv_v_intrinsic + + uint32_t aux[3]; + uint32_t utmp[4]; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + + const uint8_t * restrict q3 = x[i].qs; + const uint8_t * restrict qh = x[i].hmask; + const int8_t * restrict q8 = y[i].qs; + + memcpy(aux, x[i].scales, 12); + utmp[3] = ((aux[1] >> 4) & kmask2) | (((aux[2] >> 6) & kmask1) << 4); + utmp[2] = ((aux[0] >> 4) & kmask2) | (((aux[2] >> 4) & kmask1) << 4); + utmp[1] = (aux[1] & kmask2) | (((aux[2] >> 2) & kmask1) << 4); + utmp[0] = (aux[0] & kmask2) | (((aux[2] >> 0) & kmask1) << 4); + + int8_t * scale = (int8_t *)utmp; + for (int j = 0; j < 16; ++j) scale[j] -= 32; + + + size_t vl = 32; + uint8_t m = 1; + + vint32m1_t vzero = __riscv_vmv_v_x_i32m1(0, 1); + vuint8m1_t vqh = __riscv_vle8_v_u8m1(qh, vl); + + int sum_t = 0; + + for (int j = 0; j < QK_K; j += 128) { + + vl = 32; + + // load Q3 + vuint8m1_t q3_x = __riscv_vle8_v_u8m1(q3, vl); + + vint8m1_t q3_0 = __riscv_vreinterpret_v_u8m1_i8m1(__riscv_vand_vx_u8m1(q3_x, 0x03, vl)); + vint8m1_t q3_1 = __riscv_vreinterpret_v_u8m1_i8m1(__riscv_vand_vx_u8m1(__riscv_vsrl_vx_u8m1(q3_x, 0x2, vl), 0x03 , vl)); + vint8m1_t q3_2 = __riscv_vreinterpret_v_u8m1_i8m1(__riscv_vand_vx_u8m1(__riscv_vsrl_vx_u8m1(q3_x, 0x4, vl), 0x03 , vl)); + vint8m1_t q3_3 = 
__riscv_vreinterpret_v_u8m1_i8m1(__riscv_vand_vx_u8m1(__riscv_vsrl_vx_u8m1(q3_x, 0x6, vl), 0x03 , vl)); + + // compute mask for subtraction + vuint8m1_t qh_m0 = __riscv_vand_vx_u8m1(vqh, m, vl); + vbool8_t vmask_0 = __riscv_vmseq_vx_u8m1_b8(qh_m0, 0, vl); + vint8m1_t q3_m0 = __riscv_vsub_vx_i8m1_mu(vmask_0, q3_0, q3_0, 0x4, vl); + m <<= 1; + + vuint8m1_t qh_m1 = __riscv_vand_vx_u8m1(vqh, m, vl); + vbool8_t vmask_1 = __riscv_vmseq_vx_u8m1_b8(qh_m1, 0, vl); + vint8m1_t q3_m1 = __riscv_vsub_vx_i8m1_mu(vmask_1, q3_1, q3_1, 0x4, vl); + m <<= 1; + + vuint8m1_t qh_m2 = __riscv_vand_vx_u8m1(vqh, m, vl); + vbool8_t vmask_2 = __riscv_vmseq_vx_u8m1_b8(qh_m2, 0, vl); + vint8m1_t q3_m2 = __riscv_vsub_vx_i8m1_mu(vmask_2, q3_2, q3_2, 0x4, vl); + m <<= 1; + + vuint8m1_t qh_m3 = __riscv_vand_vx_u8m1(vqh, m, vl); + vbool8_t vmask_3 = __riscv_vmseq_vx_u8m1_b8(qh_m3, 0, vl); + vint8m1_t q3_m3 = __riscv_vsub_vx_i8m1_mu(vmask_3, q3_3, q3_3, 0x4, vl); + m <<= 1; + + // load Q8 and take product with Q3 + vint16m2_t a0 = __riscv_vwmul_vv_i16m2(q3_m0, __riscv_vle8_v_i8m1(q8, vl), vl); + vint16m2_t a1 = __riscv_vwmul_vv_i16m2(q3_m1, __riscv_vle8_v_i8m1(q8+32, vl), vl); + vint16m2_t a2 = __riscv_vwmul_vv_i16m2(q3_m2, __riscv_vle8_v_i8m1(q8+64, vl), vl); + vint16m2_t a3 = __riscv_vwmul_vv_i16m2(q3_m3, __riscv_vle8_v_i8m1(q8+96, vl), vl); + + vl = 16; + + // retrieve lane to multiply with scale + vint32m2_t aux0_0 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(a0, 0), (scale[0]), vl); + vint32m2_t aux0_1 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(a0, 1), (scale[1]), vl); + vint32m2_t aux1_0 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(a1, 0), (scale[2]), vl); + vint32m2_t aux1_1 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(a1, 1), (scale[3]), vl); + vint32m2_t aux2_0 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(a2, 0), (scale[4]), vl); + vint32m2_t aux2_1 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(a2, 1), (scale[5]), vl); + vint32m2_t aux3_0 = 
__riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(a3, 0), (scale[6]), vl); + vint32m2_t aux3_1 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(a3, 1), (scale[7]), vl); + + vint32m1_t isum0 = __riscv_vredsum_vs_i32m2_i32m1(__riscv_vadd_vv_i32m2(aux0_0, aux0_1, vl), vzero, vl); + vint32m1_t isum1 = __riscv_vredsum_vs_i32m2_i32m1(__riscv_vadd_vv_i32m2(aux1_0, aux1_1, vl), isum0, vl); + vint32m1_t isum2 = __riscv_vredsum_vs_i32m2_i32m1(__riscv_vadd_vv_i32m2(aux2_0, aux2_1, vl), isum1, vl); + vint32m1_t isum3 = __riscv_vredsum_vs_i32m2_i32m1(__riscv_vadd_vv_i32m2(aux3_0, aux3_1, vl), isum2, vl); + + sum_t += __riscv_vmv_x_s_i32m1_i32(isum3); + + q3 += 32; q8 += 128; scale += 8; + + } + + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + + sumf += d*sum_t; + + } + + *s = sumf; + +#elif defined(__POWER9_VECTOR__) + const vector signed char lowMask = vec_splats((signed char)0x3); + const vector signed char lowMask1 = vec_splats((int8_t)0xf); + const vector signed char lowMask2 = vec_splats((int8_t)0x30); + const vector int v0 = vec_splats((int32_t)0); + const vector signed char v1 = vec_splats((signed char)0x1); + const vector unsigned char v2 = vec_splats((unsigned char)0x2); + const vector unsigned char v3 = vec_splats((unsigned char)0x3); + const vector unsigned char v4 = vec_splats((unsigned char)0x4); + const vector unsigned char v6 = vec_splats((unsigned char)0x6); + const vector signed char off = vec_splats((signed char)0x20); + + vector float vsumf0 = vec_splats(0.0f); + vector float vsumf1 = vec_splats(0.0f); + vector float vsumf2 = vec_splats(0.0f); + vector float vsumf3 = vec_splats(0.0f); + + for (int i = 0; i < nb; ++i) { + vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[i].d)); + vector float vyd = vec_splats(y[i].d); + vector float vd = vec_mul(vxd, vyd); + + UNUSED(kmask1); + UNUSED(kmask2); + + vector signed char u0 = (vector signed char)vec_xl_len(x[i].scales, 8); + vector signed char u1 = vec_and(u0, lowMask1); + vector signed char u2 = (vector 
signed char)vec_xl_len(x[i].scales + 8, 4); + vector signed char u3 = (vector signed char)vec_mergeh((vector signed int)u2, (vector signed int)vec_sr(u2, v2)); + vector signed char u30 = vec_sl(vec_and(u3, lowMask), v4); + vector signed char u31 = vec_and(u3, lowMask2); + + u1 = vec_or(u1, u30); + u2 = vec_or(vec_sr(u0, v4), u31); + + vector signed char vscales = (vector signed char)vec_mergeh((vector signed long long)u1, (vector signed long long)u2); + vector signed char qxhs0 = (vector signed char)vec_xl( 0, x[i].hmask); + vector signed char qxhs1 = (vector signed char)vec_xl(16, x[i].hmask); + + vscales = vec_sub(vscales, off); + + vector signed int vsumi0 = v0; + vector signed int vsumi1 = v0; + vector signed int vsumi2 = v0; + vector signed int vsumi3 = v0; + vector signed int vsumi4 = v0; + vector signed int vsumi5 = v0; + vector signed int vsumi6 = v0; + vector signed int vsumi7 = v0; + + const uint8_t * restrict q3 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + + for (int j = 0; j < QK_K/128; ++j) { + __builtin_prefetch(q3, 0, 1); + __builtin_prefetch(q8, 0, 1); + + vector signed char qxs0 = (vector signed char)vec_xl( 0, q3); + vector signed char qxs1 = (vector signed char)vec_xl(16, q3); + q3 += 32; + + //the low 2 bits + vector signed char qxs00 = vec_and(qxs0, lowMask); + vector signed char qxs01 = vec_and(vec_sr(qxs0, v2), lowMask); + vector signed char qxs02 = vec_and(vec_sr(qxs0, v4), lowMask); + vector signed char qxs03 = vec_and(vec_sr(qxs0, v6), lowMask); + vector signed char qxs10 = vec_and(qxs1, lowMask); + vector signed char qxs11 = vec_and(vec_sr(qxs1, v2), lowMask); + vector signed char qxs12 = vec_and(vec_sr(qxs1, v4), lowMask); + vector signed char qxs13 = vec_and(vec_sr(qxs1, v6), lowMask); + + //the 3rd bit + vector signed char qxh00 = vec_sl(vec_andc(v1, qxhs0), v2); + vector signed char qxh01 = vec_sl(vec_andc(v1, vec_sr(qxhs0, (vector unsigned char)v1)), v2); + vector signed char qxh02 = vec_sl(vec_andc(v1, vec_sr(qxhs0, v2)), 
v2); + vector signed char qxh03 = vec_sl(vec_andc(v1, vec_sr(qxhs0, v3)), v2); + vector signed char qxh10 = vec_sl(vec_andc(v1, qxhs1), v2); + vector signed char qxh11 = vec_sl(vec_andc(v1, vec_sr(qxhs1, (vector unsigned char)v1)), v2); + vector signed char qxh12 = vec_sl(vec_andc(v1, vec_sr(qxhs1, v2)), v2); + vector signed char qxh13 = vec_sl(vec_andc(v1, vec_sr(qxhs1, v3)), v2); + qxhs0 = vec_sr(qxhs0, v4); + qxhs1 = vec_sr(qxhs1, v4); + + vector signed char q3x00 = vec_sub(qxs00, qxh00); + vector signed char q3x01 = vec_sub(qxs01, qxh01); + vector signed char q3x02 = vec_sub(qxs02, qxh02); + vector signed char q3x03 = vec_sub(qxs03, qxh03); + vector signed char q3x10 = vec_sub(qxs10, qxh10); + vector signed char q3x11 = vec_sub(qxs11, qxh11); + vector signed char q3x12 = vec_sub(qxs12, qxh12); + vector signed char q3x13 = vec_sub(qxs13, qxh13); + + vector signed char q8y00 = vec_xl( 0, q8); + vector signed char q8y10 = vec_xl( 16, q8); + vector signed char q8y01 = vec_xl( 32, q8); + vector signed char q8y11 = vec_xl( 48, q8); + vector signed char q8y02 = vec_xl( 64, q8); + vector signed char q8y12 = vec_xl( 80, q8); + vector signed char q8y03 = vec_xl( 96, q8); + vector signed char q8y13 = vec_xl(112, q8); + q8 += 128; + + vector signed short vscales_h = vec_unpackh(vscales); + vector signed short vs0 = vec_splat(vscales_h, 0); + vector signed short vs1 = vec_splat(vscales_h, 1); + vector signed short vs2 = vec_splat(vscales_h, 2); + vector signed short vs3 = vec_splat(vscales_h, 3); + vector signed short vs4 = vec_splat(vscales_h, 4); + vector signed short vs5 = vec_splat(vscales_h, 5); + vector signed short vs6 = vec_splat(vscales_h, 6); + vector signed short vs7 = vec_splat(vscales_h, 7); + vscales = vec_sld(vscales, vscales, 8); + + vector signed short qv00 = vec_add(vec_mule(q3x00, q8y00), vec_mulo(q3x00, q8y00)); + vector signed short qv01 = vec_add(vec_mule(q3x01, q8y01), vec_mulo(q3x01, q8y01)); + vector signed short qv02 = vec_add(vec_mule(q3x02, 
q8y02), vec_mulo(q3x02, q8y02)); + vector signed short qv03 = vec_add(vec_mule(q3x03, q8y03), vec_mulo(q3x03, q8y03)); + vector signed short qv10 = vec_add(vec_mule(q3x10, q8y10), vec_mulo(q3x10, q8y10)); + vector signed short qv11 = vec_add(vec_mule(q3x11, q8y11), vec_mulo(q3x11, q8y11)); + vector signed short qv12 = vec_add(vec_mule(q3x12, q8y12), vec_mulo(q3x12, q8y12)); + vector signed short qv13 = vec_add(vec_mule(q3x13, q8y13), vec_mulo(q3x13, q8y13)); + + vsumi0 = vec_msum(qv00, vs0, vsumi0); + vsumi1 = vec_msum(qv01, vs2, vsumi1); + vsumi2 = vec_msum(qv02, vs4, vsumi2); + vsumi3 = vec_msum(qv03, vs6, vsumi3); + vsumi4 = vec_msum(qv10, vs1, vsumi4); + vsumi5 = vec_msum(qv11, vs3, vsumi5); + vsumi6 = vec_msum(qv12, vs5, vsumi6); + vsumi7 = vec_msum(qv13, vs7, vsumi7); + } + + vsumi0 = vec_add(vsumi0, vsumi4); + vsumi1 = vec_add(vsumi1, vsumi5); + vsumi2 = vec_add(vsumi2, vsumi6); + vsumi3 = vec_add(vsumi3, vsumi7); + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0); + vsumf1 = vec_madd(vec_ctf(vsumi1, 0), vd, vsumf1); + vsumf2 = vec_madd(vec_ctf(vsumi2, 0), vd, vsumf2); + vsumf3 = vec_madd(vec_ctf(vsumi3, 0), vd, vsumf3); + } + + vsumf0 = vec_add(vsumf0, vsumf2); + vsumf1 = vec_add(vsumf1, vsumf3); + + vsumf0 = vec_add(vsumf0, vsumf1); + + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8)); + + *s = vec_extract(vsumf0, 0); + +#elif defined __loongarch_asx + + const __m256i m3 = __lasx_xvreplgr2vr_b(3); + const __m256i mone = __lasx_xvreplgr2vr_b(1); + const __m128i m32 = __lsx_vreplgr2vr_b(32); + + __m256 acc = (__m256)__lasx_xvldi(0); + + uint32_t aux[3]; + + for (int i = 0; i < nb; ++i) { + + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const uint8_t * restrict q3 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + // Set up scales + memcpy(aux, x[i].scales, 12); + __m128i scales128 = lsx_set_w( + ((aux[1] >> 4) & kmask2) | (((aux[2] >> 6) & kmask1) << 4), + ((aux[0] >> 4) & kmask2) | 
(((aux[2] >> 4) & kmask1) << 4), + (aux[1] & kmask2) | (((aux[2] >> 2) & kmask1) << 4), + (aux[0] & kmask2) | (((aux[2] >> 0) & kmask1) << 4)); + scales128 = __lsx_vsub_b(scales128, m32); + const __m256i all_scales = lasx_ext8_16(scales128); + const __m128i l_scales = lasx_extracti128(all_scales, 0); + const __m128i h_scales = lasx_extracti128(all_scales, 1); + const __m256i scales[2] = {lasx_insertf128(l_scales, l_scales), lasx_insertf128(h_scales, h_scales)}; + + // high bit + const __m256i hbits = __lasx_xvld((const __m256i*)x[i].hmask, 0); + + // integer accumulator + __m256i sumi = __lasx_xvldi(0); + + int bit = 0; + int is = 0; + __m256i xvbit; + + + for (int j = 0; j < QK_K/128; ++j) { + // load low 2 bits + const __m256i q3bits = __lasx_xvld((const __m256i*)q3, 0); q3 += 32; + + xvbit = __lasx_xvreplgr2vr_h(bit); + // prepare low and high bits + const __m256i q3l_0 = __lasx_xvand_v(q3bits, m3); + const __m256i q3h_0 = __lasx_xvslli_h(__lasx_xvsrl_h(__lasx_xvandn_v(hbits, __lasx_xvsll_h(mone, xvbit)), xvbit), 2); + ++bit; + + xvbit = __lasx_xvreplgr2vr_h(bit); + const __m256i q3l_1 = __lasx_xvand_v(__lasx_xvsrli_h(q3bits, 2), m3); + const __m256i q3h_1 = __lasx_xvslli_h(__lasx_xvsrl_h(__lasx_xvandn_v(hbits, __lasx_xvsll_h(mone, xvbit)), xvbit), 2); + ++bit; + + xvbit = __lasx_xvreplgr2vr_h(bit); + const __m256i q3l_2 = __lasx_xvand_v(__lasx_xvsrli_h(q3bits, 4), m3); + const __m256i q3h_2 = __lasx_xvslli_h(__lasx_xvsrl_h(__lasx_xvandn_v(hbits, __lasx_xvsll_h(mone, xvbit)), xvbit), 2); + ++bit; + + xvbit = __lasx_xvreplgr2vr_h(bit); + const __m256i q3l_3 = __lasx_xvand_v(__lasx_xvsrli_h(q3bits, 6), m3); + const __m256i q3h_3 = __lasx_xvslli_h(__lasx_xvsrl_h(__lasx_xvandn_v(hbits, __lasx_xvsll_h(mone, xvbit)), xvbit), 2); + ++bit; + + // load Q8 quants + const __m256i q8_0 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32; + const __m256i q8_1 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32; + const __m256i q8_2 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32; + 
const __m256i q8_3 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32; + + // Dot product: we multiply the 2 low bits and 1 high bit part separately, so we can use lasx_maddubs_h, + // and then subtract. The high bit part has the 2 already subtracted (and so, it is zero if the high bit was not set, + // and 2 if the high bit was set) + __m256i q8s_0 = lasx_maddubs_h(q3h_0, q8_0); + __m256i q8s_1 = lasx_maddubs_h(q3h_1, q8_1); + __m256i q8s_2 = lasx_maddubs_h(q3h_2, q8_2); + __m256i q8s_3 = lasx_maddubs_h(q3h_3, q8_3); + + __m256i p16_0 = lasx_maddubs_h(q3l_0, q8_0); + __m256i p16_1 = lasx_maddubs_h(q3l_1, q8_1); + __m256i p16_2 = lasx_maddubs_h(q3l_2, q8_2); + __m256i p16_3 = lasx_maddubs_h(q3l_3, q8_3); + + p16_0 = __lasx_xvsub_h(p16_0, q8s_0); + p16_1 = __lasx_xvsub_h(p16_1, q8s_1); + p16_2 = __lasx_xvsub_h(p16_2, q8s_2); + p16_3 = __lasx_xvsub_h(p16_3, q8s_3); + + // multiply with scales + p16_0 = lasx_madd_h(lasx_shuffle_b(scales[j], get_scale_shuffle_q3k(is + 0)), p16_0); + p16_1 = lasx_madd_h(lasx_shuffle_b(scales[j], get_scale_shuffle_q3k(is + 1)), p16_1); + p16_2 = lasx_madd_h(lasx_shuffle_b(scales[j], get_scale_shuffle_q3k(is + 2)), p16_2); + p16_3 = lasx_madd_h(lasx_shuffle_b(scales[j], get_scale_shuffle_q3k(is + 3)), p16_3); + + // accumulate + p16_0 = __lasx_xvadd_w(p16_0, p16_1); + p16_2 = __lasx_xvadd_w(p16_2, p16_3); + sumi = __lasx_xvadd_w(sumi, __lasx_xvadd_w(p16_0, p16_2)); + } + // multiply with block scale and accumulate + acc = __lasx_xvfmadd_s(__lasx_xvreplfr2vr_s(d), __lasx_xvffint_s_w(sumi), acc);//FIXME + } + + *s = hsum_float_8(acc); + +#else + // scalar version + // This function is written like this so the compiler can manage to vectorize most of it + // Using -Ofast, GCC and clang manage to produce code that is within a factor of 2 or so from the + // manually vectorized version above. Every other version I tried would run at least 4 times slower. 
+ // The ideal situation would be if we could just write the code once, and the compiler would + // automatically produce the best possible set of machine instructions, instead of us having to manually + // write vectorized versions for AVX, ARM_NEON, etc. + + int8_t aux8[QK_K]; + int16_t aux16[8]; + float sums [8]; + int32_t aux32[8]; + memset(sums, 0, 8*sizeof(float)); + + uint32_t auxs[4]; + const int8_t * scales = (const int8_t*)auxs; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + const uint8_t * restrict q3 = x[i].qs; + const uint8_t * restrict hm = x[i].hmask; + const int8_t * restrict q8 = y[i].qs; + memset(aux32, 0, 8*sizeof(int32_t)); + int8_t * restrict a = aux8; + uint8_t m = 1; + for (int j = 0; j < QK_K; j += 128) { + for (int l = 0; l < 32; ++l) a[l] = q3[l] & 3; + for (int l = 0; l < 32; ++l) a[l] -= (hm[l] & m ? 0 : 4); + a += 32; m <<= 1; + for (int l = 0; l < 32; ++l) a[l] = (q3[l] >> 2) & 3; + for (int l = 0; l < 32; ++l) a[l] -= (hm[l] & m ? 0 : 4); + a += 32; m <<= 1; + for (int l = 0; l < 32; ++l) a[l] = (q3[l] >> 4) & 3; + for (int l = 0; l < 32; ++l) a[l] -= (hm[l] & m ? 0 : 4); + a += 32; m <<= 1; + for (int l = 0; l < 32; ++l) a[l] = (q3[l] >> 6) & 3; + for (int l = 0; l < 32; ++l) a[l] -= (hm[l] & m ? 
0 : 4); + a += 32; m <<= 1; + q3 += 32; + } + a = aux8; + + memcpy(auxs, x[i].scales, 12); + uint32_t tmp = auxs[2]; + auxs[2] = ((auxs[0] >> 4) & kmask2) | (((tmp >> 4) & kmask1) << 4); + auxs[3] = ((auxs[1] >> 4) & kmask2) | (((tmp >> 6) & kmask1) << 4); + auxs[0] = (auxs[0] & kmask2) | (((tmp >> 0) & kmask1) << 4); + auxs[1] = (auxs[1] & kmask2) | (((tmp >> 2) & kmask1) << 4); + for (int j = 0; j < QK_K/16; ++j) { + for (int l = 0; l < 8; ++l) aux16[l] = q8[l] * a[l]; + for (int l = 0; l < 8; ++l) aux32[l] += (scales[j] - 32) * aux16[l]; + q8 += 8; a += 8; + for (int l = 0; l < 8; ++l) aux16[l] = q8[l] * a[l]; + for (int l = 0; l < 8; ++l) aux32[l] += (scales[j] - 32) * aux16[l]; + q8 += 8; a += 8; + } + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + for (int l = 0; l < 8; ++l) sums[l] += d * aux32[l]; + } + for (int l = 0; l < 8; ++l) sumf += sums[l]; + *s = sumf; + +#endif + +} + +void ggml_vec_dot_q4_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_q4_K * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + + static const uint32_t kmask1 = 0x3f3f3f3f; + static const uint32_t kmask2 = 0x0f0f0f0f; + static const uint32_t kmask3 = 0x03030303; + + uint32_t utmp[4]; + +#ifdef __ARM_NEON + const uint8x16_t m4b = vdupq_n_u8(0xf); + const int32x4_t mzero = vdupq_n_s32(0); + + ggml_int8x16x2_t q4bytes; + ggml_int8x16x2_t q8bytes; + + float sumf = 0; + + for (int i = 0; i < nb; ++i) { + + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = y[i].d * GGML_FP16_TO_FP32(x[i].dmin); + + const int16x8_t q8sums = vpaddq_s16(vld1q_s16(y[i].bsums), vld1q_s16(y[i].bsums + 8)); + + memcpy(utmp, x[i].scales, 12); + + uint32x2_t mins8 = { 0 }; + mins8 = vset_lane_u32(utmp[1] & kmask1, mins8, 0); + mins8 = 
vset_lane_u32(((utmp[2] >> 4) & kmask2) | (((utmp[1] >> 6) & kmask3) << 4), mins8, 1); + + utmp[1] = (utmp[2] & kmask2) | (((utmp[0] >> 6) & kmask3) << 4); + utmp[0] &= kmask1; + + const int16x8_t mins = vreinterpretq_s16_u16(vmovl_u8(vreinterpret_u8_u32(mins8))); + const int32x4_t prod = vaddq_s32(vmull_s16(vget_low_s16 (q8sums), vget_low_s16 (mins)), + vmull_s16(vget_high_s16(q8sums), vget_high_s16(mins))); + sumf -= dmin * vaddvq_s32(prod); + + const uint8_t * scales = (const uint8_t *)utmp; + + const uint8_t * restrict q4 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + + int32_t sumi1 = 0; + int32_t sumi2 = 0; + + for (int j = 0; j < QK_K/64; ++j) { + const ggml_uint8x16x2_t q4bits = ggml_vld1q_u8_x2(q4); q4 += 32; + + q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; + q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); + q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); + + const int32x4_t p1 = ggml_vdotq_s32(ggml_vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); + sumi1 += vaddvq_s32(p1) * scales[2*j+0]; + + q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; + q4bytes.val[0] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[0], 4)); + q4bytes.val[1] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[1], 4)); + + const int32x4_t p2 = ggml_vdotq_s32(ggml_vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); + + sumi2 += vaddvq_s32(p2) * scales[2*j+1]; + } + + sumf += d * (sumi1 + sumi2); + + } + + *s = sumf; + +#elif defined __AVX2__ + + const __m256i m4 = _mm256_set1_epi8(0xF); + + __m256 acc = _mm256_setzero_ps(); + __m128 acc_m = _mm_setzero_ps(); + + for (int i = 0; i < nb; ++i) { + + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); + + memcpy(utmp, x[i].scales, 12); + utmp[3] = ((utmp[2] >> 4) & kmask2) | (((utmp[1] >> 6) & kmask3) << 4); + const uint32_t uaux = utmp[1] & kmask1; + utmp[1] = (utmp[2] & kmask2) | 
(((utmp[0] >> 6) & kmask3) << 4); + utmp[2] = uaux; + utmp[0] &= kmask1; + + const uint8_t * restrict q4 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + + const __m256i mins_and_scales = _mm256_cvtepu8_epi16(_mm_set_epi32(utmp[3], utmp[2], utmp[1], utmp[0])); + + const __m256i q8sums = _mm256_loadu_si256((const __m256i*)y[i].bsums); + const __m128i q8s = _mm_hadd_epi16(_mm256_extracti128_si256(q8sums, 0), _mm256_extracti128_si256(q8sums, 1)); + const __m128i prod = _mm_madd_epi16(_mm256_extracti128_si256(mins_and_scales, 1), q8s); + acc_m = _mm_fmadd_ps(_mm_set1_ps(dmin), _mm_cvtepi32_ps(prod), acc_m); + + const __m128i sc128 = _mm256_extracti128_si256(mins_and_scales, 0); + const __m256i scales = MM256_SET_M128I(sc128, sc128); + + __m256i sumi = _mm256_setzero_si256(); + + for (int j = 0; j < QK_K/64; ++j) { + + const __m256i scale_l = _mm256_shuffle_epi8(scales, get_scale_shuffle_k4(2*j+0)); + const __m256i scale_h = _mm256_shuffle_epi8(scales, get_scale_shuffle_k4(2*j+1)); + + const __m256i q4bits = _mm256_loadu_si256((const __m256i*)q4); q4 += 32; + const __m256i q4l = _mm256_and_si256(q4bits, m4); + const __m256i q4h = _mm256_and_si256(_mm256_srli_epi16(q4bits, 4), m4); + + const __m256i q8l = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + __m256i p16l = _mm256_maddubs_epi16(q4l, q8l); + p16l = _mm256_madd_epi16(scale_l, p16l); + + const __m256i q8h = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + __m256i p16h = _mm256_maddubs_epi16(q4h, q8h); + p16h = _mm256_madd_epi16(scale_h, p16h); + const __m256i sumj = _mm256_add_epi32(p16l, p16h); + + sumi = _mm256_add_epi32(sumi, sumj); + } + + __m256 vd = _mm256_set1_ps(d); + acc = _mm256_fmadd_ps(vd, _mm256_cvtepi32_ps(sumi), acc); + + } + + acc_m = _mm_add_ps(acc_m, _mm_movehl_ps(acc_m, acc_m)); + acc_m = _mm_add_ss(acc_m, _mm_movehdup_ps(acc_m)); + + *s = hsum_float_8(acc) + _mm_cvtss_f32(acc_m); + +#elif defined __AVX__ + + const __m128i m4 = _mm_set1_epi8(0xF); + const __m128i m2 = 
_mm_set1_epi8(0x2); + + __m256 acc = _mm256_setzero_ps(); + __m128 acc_m = _mm_setzero_ps(); + + for (int i = 0; i < nb; ++i) { + + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); + + const uint8_t * restrict q4 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + + memcpy(utmp, x[i].scales, 12); + utmp[3] = ((utmp[2] >> 4) & kmask2) | (((utmp[1] >> 6) & kmask3) << 4); + const uint32_t uaux = utmp[1] & kmask1; + utmp[1] = (utmp[2] & kmask2) | (((utmp[0] >> 6) & kmask3) << 4); + utmp[2] = uaux; + utmp[0] &= kmask1; + + const __m128i utmps = _mm_set_epi32(utmp[3], utmp[2], utmp[1], utmp[0]); + const __m128i scales = _mm_cvtepu8_epi16(utmps); + const __m128i mins = _mm_cvtepu8_epi16(_mm_unpackhi_epi64(utmps, utmps)); + + const __m128i q8sums_0 = _mm_loadu_si128((const __m128i*)&y[i].bsums[0]); + const __m128i q8sums_1 = _mm_loadu_si128((const __m128i*)&y[i].bsums[8]); + const __m128i q8s = _mm_hadd_epi16(q8sums_0, q8sums_1); + const __m128i prod = _mm_madd_epi16(mins, q8s); + acc_m = _mm_add_ps(_mm_mul_ps(_mm_set1_ps(dmin), _mm_cvtepi32_ps(prod)), acc_m); + + __m128i sumi_0 = _mm_setzero_si128(); + __m128i sumi_1 = _mm_setzero_si128(); + + __m128i shuffle = _mm_set1_epi16(0x0100); + for (int j = 0; j < QK_K/64; ++j) { + + const __m128i scale_l = _mm_shuffle_epi8(scales, shuffle); + shuffle = _mm_add_epi16(shuffle, m2); + const __m128i scale_h = _mm_shuffle_epi8(scales, shuffle); + shuffle = _mm_add_epi16(shuffle, m2); + + __m128i q4bits = _mm_loadu_si128((const __m128i*)q4); q4 += 16; + const __m128i q4l_0 = _mm_and_si128(q4bits, m4); + const __m128i q4h_0 = _mm_and_si128(_mm_srli_epi16(q4bits, 4), m4); + q4bits = _mm_loadu_si128((const __m128i*)q4); q4 += 16; + const __m128i q4l_1 = _mm_and_si128(q4bits, m4); + const __m128i q4h_1 = _mm_and_si128(_mm_srli_epi16(q4bits, 4), m4); + + const __m128i q8l_0 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + __m128i p16l = _mm_maddubs_epi16(q4l_0, q8l_0); + 
p16l = _mm_madd_epi16(scale_l, p16l); + sumi_0 = _mm_add_epi32(sumi_0, p16l); + const __m128i q8l_1 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + p16l = _mm_maddubs_epi16(q4l_1, q8l_1); + p16l = _mm_madd_epi16(scale_l, p16l); + sumi_1 = _mm_add_epi32(sumi_1, p16l); + + const __m128i q8h_0 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + __m128i p16h = _mm_maddubs_epi16(q4h_0, q8h_0); + p16h = _mm_madd_epi16(scale_h, p16h); + sumi_0 = _mm_add_epi32(sumi_0, p16h); + const __m128i q8h_1 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + p16h = _mm_maddubs_epi16(q4h_1, q8h_1); + p16h = _mm_madd_epi16(scale_h, p16h); + sumi_1 = _mm_add_epi32(sumi_1, p16h); + + } + + __m256 vd = _mm256_set1_ps(d); + __m256i sumi = MM256_SET_M128I(sumi_1, sumi_0); + acc = _mm256_add_ps(_mm256_mul_ps(vd, _mm256_cvtepi32_ps(sumi)), acc); + + } + + acc_m = _mm_add_ps(acc_m, _mm_movehl_ps(acc_m, acc_m)); + acc_m = _mm_add_ss(acc_m, _mm_movehdup_ps(acc_m)); + + *s = hsum_float_8(acc) + _mm_cvtss_f32(acc_m); + +#elif defined __riscv_v_intrinsic + + const uint8_t * scales = (const uint8_t*)&utmp[0]; + const uint8_t * mins = (const uint8_t*)&utmp[2]; + + float sumf = 0; + + for (int i = 0; i < nb; ++i) { + + size_t vl = 8; + + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = y[i].d * GGML_FP16_TO_FP32(x[i].dmin); + + vint16mf2_t q8sums_0 = __riscv_vlse16_v_i16mf2(y[i].bsums, 4, vl); + vint16mf2_t q8sums_1 = __riscv_vlse16_v_i16mf2(y[i].bsums+1, 4, vl); + vint16mf2_t q8sums = __riscv_vadd_vv_i16mf2(q8sums_0, q8sums_1, vl); + + memcpy(utmp, x[i].scales, 12); + utmp[3] = ((utmp[2] >> 4) & kmask2) | (((utmp[1] >> 6) & kmask3) << 4); + const uint32_t uaux = utmp[1] & kmask1; + utmp[1] = (utmp[2] & kmask2) | (((utmp[0] >> 6) & kmask3) << 4); + utmp[2] = uaux; + utmp[0] &= kmask1; + + vuint8mf4_t mins8 = __riscv_vle8_v_u8mf4(mins, vl); + vint16mf2_t v_mins = __riscv_vreinterpret_v_u16mf2_i16mf2(__riscv_vzext_vf2_u16mf2(mins8, vl)); + vint32m1_t prod = 
__riscv_vwmul_vv_i32m1(q8sums, v_mins, vl); + + vint32m1_t sumi = __riscv_vredsum_vs_i32m1_i32m1(prod, __riscv_vmv_v_x_i32m1(0, 1), vl); + sumf -= dmin * __riscv_vmv_x_s_i32m1_i32(sumi); + + const uint8_t * restrict q4 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + + vl = 32; + + int32_t sum_1 = 0; + int32_t sum_2 = 0; + + vint16m1_t vzero = __riscv_vmv_v_x_i16m1(0, 1); + + for (int j = 0; j < QK_K/64; ++j) { + // load Q4 + vuint8m1_t q4_x = __riscv_vle8_v_u8m1(q4, vl); + + // load Q8 and multiply it with lower Q4 nibble + vint8m1_t q8_0 = __riscv_vle8_v_i8m1(q8, vl); + vint8m1_t q4_0 = __riscv_vreinterpret_v_u8m1_i8m1(__riscv_vand_vx_u8m1(q4_x, 0x0F, vl)); + vint16m2_t qv_0 = __riscv_vwmul_vv_i16m2(q4_0, q8_0, vl); + vint16m1_t vs_0 = __riscv_vredsum_vs_i16m2_i16m1(qv_0, vzero, vl); + + sum_1 += __riscv_vmv_x_s_i16m1_i16(vs_0) * scales[2*j+0]; + + // load Q8 and multiply it with upper Q4 nibble + vint8m1_t q8_1 = __riscv_vle8_v_i8m1(q8+32, vl); + vint8m1_t q4_1 = __riscv_vreinterpret_v_u8m1_i8m1(__riscv_vsrl_vx_u8m1(q4_x, 0x04, vl)); + vint16m2_t qv_1 = __riscv_vwmul_vv_i16m2(q4_1, q8_1, vl); + vint16m1_t vs_1 = __riscv_vredsum_vs_i16m2_i16m1(qv_1, vzero, vl); + + sum_2 += __riscv_vmv_x_s_i16m1_i16(vs_1) * scales[2*j+1]; + + q4 += 32; q8 += 64; + + } + + sumf += d*(sum_1 + sum_2); + + } + + *s = sumf; + +#elif defined(__POWER9_VECTOR__) + const vector signed char lowMask = vec_splats((signed char)0xF); + const vector signed char lowMask1 = vec_splats((int8_t)0x3f); + const vector signed char lowMask2 = vec_splats((int8_t)0x30); + const vector int v0 = vec_splats((int32_t)0); + const vector unsigned char v2 = vec_splats((uint8_t)2); + const vector unsigned char v4 = vec_splats((unsigned char)0x4); + + vector float vsumf0 = vec_splats(0.0f); + vector float vsumf1 = vec_splats(0.0f); + vector float vsumf2 = vec_splats(0.0f); + vector float vsumf3 = vec_splats(0.0f); + + for (int i = 0; i < nb; ++i) { + vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[i].d)); + 
vector float vyd = vec_splats(y[i].d); + vector float vd = vec_mul(vxd, vyd); + + vector float vxmin = vec_splats(GGML_FP16_TO_FP32(x[i].dmin)); + vector float vdmin = vec_mul(vxmin, vyd); + + vector signed short q8ysums0 = vec_xl( 0, y[i].bsums); + vector signed short q8ysums1 = vec_xl(16, y[i].bsums); + + UNUSED(kmask1); + UNUSED(kmask2); + UNUSED(kmask3); + UNUSED(utmp); + + vector signed char u0 = (vector signed char)vec_xl_len(x[i].scales, 8); + vector signed char u1 = vec_and(vec_sr(u0, v2), lowMask2); + vector signed char u2 = (vector signed char)vec_xl_len(x[i].scales + 8, 4); + vector signed char u3 = vec_sr(u2, v4); + + vector signed char u30 = u1; + vector signed char u31 = (vector signed char)vec_mergeh((vector signed int)vec_and(u2, lowMask), (vector signed int)u3); + + u1 = vec_and(u0, lowMask1); + u2 = vec_or(u30, u31); + + vector signed char utmps = (vector signed char)vec_mergeh((vector signed int)u1, (vector signed int)u2); + + vector signed short vscales = vec_unpackh(utmps); + vector signed short q4xmins = vec_unpackl(utmps); + vector signed short q4xmins0 = vec_mergeh(q4xmins, q4xmins); + vector signed short q4xmins1 = vec_mergel(q4xmins, q4xmins); + + vector signed int prod0 = vec_mule(q4xmins0, q8ysums0); + vector signed int prod1 = vec_mule(q4xmins1, q8ysums1); + vector signed int prod2 = vec_mulo(q4xmins0, q8ysums0); + vector signed int prod3 = vec_mulo(q4xmins1, q8ysums1); + + vsumf0 = vec_nmsub(vec_ctf(prod0, 0), vdmin, vsumf0); + vsumf1 = vec_nmsub(vec_ctf(prod1, 0), vdmin, vsumf1); + vsumf2 = vec_nmsub(vec_ctf(prod2, 0), vdmin, vsumf2); + vsumf3 = vec_nmsub(vec_ctf(prod3, 0), vdmin, vsumf3); + + vector signed int vsumi0 = v0; + vector signed int vsumi1 = v0; + vector signed int vsumi2 = v0; + vector signed int vsumi3 = v0; + + const uint8_t * restrict q4 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + + for (int j = 0; j < QK_K/64; j+=2) { + __builtin_prefetch(q4, 0, 1); + __builtin_prefetch(q8, 0, 1); + + vector signed char qxs0 = 
(vector signed char)vec_xl( 0, q4); + vector signed char qxs1 = (vector signed char)vec_xl(16, q4); + vector signed char qxs2 = (vector signed char)vec_xl(32, q4); + vector signed char qxs3 = (vector signed char)vec_xl(48, q4); + q4 += 64; + + vector unsigned char q4x00 = (vector unsigned char)vec_and(qxs0, lowMask); + vector unsigned char q4x01 = (vector unsigned char)vec_sr(qxs0, v4); + vector unsigned char q4x10 = (vector unsigned char)vec_and(qxs1, lowMask); + vector unsigned char q4x11 = (vector unsigned char)vec_sr(qxs1, v4); + vector unsigned char q4x20 = (vector unsigned char)vec_and(qxs2, lowMask); + vector unsigned char q4x21 = (vector unsigned char)vec_sr(qxs2, v4); + vector unsigned char q4x30 = (vector unsigned char)vec_and(qxs3, lowMask); + vector unsigned char q4x31 = (vector unsigned char)vec_sr(qxs3, v4); + + vector signed char q8y00 = vec_xl( 0, q8); + vector signed char q8y10 = vec_xl( 16, q8); + vector signed char q8y01 = vec_xl( 32, q8); + vector signed char q8y11 = vec_xl( 48, q8); + vector signed char q8y20 = vec_xl( 64, q8); + vector signed char q8y30 = vec_xl( 80, q8); + vector signed char q8y21 = vec_xl( 96, q8); + vector signed char q8y31 = vec_xl(112, q8); + q8 += 128; + + vector signed int qv00 = vec_msum(q8y00, q4x00, v0); + vector signed int qv01 = vec_msum(q8y01, q4x01, v0); + vector signed int qv10 = vec_msum(q8y10, q4x10, v0); + vector signed int qv11 = vec_msum(q8y11, q4x11, v0); + vector signed int qv20 = vec_msum(q8y20, q4x20, v0); + vector signed int qv21 = vec_msum(q8y21, q4x21, v0); + vector signed int qv30 = vec_msum(q8y30, q4x30, v0); + vector signed int qv31 = vec_msum(q8y31, q4x31, v0); + + vector signed int vscales_h = vec_unpackh(vscales); + vector signed int vs0 = vec_splat(vscales_h, 0); + vector signed int vs1 = vec_splat(vscales_h, 1); + vector signed int vs2 = vec_splat(vscales_h, 2); + vector signed int vs3 = vec_splat(vscales_h, 3); + vscales = vec_sld(vscales, vscales, 8); + + vsumi0 = vec_add(vec_mul(qv00, 
vs0), vsumi0); + vsumi1 = vec_add(vec_mul(qv01, vs1), vsumi1); + vsumi2 = vec_add(vec_mul(qv20, vs2), vsumi2); + vsumi3 = vec_add(vec_mul(qv21, vs3), vsumi3); + + vsumi0 = vec_add(vec_mul(qv10, vs0), vsumi0); + vsumi1 = vec_add(vec_mul(qv11, vs1), vsumi1); + vsumi2 = vec_add(vec_mul(qv30, vs2), vsumi2); + vsumi3 = vec_add(vec_mul(qv31, vs3), vsumi3); + } + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0); + vsumf1 = vec_madd(vec_ctf(vsumi1, 0), vd, vsumf1); + vsumf2 = vec_madd(vec_ctf(vsumi2, 0), vd, vsumf2); + vsumf3 = vec_madd(vec_ctf(vsumi3, 0), vd, vsumf3); + } + + vsumf0 = vec_add(vsumf0, vsumf2); + vsumf1 = vec_add(vsumf1, vsumf3); + + vsumf0 = vec_add(vsumf0, vsumf1); + + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8)); + + *s = vec_extract(vsumf0, 0); + +#elif defined __loongarch_asx + GGML_UNUSED(kmask1); + GGML_UNUSED(kmask2); + GGML_UNUSED(kmask3); + + const __m256i m4 = __lasx_xvreplgr2vr_b(0xF); + + __m256 acc = (__m256)__lasx_xvldi(0); + __m128 acc_m = (__m128)__lsx_vldi(0); + + for (int i = 0; i < nb; ++i) { + + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); + + memcpy(utmp, x[i].scales, 12); + utmp[3] = ((utmp[2] >> 4) & kmask2) | (((utmp[1] >> 6) & kmask3) << 4); + const uint32_t uaux = utmp[1] & kmask1; + utmp[1] = (utmp[2] & kmask2) | (((utmp[0] >> 6) & kmask3) << 4); + utmp[2] = uaux; + utmp[0] &= kmask1; + + const uint8_t * restrict q4 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + + const __m256i mins_and_scales = lasx_extu8_16(lsx_set_w(utmp[3], utmp[2], utmp[1], utmp[0])); + + const __m256i q8sums = __lasx_xvld((const __m256i*)y[i].bsums, 0); + const __m128i q8s = lsx_hadd_h(lasx_extracti128(q8sums, 0), lasx_extracti128(q8sums, 1)); + const __m128i prod = lsx_madd_h(lasx_extracti128(mins_and_scales, 1), q8s); + acc_m = __lsx_vfmadd_s(__lsx_vreplfr2vr_s(dmin), __lsx_vffint_s_w(prod), acc_m); + + const __m128i 
sc128 = lasx_extracti128(mins_and_scales, 0); + const __m256i scales = lasx_insertf128(sc128, sc128); + + __m256i sumi = __lasx_xvldi(0); + + for (int j = 0; j < QK_K/64; ++j) { + + const __m256i scale_l = lasx_shuffle_b(scales, get_scale_shuffle_k4(2*j+0)); + const __m256i scale_h = lasx_shuffle_b(scales, get_scale_shuffle_k4(2*j+1)); + + const __m256i q4bits = __lasx_xvld((const __m256i*)q4, 0); q4 += 32; + const __m256i q4l = __lasx_xvand_v(q4bits, m4); + const __m256i q4h = __lasx_xvand_v(__lasx_xvsrli_h(q4bits, 4), m4); + + const __m256i q8l = __lasx_xvld((const __m256i*)q8, 0); q8 += 32; + __m256i p16l = lasx_maddubs_h(q4l, q8l); + p16l = lasx_madd_h(scale_l, p16l); + + const __m256i q8h = __lasx_xvld((const __m256i*)q8, 0); q8 += 32; + __m256i p16h = lasx_maddubs_h(q4h, q8h); + p16h = lasx_madd_h(scale_h, p16h); + const __m256i sumj = __lasx_xvadd_w(p16l, p16h); + + sumi = __lasx_xvadd_w(sumi, sumj); + } + + __m256 vd = __lasx_xvreplfr2vr_s(d); + acc = __lasx_xvfmadd_s(vd, __lasx_xvffint_s_w(sumi), acc); + + } + + acc_m = __lsx_vfadd_s(acc_m, (__m128)__lsx_vpermi_w((__m128i)acc_m, (__m128i)acc_m, 0xee)); + __m128i tmp1 = __lsx_vinsgr2vr_w(__lsx_vldi(0), __lsx_vpickve2gr_w((__m128i)acc_m, 1), 0); + acc_m = __lsx_vfadd_s(acc_m, (__m128)tmp1); + + + ft_union fi; + fi.i = __lsx_vpickve2gr_w(acc_m, 0); + *s = hsum_float_8(acc) + fi.f ; +#else + + const uint8_t * scales = (const uint8_t*)&utmp[0]; + const uint8_t * mins = (const uint8_t*)&utmp[2]; + + int8_t aux8[QK_K]; + int16_t aux16[8]; + float sums [8]; + int32_t aux32[8]; + memset(sums, 0, 8*sizeof(float)); + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + const uint8_t * restrict q4 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + memset(aux32, 0, 8*sizeof(int32_t)); + int8_t * restrict a = aux8; + for (int j = 0; j < QK_K/64; ++j) { + for (int l = 0; l < 32; ++l) a[l] = (int8_t)(q4[l] & 0xF); + a += 32; + for (int l = 0; l < 32; ++l) a[l] = (int8_t)(q4[l] >> 4); + a += 32; q4 += 32; + } + 
        // (tail of the preceding scalar fallback — enclosing function begins above this chunk)
        // Unpack the 12-byte packed scales: utmp[0..1] -> eight 6-bit scales,
        // utmp[2..3] -> eight 6-bit mins (kmask1/2/3 select the bit fields).
        memcpy(utmp, x[i].scales, 12);
        utmp[3] = ((utmp[2] >> 4) & kmask2) | (((utmp[1] >> 6) & kmask3) << 4);
        const uint32_t uaux = utmp[1] & kmask1;
        utmp[1] = (utmp[2] & kmask2) | (((utmp[0] >> 6) & kmask3) << 4);
        utmp[2] = uaux;
        utmp[0] &= kmask1;

        // Correction term: bsums[j] holds the sum of 16 q8 values, and each min
        // covers two consecutive bsums entries (mins[j/2]).
        int sumi = 0;
        for (int j = 0; j < QK_K/16; ++j) sumi += y[i].bsums[j] * mins[j/2];
        a = aux8;
        int is = 0;
        // 32 weights per sub-block scale, processed 8 at a time.
        for (int j = 0; j < QK_K/32; ++j) {
            int32_t scale = scales[is++];
            for (int l = 0; l < 8; ++l) aux16[l] = q8[l] * a[l];
            for (int l = 0; l < 8; ++l) aux32[l] += scale * aux16[l];
            q8 += 8; a += 8;
            for (int l = 0; l < 8; ++l) aux16[l] = q8[l] * a[l];
            for (int l = 0; l < 8; ++l) aux32[l] += scale * aux16[l];
            q8 += 8; a += 8;
            for (int l = 0; l < 8; ++l) aux16[l] = q8[l] * a[l];
            for (int l = 0; l < 8; ++l) aux32[l] += scale * aux16[l];
            q8 += 8; a += 8;
            for (int l = 0; l < 8; ++l) aux16[l] = q8[l] * a[l];
            for (int l = 0; l < 8; ++l) aux32[l] += scale * aux16[l];
            q8 += 8; a += 8;
        }
        const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d;
        for (int l = 0; l < 8; ++l) sums[l] += d * aux32[l];
        const float dmin = GGML_FP16_TO_FP32(x[i].dmin) * y[i].d;
        sumf -= dmin * sumi;   // subtract the per-block min correction
    }
    for (int l = 0; l < 8; ++l) sumf += sums[l];
    *s = sumf;
#endif
}

// Dot product of a Q5_K-quantized row (vx) with a Q8_K-quantized row (vy);
// the scalar result is written to *s. n must be a multiple of QK_K and only
// nrc == 1 is supported (bs/bx/by are unused in that case).
//
// Q5_K layout (as used below): 4 low bits per weight packed two-per-byte in
// x[i].qs, one high bit per weight in x[i].qh (adds 16 when set), and eight
// 6-bit sub-block scales plus eight 6-bit mins packed into the 12-byte
// x[i].scales field (unpacked via kmask1/2/3). Each block contributes
//   d * sum(scale_j * q5_j . q8_j)  -  dmin * sum(min_j * bsums_j)
// where d = x.d * y.d and dmin = x.dmin * y.d.
void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) {
    assert(n % QK_K == 0);
    assert(nrc == 1);
    UNUSED(nrc);
    UNUSED(bx);
    UNUSED(by);
    UNUSED(bs);

    const block_q5_K * restrict x = vx;
    const block_q8_K * restrict y = vy;

    const int nb = n / QK_K;

    // Bit-field masks for unpacking the 12-byte packed scales/mins.
    static const uint32_t kmask1 = 0x3f3f3f3f;
    static const uint32_t kmask2 = 0x0f0f0f0f;
    static const uint32_t kmask3 = 0x03030303;

    uint32_t utmp[4];

#ifdef __ARM_NEON
    const uint8x16_t m4b = vdupq_n_u8(0xf);
    const uint8x16_t mone = vdupq_n_u8(1);
    const uint8x16_t mtwo = vdupq_n_u8(2);
    const int32x4_t mzero = vdupq_n_s32(0);

    ggml_int8x16x4_t q5bytes;

    float sumf = 0;

    for (int i = 0; i < nb; ++i) {

        const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d);
        const float dmin = y[i].d * GGML_FP16_TO_FP32(x[i].dmin);

        // Pairwise-add the 16 bsums down to 8 (one per min).
        const int16x8_t q8sums = vpaddq_s16(vld1q_s16(y[i].bsums), vld1q_s16(y[i].bsums + 8));

        memcpy(utmp, x[i].scales, 12);
        utmp[3] = ((utmp[2] >> 4) & kmask2) | (((utmp[1] >> 6) & kmask3) << 4);
        const uint32_t uaux = utmp[1] & kmask1;
        utmp[1] = (utmp[2] & kmask2) | (((utmp[0] >> 6) & kmask3) << 4);
        utmp[2] = uaux;
        utmp[0] &= kmask1;

        // mins live in the upper 8 bytes of utmp after unpacking.
        const uint8x8_t mins8 = vld1_u8((const uint8_t*)utmp + 8);
        const int16x8_t mins = vreinterpretq_s16_u16(vmovl_u8(mins8));
        const int32x4_t prod = vaddq_s32(vmull_s16(vget_low_s16 (q8sums), vget_low_s16 (mins)),
                                         vmull_s16(vget_high_s16(q8sums), vget_high_s16(mins)));
        int32_t sumi_mins = vaddvq_s32(prod);

        const uint8_t * scales = (const uint8_t *)utmp;

        const uint8_t * restrict q5 = x[i].qs;
        const uint8_t * restrict qh = x[i].qh;
        const int8_t * restrict q8 = y[i].qs;

        ggml_uint8x16x2_t qhbits = ggml_vld1q_u8_x2(qh);

        ggml_uint8x16x4_t q5h;

        int32_t sumi = 0;

        for (int j = 0; j < QK_K/64; ++j) {

            const ggml_uint8x16x2_t q5bits = ggml_vld1q_u8_x2(q5); q5 += 32;
            const ggml_int8x16x4_t q8bytes = ggml_vld1q_s8_x4(q8); q8 += 64;

            // Extract two high-bit planes from qh (bit0 and bit1 of each byte),
            // shifted into position 4 so they add 16 to the low nibble.
            q5h.val[0] = vshlq_n_u8(vandq_u8(mone, qhbits.val[0]), 4);
            q5h.val[1] = vshlq_n_u8(vandq_u8(mone, qhbits.val[1]), 4);
            q5h.val[2] = vshlq_n_u8(vandq_u8(mtwo, qhbits.val[0]), 3);
            q5h.val[3] = vshlq_n_u8(vandq_u8(mtwo, qhbits.val[1]), 3);
            qhbits.val[0] = vshrq_n_u8(qhbits.val[0], 2);
            qhbits.val[1] = vshrq_n_u8(qhbits.val[1], 2);

            // Combine low nibbles / high nibbles with their high bits.
            q5bytes.val[0] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q5bits.val[0], m4b), q5h.val[0]));
            q5bytes.val[1] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q5bits.val[1], m4b), q5h.val[1]));
            q5bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q5bits.val[0], 4), q5h.val[2]));
            q5bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q5bits.val[1], 4), q5h.val[3]));

            sumi += vaddvq_s32(ggml_vdotq_s32(ggml_vdotq_s32(mzero, q5bytes.val[0], q8bytes.val[0]), q5bytes.val[1], q8bytes.val[1])) * *scales++;
            sumi += vaddvq_s32(ggml_vdotq_s32(ggml_vdotq_s32(mzero, q5bytes.val[2], q8bytes.val[2]), q5bytes.val[3], q8bytes.val[3])) * *scales++;
        }

        sumf += d * sumi - dmin * sumi_mins;
    }

    *s = sumf;

#elif defined __AVX2__

    const __m256i m4 = _mm256_set1_epi8(0xF);
    const __m128i mzero = _mm_setzero_si128();
    const __m256i mone = _mm256_set1_epi8(1);

    __m256 acc = _mm256_setzero_ps();

    float summs = 0.f;

    for (int i = 0; i < nb; ++i) {
        const uint8_t * restrict q5 = x[i].qs;
        const int8_t * restrict q8 = y[i].qs;

        const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d);
        // dmin is negated so the min correction can be fused as an addition.
        const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin);

        memcpy(utmp, x[i].scales, 12);
        utmp[3] = ((utmp[2] >> 4) & kmask2) | (((utmp[1] >> 6) & kmask3) << 4);
        const uint32_t uaux = utmp[1] & kmask1;
        utmp[1] = (utmp[2] & kmask2) | (((utmp[0] >> 6) & kmask3) << 4);
        utmp[2] = uaux;
        utmp[0] &= kmask1;

        // Low 128 bits: scales widened to i16; high 128 bits: mins widened to i16.
        const __m256i mins_and_scales = _mm256_cvtepu8_epi16(_mm_set_epi32(utmp[3], utmp[2], utmp[1], utmp[0]));

        const __m256i q8sums = _mm256_loadu_si256((const __m256i*)y[i].bsums);
        const __m128i q8s = _mm_hadd_epi16(_mm256_extracti128_si256(q8sums, 0), _mm256_extracti128_si256(q8sums, 1));
        const __m128i prod = _mm_madd_epi16(_mm256_extracti128_si256(mins_and_scales, 1), q8s);
        const __m128i hsum = _mm_hadd_epi32(_mm_hadd_epi32(prod, mzero), mzero);
        summs += dmin * _mm_extract_epi32(hsum, 0);

        const __m128i sc128 = _mm256_extracti128_si256(mins_and_scales, 0);
        const __m256i scales = MM256_SET_M128I(sc128, sc128);

        const __m256i hbits = _mm256_loadu_si256((const __m256i*)x[i].qh);
        __m256i hmask = mone;   // walks up one bit position per 32 weights

        __m256i sumi = _mm256_setzero_si256();

        int bit = 0;

        for (int j = 0; j < QK_K/64; ++j) {

            const __m256i scale_0 = _mm256_shuffle_epi8(scales, get_scale_shuffle_k4(2*j+0));
            const __m256i scale_1 = _mm256_shuffle_epi8(scales, get_scale_shuffle_k4(2*j+1));

            const __m256i q5bits = _mm256_loadu_si256((const __m256i*)q5); q5 += 32;

            // Low nibbles + their high bit (shifted to value 16).
            const __m256i q5l_0 = _mm256_and_si256(q5bits, m4);
            const __m256i q5h_0 = _mm256_slli_epi16(_mm256_srli_epi16(_mm256_and_si256(hbits, hmask), bit++), 4);
            const __m256i q5_0 = _mm256_add_epi8(q5l_0, q5h_0);
            hmask = _mm256_slli_epi16(hmask, 1);

            // High nibbles + the next high-bit plane.
            const __m256i q5l_1 = _mm256_and_si256(_mm256_srli_epi16(q5bits, 4), m4);
            const __m256i q5h_1 = _mm256_slli_epi16(_mm256_srli_epi16(_mm256_and_si256(hbits, hmask), bit++), 4);
            const __m256i q5_1 = _mm256_add_epi8(q5l_1, q5h_1);
            hmask = _mm256_slli_epi16(hmask, 1);

            const __m256i q8_0 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32;
            const __m256i q8_1 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32;

            __m256i p16_0 = _mm256_maddubs_epi16(q5_0, q8_0);
            __m256i p16_1 = _mm256_maddubs_epi16(q5_1, q8_1);

            p16_0 = _mm256_madd_epi16(scale_0, p16_0);
            p16_1 = _mm256_madd_epi16(scale_1, p16_1);

            sumi = _mm256_add_epi32(sumi, _mm256_add_epi32(p16_0, p16_1));

        }

        __m256 vd = _mm256_set1_ps(d);
        acc = _mm256_fmadd_ps(vd, _mm256_cvtepi32_ps(sumi), acc);

    }

    *s = hsum_float_8(acc) + summs;

#elif defined __AVX__

    const __m128i m4 = _mm_set1_epi8(0xF);
    const __m128i mzero = _mm_setzero_si128();
    const __m128i mone = _mm_set1_epi8(1);
    const __m128i m2 = _mm_set1_epi8(2);

    __m256 acc = _mm256_setzero_ps();

    float summs = 0.f;

    for (int i = 0; i < nb; ++i) {

        const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d);
        const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin);  // negated: correction is added

        const uint8_t * restrict q5 = x[i].qs;
        const int8_t * restrict q8 = y[i].qs;

        memcpy(utmp, x[i].scales, 12);
        utmp[3] = ((utmp[2] >> 4) & kmask2) | (((utmp[1] >> 6) & kmask3) << 4);
        const uint32_t uaux = utmp[1] & kmask1;
        utmp[1] = (utmp[2] & kmask2) | (((utmp[0] >> 6) & kmask3) << 4);
        utmp[2] = uaux;
        utmp[0] &= kmask1;

        const __m128i utmps = _mm_set_epi32(utmp[3], utmp[2], utmp[1], utmp[0]);
        const __m128i scales = _mm_cvtepu8_epi16(utmps);
        const __m128i mins = _mm_cvtepu8_epi16(_mm_unpackhi_epi64(utmps, utmps));

        const __m128i q8sums_0 = _mm_loadu_si128((const __m128i*)&y[i].bsums[0]);
        const __m128i q8sums_1 = _mm_loadu_si128((const __m128i*)&y[i].bsums[8]);
        const __m128i q8s = _mm_hadd_epi16(q8sums_0, q8sums_1);
        const __m128i prod = _mm_madd_epi16(mins, q8s);
        const __m128i hsum = _mm_hadd_epi32(_mm_hadd_epi32(prod, mzero), mzero);
        summs += dmin * _mm_extract_epi32(hsum, 0);

        const __m128i hbits_0 = _mm_loadu_si128((const __m128i*)&x[i].qh[0]);
        const __m128i hbits_1 = _mm_loadu_si128((const __m128i*)&x[i].qh[16]);
        __m128i hmask = mone;

        __m128i sumi_0 = _mm_setzero_si128();
        __m128i sumi_1 = _mm_setzero_si128();

        int bit = 0;

        // 0x0100 replicates scale byte pairs; advanced by 2 per sub-block.
        __m128i shuffle = _mm_set1_epi16(0x0100);
        for (int j = 0; j < QK_K/64; ++j) {

            const __m128i scale_0 = _mm_shuffle_epi8(scales, shuffle);
            shuffle = _mm_add_epi16(shuffle, m2);
            const __m128i scale_1 = _mm_shuffle_epi8(scales, shuffle);
            shuffle = _mm_add_epi16(shuffle, m2);

            const __m128i q5bits_0 = _mm_loadu_si128((const __m128i*)q5); q5 += 16;
            const __m128i q5bits_1 = _mm_loadu_si128((const __m128i*)q5); q5 += 16;

            // Low nibbles; scale_0 applies to both 16-byte halves of this sub-block.
            __m128i q5l_0 = _mm_and_si128(q5bits_0, m4);
            __m128i q5l_1 = _mm_and_si128(q5bits_1, m4);
            __m128i q5h_0 = _mm_slli_epi16(_mm_srli_epi16(_mm_and_si128(hbits_0, hmask), bit), 4);
            __m128i q5h_1 = _mm_slli_epi16(_mm_srli_epi16(_mm_and_si128(hbits_1, hmask), bit++), 4);
            __m128i q5_0 = _mm_add_epi8(q5l_0, q5h_0);
            __m128i q5_1 = _mm_add_epi8(q5l_1, q5h_1);
            hmask = _mm_slli_epi16(hmask, 1);

            __m128i q8_0 = _mm_loadu_si128((const __m128i*)q8); q8 += 16;
            __m128i q8_1 = _mm_loadu_si128((const __m128i*)q8); q8 += 16;
            __m128i p16_0 = _mm_maddubs_epi16(q5_0, q8_0);
            __m128i p16_1 = _mm_maddubs_epi16(q5_1, q8_1);
            p16_0 = _mm_madd_epi16(scale_0, p16_0);
            p16_1 = _mm_madd_epi16(scale_0, p16_1);

            // High nibbles, next high-bit plane, scaled by scale_1.
            q5l_0 = _mm_and_si128(_mm_srli_epi16(q5bits_0, 4), m4);
            q5l_1 = _mm_and_si128(_mm_srli_epi16(q5bits_1, 4), m4);
            q5h_0 = _mm_slli_epi16(_mm_srli_epi16(_mm_and_si128(hbits_0, hmask), bit), 4);
            q5h_1 = _mm_slli_epi16(_mm_srli_epi16(_mm_and_si128(hbits_1, hmask), bit++), 4);
            q5_0 = _mm_add_epi8(q5l_0, q5h_0);
            q5_1 = _mm_add_epi8(q5l_1, q5h_1);
            hmask = _mm_slli_epi16(hmask, 1);

            q8_0 = _mm_loadu_si128((const __m128i*)q8); q8 += 16;
            q8_1 = _mm_loadu_si128((const __m128i*)q8); q8 += 16;
            __m128i p16_2 = _mm_maddubs_epi16(q5_0, q8_0);
            __m128i p16_3 = _mm_maddubs_epi16(q5_1, q8_1);
            p16_2 = _mm_madd_epi16(scale_1, p16_2);
            p16_3 = _mm_madd_epi16(scale_1, p16_3);

            sumi_0 = _mm_add_epi32(sumi_0, _mm_add_epi32(p16_0, p16_2));
            sumi_1 = _mm_add_epi32(sumi_1, _mm_add_epi32(p16_1, p16_3));

        }

        __m256 vd = _mm256_set1_ps(d);
        __m256i sumi = MM256_SET_M128I(sumi_1, sumi_0);
        acc = _mm256_add_ps(_mm256_mul_ps(vd, _mm256_cvtepi32_ps(sumi)), acc);

    }

    *s = hsum_float_8(acc) + summs;

#elif defined __riscv_v_intrinsic

    const uint8_t * scales = (const uint8_t*)&utmp[0];
    const uint8_t * mins = (const uint8_t*)&utmp[2];

    float sumf = 0;
    float sums = 0.0;

    size_t vl;

    for (int i = 0; i < nb; ++i) {

        vl = 8;

        const uint8_t * restrict q5 = x[i].qs;
        const uint8_t * restrict hm = x[i].qh;
        const int8_t * restrict q8 = y[i].qs;

        const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d;
        const float dmin = GGML_FP16_TO_FP32(x[i].dmin) * y[i].d;

        // Strided (stride 4 bytes = every other i16) loads of the 16 bsums;
        // adding the even/odd streams collapses adjacent pairs to 8 sums.
        vint16mf2_t q8sums_0 = __riscv_vlse16_v_i16mf2(y[i].bsums, 4, vl);
        vint16mf2_t q8sums_1 = __riscv_vlse16_v_i16mf2(y[i].bsums+1, 4, vl);
        vint16mf2_t q8sums = __riscv_vadd_vv_i16mf2(q8sums_0, q8sums_1, vl);

        memcpy(utmp, x[i].scales, 12);
        utmp[3] = ((utmp[2] >> 4) & kmask2) | (((utmp[1] >> 6) & kmask3) << 4);
        const uint32_t uaux = utmp[1] & kmask1;
        utmp[1] = (utmp[2] & kmask2) | (((utmp[0] >> 6) & kmask3) << 4);
        utmp[2] = uaux;
        utmp[0] &= kmask1;

        vuint8mf4_t mins8 = __riscv_vle8_v_u8mf4(mins, vl);
        vint16mf2_t v_mins = __riscv_vreinterpret_v_u16mf2_i16mf2(__riscv_vzext_vf2_u16mf2(mins8, vl));
        vint32m1_t prod = __riscv_vwmul_vv_i32m1(q8sums, v_mins, vl);

        vint32m1_t sumi = __riscv_vredsum_vs_i32m1_i32m1(prod, __riscv_vmv_v_x_i32m1(0, 1), vl);
        sumf -= dmin * __riscv_vmv_x_s_i32m1_i32(sumi);

        vl = 32;
        int32_t aux32 = 0;
        int is = 0;

        uint8_t m = 1;
        vint32m1_t vzero = __riscv_vmv_v_x_i32m1(0, 1);
        vuint8m1_t vqh = __riscv_vle8_v_u8m1(hm, vl);

        for (int j = 0; j < QK_K/64; ++j) {
            // load Q5 and Q8
            vuint8m1_t q5_x = __riscv_vle8_v_u8m1(q5, vl);
            vint8m1_t q8_y1 = __riscv_vle8_v_i8m1(q8, vl);
            vint8m1_t q8_y2 = __riscv_vle8_v_i8m1(q8+32, vl);

            // compute mask for addition
            // Where the high bit (mask m) is set in qh, add 16 under the mask.
            vint8m1_t q5_a = __riscv_vreinterpret_v_u8m1_i8m1(__riscv_vand_vx_u8m1(q5_x, 0x0F, vl));
            vuint8m1_t qh_m1 = __riscv_vand_vx_u8m1(vqh, m, vl);
            vbool8_t vmask_1 = __riscv_vmsne_vx_u8m1_b8(qh_m1, 0, vl);
            vint8m1_t q5_m1 = __riscv_vadd_vx_i8m1_mu(vmask_1, q5_a, q5_a, 16, vl);
            m <<= 1;

            vint8m1_t q5_l = __riscv_vreinterpret_v_u8m1_i8m1(__riscv_vsrl_vx_u8m1(q5_x, 0x04, vl));
            vuint8m1_t qh_m2 = __riscv_vand_vx_u8m1(vqh, m, vl);
            vbool8_t vmask_2 = __riscv_vmsne_vx_u8m1_b8(qh_m2, 0, vl);
            vint8m1_t q5_m2 = __riscv_vadd_vx_i8m1_mu(vmask_2, q5_l, q5_l, 16, vl);
            m <<= 1;

            vint16m2_t v0 = __riscv_vwmul_vv_i16m2(q5_m1, q8_y1, vl);
            vint16m2_t v1 = __riscv_vwmul_vv_i16m2(q5_m2, q8_y2, vl);

            vint32m4_t vs1 = __riscv_vwmul_vx_i32m4(v0, scales[is++], vl);
            vint32m4_t vs2 = __riscv_vwmul_vx_i32m4(v1, scales[is++], vl);

            vint32m1_t vacc1 = __riscv_vredsum_vs_i32m4_i32m1(vs1, vzero, vl);
            vint32m1_t vacc2 = __riscv_vredsum_vs_i32m4_i32m1(vs2, vzero, vl);

            aux32 += __riscv_vmv_x_s_i32m1_i32(vacc1) + __riscv_vmv_x_s_i32m1_i32(vacc2);
            q5 += 32;    q8 += 64;

        }

        vfloat32m1_t vaux = __riscv_vfmul_vf_f32m1(__riscv_vfmv_v_f_f32m1(aux32, 1), d, 1);
        sums += __riscv_vfmv_f_s_f32m1_f32(vaux);

    }

    *s = sumf+sums;

#elif defined(__POWER9_VECTOR__)
    const vector signed char lowMask = vec_splats((signed char)0xF);
    const vector signed char lowMask1 = vec_splats((int8_t)0x3f);
    const vector signed char lowMask2 = vec_splats((int8_t)0x30);
    const vector int v0 = vec_splats((int32_t)0);
    const vector unsigned char v1 = vec_splats((unsigned char)0x1);
    const vector unsigned char v2 = vec_splats((unsigned char)0x2);
    const vector unsigned char v3 = vec_splats((unsigned char)0x3);
    const vector unsigned char v4 = vec_splats((unsigned char)0x4);

    vector float vsumf0 = vec_splats(0.0f);
    vector float vsumf1 = vec_splats(0.0f);
    vector float vsumf2 = vec_splats(0.0f);
    vector float vsumf3 = vec_splats(0.0f);

    for (int i = 0; i < nb; ++i) {
        vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[i].d));
        vector float vyd = vec_splats(y[i].d);
        vector float vd = vec_mul(vxd, vyd);

        vector float vxmin = vec_splats(GGML_FP16_TO_FP32(x[i].dmin));
        vector float vdmin = vec_mul(vxmin, vyd);

        // Scales are unpacked with vector ops here, so the scalar helpers go unused.
        UNUSED(kmask1);
        UNUSED(kmask2);
        UNUSED(kmask3);
        UNUSED(utmp);

        // Vector equivalent of the kmask unpack of the 12-byte scales field.
        vector signed char u0 = (vector signed char)vec_xl_len(x[i].scales, 8);
        vector signed char u1 = vec_and(vec_sr(u0, v2), lowMask2);
        vector signed char u2 = (vector signed char)vec_xl_len(x[i].scales + 8, 4);
        vector signed char u3 = vec_sr(u2, v4);

        vector signed char u30 = u1;
        vector signed char u31 = (vector signed char)vec_mergeh((vector signed int)vec_and(u2, lowMask), (vector signed int)u3);

        u1 = vec_and(u0, lowMask1);
        u2 = vec_or(u30, u31);

        vector signed char utmps = (vector signed char)vec_mergeh((vector signed int)u1, (vector signed int)u2);

        vector signed short q8ysums0 = vec_xl( 0, y[i].bsums);
        vector signed short q8ysums1 = vec_xl(16, y[i].bsums);

        vector signed short vscales = vec_unpackh(utmps);

        vector signed short q5xmins = vec_unpackl(utmps);
        vector signed short q5xmins0 = vec_mergeh(q5xmins, q5xmins);
        vector signed short q5xmins1 = vec_mergel(q5xmins, q5xmins);

        vector signed int prod0 = vec_mule(q5xmins0, q8ysums0);
        vector signed int prod1 = vec_mule(q5xmins1, q8ysums1);
        vector signed int prod2 = vec_mulo(q5xmins0, q8ysums0);
        vector signed int prod3 = vec_mulo(q5xmins1, q8ysums1);

        // Subtract the min correction (negative multiply-subtract).
        vsumf0 = vec_nmsub(vec_ctf(prod0, 0), vdmin, vsumf0);
        vsumf1 = vec_nmsub(vec_ctf(prod1, 0), vdmin, vsumf1);
        vsumf2 = vec_nmsub(vec_ctf(prod2, 0), vdmin, vsumf2);
        vsumf3 = vec_nmsub(vec_ctf(prod3, 0), vdmin, vsumf3);

        vector signed char qxhs0 = (vector signed char)vec_xl( 0, x[i].qh);
        vector signed char qxhs1 = (vector signed char)vec_xl(16, x[i].qh);

        vector signed int vsumi0 = v0;
        vector signed int vsumi1 = v0;
        vector signed int vsumi2 = v0;
        vector signed int vsumi3 = v0;

        const uint8_t * restrict q5 = x[i].qs;
        const int8_t * restrict q8 = y[i].qs;

        for (int j = 0; j < QK_K/64; ++j) {
            __builtin_prefetch(q5, 0, 1);
            __builtin_prefetch(q8, 0, 1);

            vector signed char qxs0 = (vector signed char)vec_xl( 0, q5);
            vector signed char qxs1 = (vector signed char)vec_xl(16, q5);
            q5 += 32;

            vector signed char qxs00 = vec_and(qxs0, lowMask);
            vector signed char qxs01 = vec_sr(qxs0, v4);
            vector signed char qxs10 = vec_and(qxs1, lowMask);
            vector signed char qxs11 = vec_sr(qxs1, v4);

            // High-bit planes (bit0 -> <<4, bit1 -> <<3) merged onto the nibbles.
            vector signed char q5h00 = vec_sl(vec_and((vector signed char)v1, qxhs0), v4);
            vector signed char q5h01 = vec_sl(vec_and((vector signed char)v2, qxhs0), v3);
            vector signed char q5h10 = vec_sl(vec_and((vector signed char)v1, qxhs1), v4);
            vector signed char q5h11 = vec_sl(vec_and((vector signed char)v2, qxhs1), v3);
            qxhs0 = vec_sr(qxhs0, v2);
            qxhs1 = vec_sr(qxhs1, v2);

            vector unsigned char q5x00 = (vector unsigned char)vec_or(q5h00, qxs00);
            vector unsigned char q5x01 = (vector unsigned char)vec_or(q5h01, qxs01);
            vector unsigned char q5x10 = (vector unsigned char)vec_or(q5h10, qxs10);
            vector unsigned char q5x11 = (vector unsigned char)vec_or(q5h11, qxs11);

            vector signed char q8y00 = vec_xl( 0, q8);
            vector signed char q8y10 = vec_xl(16, q8);
            vector signed char q8y01 = vec_xl(32, q8);
            vector signed char q8y11 = vec_xl(48, q8);
            q8 += 64;

            vector signed int qv00 = vec_msum(q8y00, q5x00, v0);
            vector signed int qv01 = vec_msum(q8y01, q5x01, v0);
            vector signed int qv10 = vec_msum(q8y10, q5x10, v0);
            vector signed int qv11 = vec_msum(q8y11, q5x11, v0);

            vector signed int vscales_h = vec_unpackh(vscales);
            vector signed int vs0 = vec_splat(vscales_h, 0);
            vector signed int vs1 = vec_splat(vscales_h, 1);
            vscales = vec_sld(vscales, vscales, 12);  // rotate to next pair of scales

            vsumi0 = vec_add(vec_mul(qv00, vs0), vsumi0);
            vsumi1 = vec_add(vec_mul(qv10, vs0), vsumi1);
            vsumi2 = vec_add(vec_mul(qv01, vs1), vsumi2);
            vsumi3 = vec_add(vec_mul(qv11, vs1), vsumi3);
        }

        vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0);
        vsumf1 = vec_madd(vec_ctf(vsumi1, 0), vd, vsumf1);
        vsumf2 = vec_madd(vec_ctf(vsumi2, 0), vd, vsumf2);
        vsumf3 = vec_madd(vec_ctf(vsumi3, 0), vd, vsumf3);
    }

    // Horizontal reduction of the four float accumulators to a scalar.
    vsumf0 = vec_add(vsumf0, vsumf2);
    vsumf1 = vec_add(vsumf1, vsumf3);

    vsumf0 = vec_add(vsumf0, vsumf1);

    vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4));
    vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8));

    *s = vec_extract(vsumf0, 0);

#elif defined __loongarch_asx
    GGML_UNUSED(kmask1);
    GGML_UNUSED(kmask2);
    GGML_UNUSED(kmask3);

    const __m256i m4 = __lasx_xvreplgr2vr_b(0xF);
    const __m128i mzero = __lsx_vldi(0);
    const __m256i mone = __lasx_xvreplgr2vr_b(1);

    __m256 acc = (__m256)__lasx_xvldi(0);

    float summs = 0.f;

    for (int i = 0; i < nb; ++i) {

        const uint8_t * restrict q5 = x[i].qs;
        const int8_t * restrict q8 = y[i].qs;

        const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d);
        const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin);  // negated: correction is added

        memcpy(utmp, x[i].scales, 12);
        utmp[3] = ((utmp[2] >> 4) & kmask2) | (((utmp[1] >> 6) & kmask3) << 4);
        const uint32_t uaux = utmp[1] & kmask1;
        utmp[1] = (utmp[2] & kmask2) | (((utmp[0] >> 6) & kmask3) << 4);
        utmp[2] = uaux;
        utmp[0] &= kmask1;

        const __m256i mins_and_scales = lasx_extu8_16(lsx_set_w(utmp[3], utmp[2], utmp[1], utmp[0]));

        const __m256i q8sums = __lasx_xvld((const __m256i*)y[i].bsums, 0);
        const __m128i q8s = lsx_hadd_h(lasx_extracti128(q8sums, 0), lasx_extracti128(q8sums, 1));
        const __m128i prod = lsx_madd_h(lasx_extracti128(mins_and_scales, 1), q8s);
        const __m128i hsum = lsx_hadd_w(lsx_hadd_w(prod, mzero), mzero);
        summs += dmin * __lsx_vpickve2gr_w(hsum, 0);    //TODO check

        const __m128i sc128 = lasx_extracti128(mins_and_scales, 0);
        const __m256i scales = lasx_insertf128(sc128, sc128);

        const __m256i hbits = __lasx_xvld((const __m256i*)x[i].qh, 0);
        __m256i hmask = mone;

        __m256i sumi = __lasx_xvldi(0);

        int bit = 0;
        __m256i xvbit;  // variable shift amount (no immediate-shift form used here)

        for (int j = 0; j < QK_K/64; ++j) {

            const __m256i scale_0 = lasx_shuffle_b(scales, get_scale_shuffle_k4(2*j+0));
            const __m256i scale_1 = lasx_shuffle_b(scales, get_scale_shuffle_k4(2*j+1));

            const __m256i q5bits = __lasx_xvld((const __m256i*)q5, 0); q5 += 32;

            xvbit = __lasx_xvreplgr2vr_h(bit++);
            const __m256i q5l_0 = __lasx_xvand_v(q5bits, m4);
            const __m256i q5h_0 = __lasx_xvslli_h(__lasx_xvsrl_h(__lasx_xvand_v(hbits, hmask), xvbit), 4);
            const __m256i q5_0 = __lasx_xvadd_b(q5l_0, q5h_0);
            hmask = __lasx_xvslli_h(hmask, 1);

            xvbit = __lasx_xvreplgr2vr_h(bit++);
            const __m256i q5l_1 = __lasx_xvand_v(__lasx_xvsrli_h(q5bits, 4), m4);
            const __m256i q5h_1 = __lasx_xvslli_h(__lasx_xvsrl_h(__lasx_xvand_v(hbits, hmask), xvbit), 4);
            const __m256i q5_1 = __lasx_xvadd_b(q5l_1, q5h_1);
            hmask = __lasx_xvslli_h(hmask, 1);

            const __m256i q8_0 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32;
            const __m256i q8_1 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32;

            __m256i p16_0 = lasx_maddubs_h(q5_0, q8_0);
            __m256i p16_1 = lasx_maddubs_h(q5_1, q8_1);

            p16_0 = lasx_madd_h(scale_0, p16_0);
            p16_1 = lasx_madd_h(scale_1, p16_1);

            sumi = __lasx_xvadd_w(sumi, __lasx_xvadd_w(p16_0, p16_1));

        }

        __m256 vd = __lasx_xvreplfr2vr_s(d);
        acc = __lasx_xvfmadd_s(vd, __lasx_xvffint_s_w(sumi), acc);

    }

    *s = hsum_float_8(acc) + summs;

#else
    // Portable scalar fallback.

    const uint8_t * scales = (const uint8_t*)&utmp[0];
    const uint8_t * mins = (const uint8_t*)&utmp[2];

    int8_t aux8[QK_K];
    int16_t aux16[8];
    float sums [8];
    int32_t aux32[8];
    memset(sums, 0, 8*sizeof(float));

    float sumf = 0;
    for (int i = 0; i < nb; ++i) {
        const uint8_t * restrict q4 = x[i].qs;
        const uint8_t * restrict hm = x[i].qh;
        const int8_t * restrict q8 = y[i].qs;
        memset(aux32, 0, 8*sizeof(int32_t));
        int8_t * restrict a = aux8;
        uint8_t m = 1;
        // Expand the block to QK_K plain int8 weights: low nibble (or high
        // nibble) plus 16 when the matching qh bit is set.
        for (int j = 0; j < QK_K/64; ++j) {
            for (int l = 0; l < 32; ++l) a[l] = (int8_t)(q4[l] & 0xF);
            for (int l = 0; l < 32; ++l) a[l] += (hm[l] & m ? 16 : 0);
            a += 32; m <<= 1;
            for (int l = 0; l < 32; ++l) a[l] = (int8_t)(q4[l] >> 4);
            for (int l = 0; l < 32; ++l) a[l] += (hm[l] & m ? 16 : 0);
            a += 32; m <<= 1;
            q4 += 32;
        }
        memcpy(utmp, x[i].scales, 12);
        utmp[3] = ((utmp[2] >> 4) & kmask2) | (((utmp[1] >> 6) & kmask3) << 4);
        const uint32_t uaux = utmp[1] & kmask1;
        utmp[1] = (utmp[2] & kmask2) | (((utmp[0] >> 6) & kmask3) << 4);
        utmp[2] = uaux;
        utmp[0] &= kmask1;

        // Min correction: each min covers two bsums entries (32 weights).
        int sumi = 0;
        for (int j = 0; j < QK_K/16; ++j) sumi += y[i].bsums[j] * mins[j/2];
        a = aux8;
        int is = 0;
        for (int j = 0; j < QK_K/32; ++j) {
            int32_t scale = scales[is++];
            for (int l = 0; l < 8; ++l) aux16[l] = q8[l] * a[l];
            for (int l = 0; l < 8; ++l) aux32[l] += scale * aux16[l];
            q8 += 8; a += 8;
            for (int l = 0; l < 8; ++l) aux16[l] = q8[l] * a[l];
            for (int l = 0; l < 8; ++l) aux32[l] += scale * aux16[l];
            q8 += 8; a += 8;
            for (int l = 0; l < 8; ++l) aux16[l] = q8[l] * a[l];
            for (int l = 0; l < 8; ++l) aux32[l] += scale * aux16[l];
            q8 += 8; a += 8;
            for (int l = 0; l < 8; ++l) aux16[l] = q8[l] * a[l];
            for (int l = 0; l < 8; ++l) aux32[l] += scale * aux16[l];
            q8 += 8; a += 8;
        }
        const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d;
        for (int l = 0; l < 8; ++l) sums[l] += d * aux32[l];
        const float dmin = GGML_FP16_TO_FP32(x[i].dmin) * y[i].d;
        sumf -= dmin * sumi;
    }
    for (int l = 0; l < 8; ++l) sumf += sums[l];
    *s = sumf;
#endif
}

// Dot product of a Q6_K-quantized row (vx) with a Q8_K-quantized row (vy);
// result written to *s. (Definition continues past this chunk.)
void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) {
    assert(n % QK_K == 0);
    assert(nrc == 1);
    UNUSED(nrc);
    UNUSED(bx);
    UNUSED(by);
    UNUSED(bs);

    const block_q6_K * restrict x = vx;
    const block_q8_K * restrict y = vy;

    const int nb = n / QK_K;

#ifdef __ARM_NEON
    float sum = 0;

    const uint8x16_t m4b = vdupq_n_u8(0xF);
    const int32x4_t vzero = vdupq_n_s32(0);
    //const int8x16_t m32s = vdupq_n_s8(32);

    const uint8x16_t mone = vdupq_n_u8(3);

    ggml_int8x16x4_t q6bytes;
    ggml_uint8x16x4_t q6h;

    for (int i = 0; i < nb; ++i) {

        const float d_all = GGML_FP16_TO_FP32(x[i].d);

        const uint8_t * restrict q6 = x[i].ql;
        const uint8_t * restrict qh = x[i].qh;
        const int8_t * restrict q8 = y[i].qs;

        const int8_t * restrict scale = x[i].scales;

        // Correction term from the signed 8-bit scales and the q8 block sums.
        const ggml_int16x8x2_t q8sums = ggml_vld1q_s16_x2(y[i].bsums);
        const int8x16_t scales = vld1q_s8(scale);
        const ggml_int16x8x2_t q6scales = {{vmovl_s8(vget_low_s8(scales)), vmovl_s8(vget_high_s8(scales))}};

        const int32x4_t prod = vaddq_s32(vaddq_s32(vmull_s16(vget_low_s16 (q8sums.val[0]), vget_low_s16 (q6scales.val[0])),
                                                   vmull_s16(vget_high_s16(q8sums.val[0]), vget_high_s16(q6scales.val[0]))),
                                         vaddq_s32(vmull_s16(vget_low_s16 (q8sums.val[1]), vget_low_s16 (q6scales.val[1])),
                                                   vmull_s16(vget_high_s16(q8sums.val[1]), vget_high_s16(q6scales.val[1]))));
        int32_t isum_mins = vaddvq_s32(prod);

        int32_t isum = 0;

        for (int j = 0; j < QK_K/128; ++j) {

            ggml_uint8x16x2_t qhbits = ggml_vld1q_u8_x2(qh); qh += 32;
            ggml_uint8x16x4_t q6bits = ggml_vld1q_u8_x4(q6); q6 += 64;
            ggml_int8x16x4_t q8bytes = ggml_vld1q_s8_x4(q8); q8
+= 64; + + q6h.val[0] = vshlq_n_u8(vandq_u8(mone, qhbits.val[0]), 4); + q6h.val[1] = vshlq_n_u8(vandq_u8(mone, qhbits.val[1]), 4); + uint8x16_t shifted = vshrq_n_u8(qhbits.val[0], 2); + q6h.val[2] = vshlq_n_u8(vandq_u8(mone, shifted), 4); + shifted = vshrq_n_u8(qhbits.val[1], 2); + q6h.val[3] = vshlq_n_u8(vandq_u8(mone, shifted), 4); + + //q6bytes.val[0] = vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[0], m4b), q6h.val[0])), m32s); + //q6bytes.val[1] = vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[1], m4b), q6h.val[1])), m32s); + //q6bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[2], m4b), q6h.val[2])), m32s); + //q6bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[3], m4b), q6h.val[3])), m32s); + q6bytes.val[0] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[0], m4b), q6h.val[0])); + q6bytes.val[1] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[1], m4b), q6h.val[1])); + q6bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[2], m4b), q6h.val[2])); + q6bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[3], m4b), q6h.val[3])); + + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; + + scale += 4; + + q8bytes = ggml_vld1q_s8_x4(q8); q8 += 64; + + shifted = vshrq_n_u8(qhbits.val[0], 4); + q6h.val[0] = vshlq_n_u8(vandq_u8(mone, shifted), 4); + shifted = vshrq_n_u8(qhbits.val[1], 4); + q6h.val[1] = vshlq_n_u8(vandq_u8(mone, shifted), 4); + shifted = vshrq_n_u8(qhbits.val[0], 6); + q6h.val[2] = vshlq_n_u8(vandq_u8(mone, shifted), 4); + shifted = vshrq_n_u8(qhbits.val[1], 6); + q6h.val[3] = vshlq_n_u8(vandq_u8(mone, shifted), 4); + + //q6bytes.val[0] = 
vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[0], 4), q6h.val[0])), m32s); + //q6bytes.val[1] = vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[1], 4), q6h.val[1])), m32s); + //q6bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[2], 4), q6h.val[2])), m32s); + //q6bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[3], 4), q6h.val[3])), m32s); + q6bytes.val[0] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[0], 4), q6h.val[0])); + q6bytes.val[1] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[1], 4), q6h.val[1])); + q6bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[2], 4), q6h.val[2])); + q6bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[3], 4), q6h.val[3])); + + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; + scale += 4; + } + //sum += isum * d_all * y[i].d; + sum += d_all * y[i].d * (isum - 32 * isum_mins); + + } + *s = sum; + +#elif defined __AVX2__ + + const __m256i m4 = _mm256_set1_epi8(0xF); + const __m256i m2 = _mm256_set1_epi8(3); + const __m256i m32s = _mm256_set1_epi8(32); + + __m256 acc = _mm256_setzero_ps(); + + for (int i = 0; i < nb; ++i) { + + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + + const uint8_t * restrict q4 = x[i].ql; + const uint8_t * restrict qh = x[i].qh; + const int8_t * restrict q8 = y[i].qs; + + const __m128i scales = _mm_loadu_si128((const __m128i*)x[i].scales); + + __m256i sumi = _mm256_setzero_si256(); + + int is = 0; + + for (int j = 0; j < QK_K/128; ++j) { + + const __m128i scale_0 = _mm_shuffle_epi8(scales, get_scale_shuffle(is + 0)); + const __m128i scale_1 = _mm_shuffle_epi8(scales, get_scale_shuffle(is + 1)); + const 
__m128i scale_2 = _mm_shuffle_epi8(scales, get_scale_shuffle(is + 2)); + const __m128i scale_3 = _mm_shuffle_epi8(scales, get_scale_shuffle(is + 3)); + is += 4; + + const __m256i q4bits1 = _mm256_loadu_si256((const __m256i*)q4); q4 += 32; + const __m256i q4bits2 = _mm256_loadu_si256((const __m256i*)q4); q4 += 32; + const __m256i q4bitsH = _mm256_loadu_si256((const __m256i*)qh); qh += 32; + + const __m256i q4h_0 = _mm256_slli_epi16(_mm256_and_si256(q4bitsH, m2), 4); + const __m256i q4h_1 = _mm256_slli_epi16(_mm256_and_si256(_mm256_srli_epi16(q4bitsH, 2), m2), 4); + const __m256i q4h_2 = _mm256_slli_epi16(_mm256_and_si256(_mm256_srli_epi16(q4bitsH, 4), m2), 4); + const __m256i q4h_3 = _mm256_slli_epi16(_mm256_and_si256(_mm256_srli_epi16(q4bitsH, 6), m2), 4); + + const __m256i q4_0 = _mm256_or_si256(_mm256_and_si256(q4bits1, m4), q4h_0); + const __m256i q4_1 = _mm256_or_si256(_mm256_and_si256(q4bits2, m4), q4h_1); + const __m256i q4_2 = _mm256_or_si256(_mm256_and_si256(_mm256_srli_epi16(q4bits1, 4), m4), q4h_2); + const __m256i q4_3 = _mm256_or_si256(_mm256_and_si256(_mm256_srli_epi16(q4bits2, 4), m4), q4h_3); + + const __m256i q8_0 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + const __m256i q8_1 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + const __m256i q8_2 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + const __m256i q8_3 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + + __m256i q8s_0 = _mm256_maddubs_epi16(m32s, q8_0); + __m256i q8s_1 = _mm256_maddubs_epi16(m32s, q8_1); + __m256i q8s_2 = _mm256_maddubs_epi16(m32s, q8_2); + __m256i q8s_3 = _mm256_maddubs_epi16(m32s, q8_3); + + __m256i p16_0 = _mm256_maddubs_epi16(q4_0, q8_0); + __m256i p16_1 = _mm256_maddubs_epi16(q4_1, q8_1); + __m256i p16_2 = _mm256_maddubs_epi16(q4_2, q8_2); + __m256i p16_3 = _mm256_maddubs_epi16(q4_3, q8_3); + + p16_0 = _mm256_sub_epi16(p16_0, q8s_0); + p16_1 = _mm256_sub_epi16(p16_1, q8s_1); + p16_2 = _mm256_sub_epi16(p16_2, q8s_2); + p16_3 = 
_mm256_sub_epi16(p16_3, q8s_3); + + p16_0 = _mm256_madd_epi16(_mm256_cvtepi8_epi16(scale_0), p16_0); + p16_1 = _mm256_madd_epi16(_mm256_cvtepi8_epi16(scale_1), p16_1); + p16_2 = _mm256_madd_epi16(_mm256_cvtepi8_epi16(scale_2), p16_2); + p16_3 = _mm256_madd_epi16(_mm256_cvtepi8_epi16(scale_3), p16_3); + + sumi = _mm256_add_epi32(sumi, _mm256_add_epi32(p16_0, p16_1)); + sumi = _mm256_add_epi32(sumi, _mm256_add_epi32(p16_2, p16_3)); + + } + + acc = _mm256_fmadd_ps(_mm256_broadcast_ss(&d), _mm256_cvtepi32_ps(sumi), acc); + } + + *s = hsum_float_8(acc); + +#elif defined __AVX__ + + const __m128i m4 = _mm_set1_epi8(0xF); + const __m128i m3 = _mm_set1_epi8(3); + const __m128i m32s = _mm_set1_epi8(32); + const __m128i m2 = _mm_set1_epi8(2); + + __m256 acc = _mm256_setzero_ps(); + + for (int i = 0; i < nb; ++i) { + + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + + const uint8_t * restrict q4 = x[i].ql; + const uint8_t * restrict qh = x[i].qh; + const int8_t * restrict q8 = y[i].qs; + + const __m128i scales = _mm_loadu_si128((const __m128i*)x[i].scales); + + __m128i sumi_0 = _mm_setzero_si128(); + __m128i sumi_1 = _mm_setzero_si128(); + + __m128i shuffle = _mm_set_epi64x(0x0101010101010101, 0x0000000000000000); + for (int j = 0; j < QK_K/128; ++j) { + + const __m128i q4bitsH_0 = _mm_loadu_si128((const __m128i*)qh); qh += 16; + const __m128i q4bitsH_1 = _mm_loadu_si128((const __m128i*)qh); qh += 16; + + const __m128i q4h_0 = _mm_slli_epi16(_mm_and_si128(q4bitsH_0, m3), 4); + const __m128i q4h_1 = _mm_slli_epi16(_mm_and_si128(q4bitsH_1, m3), 4); + const __m128i q4h_2 = _mm_slli_epi16(_mm_and_si128(_mm_srli_epi16(q4bitsH_0, 2), m3), 4); + const __m128i q4h_3 = _mm_slli_epi16(_mm_and_si128(_mm_srli_epi16(q4bitsH_1, 2), m3), 4); + const __m128i q4h_4 = _mm_slli_epi16(_mm_and_si128(_mm_srli_epi16(q4bitsH_0, 4), m3), 4); + const __m128i q4h_5 = _mm_slli_epi16(_mm_and_si128(_mm_srli_epi16(q4bitsH_1, 4), m3), 4); + const __m128i q4h_6 = 
_mm_slli_epi16(_mm_and_si128(_mm_srli_epi16(q4bitsH_0, 6), m3), 4); + const __m128i q4h_7 = _mm_slli_epi16(_mm_and_si128(_mm_srli_epi16(q4bitsH_1, 6), m3), 4); + + const __m128i q4bits1_0 = _mm_loadu_si128((const __m128i*)q4); q4 += 16; + const __m128i q4bits1_1 = _mm_loadu_si128((const __m128i*)q4); q4 += 16; + const __m128i q4bits2_0 = _mm_loadu_si128((const __m128i*)q4); q4 += 16; + const __m128i q4bits2_1 = _mm_loadu_si128((const __m128i*)q4); q4 += 16; + + const __m128i q4_0 = _mm_or_si128(_mm_and_si128(q4bits1_0, m4), q4h_0); + const __m128i q4_1 = _mm_or_si128(_mm_and_si128(q4bits1_1, m4), q4h_1); + const __m128i q4_2 = _mm_or_si128(_mm_and_si128(q4bits2_0, m4), q4h_2); + const __m128i q4_3 = _mm_or_si128(_mm_and_si128(q4bits2_1, m4), q4h_3); + const __m128i q4_4 = _mm_or_si128(_mm_and_si128(_mm_srli_epi16(q4bits1_0, 4), m4), q4h_4); + const __m128i q4_5 = _mm_or_si128(_mm_and_si128(_mm_srli_epi16(q4bits1_1, 4), m4), q4h_5); + const __m128i q4_6 = _mm_or_si128(_mm_and_si128(_mm_srli_epi16(q4bits2_0, 4), m4), q4h_6); + const __m128i q4_7 = _mm_or_si128(_mm_and_si128(_mm_srli_epi16(q4bits2_1, 4), m4), q4h_7); + + const __m128i q8_0 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + const __m128i q8_1 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + const __m128i q8_2 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + const __m128i q8_3 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + const __m128i q8_4 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + const __m128i q8_5 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + const __m128i q8_6 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + const __m128i q8_7 = _mm_loadu_si128((const __m128i*)q8); q8 += 16; + + __m128i q8s_0 = _mm_maddubs_epi16(m32s, q8_0); + __m128i q8s_1 = _mm_maddubs_epi16(m32s, q8_1); + __m128i q8s_2 = _mm_maddubs_epi16(m32s, q8_2); + __m128i q8s_3 = _mm_maddubs_epi16(m32s, q8_3); + __m128i q8s_4 = _mm_maddubs_epi16(m32s, q8_4); + __m128i q8s_5 = _mm_maddubs_epi16(m32s, q8_5); + __m128i 
q8s_6 = _mm_maddubs_epi16(m32s, q8_6); + __m128i q8s_7 = _mm_maddubs_epi16(m32s, q8_7); + + __m128i p16_0 = _mm_maddubs_epi16(q4_0, q8_0); + __m128i p16_1 = _mm_maddubs_epi16(q4_1, q8_1); + __m128i p16_2 = _mm_maddubs_epi16(q4_2, q8_2); + __m128i p16_3 = _mm_maddubs_epi16(q4_3, q8_3); + __m128i p16_4 = _mm_maddubs_epi16(q4_4, q8_4); + __m128i p16_5 = _mm_maddubs_epi16(q4_5, q8_5); + __m128i p16_6 = _mm_maddubs_epi16(q4_6, q8_6); + __m128i p16_7 = _mm_maddubs_epi16(q4_7, q8_7); + + p16_0 = _mm_sub_epi16(p16_0, q8s_0); + p16_1 = _mm_sub_epi16(p16_1, q8s_1); + p16_2 = _mm_sub_epi16(p16_2, q8s_2); + p16_3 = _mm_sub_epi16(p16_3, q8s_3); + p16_4 = _mm_sub_epi16(p16_4, q8s_4); + p16_5 = _mm_sub_epi16(p16_5, q8s_5); + p16_6 = _mm_sub_epi16(p16_6, q8s_6); + p16_7 = _mm_sub_epi16(p16_7, q8s_7); + + const __m128i scale_0 = _mm_shuffle_epi8(scales, shuffle); + shuffle = _mm_add_epi8(shuffle, m2); + const __m128i scale_1 = _mm_shuffle_epi8(scales, shuffle); + shuffle = _mm_add_epi8(shuffle, m2); + const __m128i scale_2 = _mm_shuffle_epi8(scales, shuffle); + shuffle = _mm_add_epi8(shuffle, m2); + const __m128i scale_3 = _mm_shuffle_epi8(scales, shuffle); + shuffle = _mm_add_epi8(shuffle, m2); + + p16_0 = _mm_madd_epi16(_mm_cvtepi8_epi16(scale_0), p16_0); + p16_1 = _mm_madd_epi16(_mm_cvtepi8_epi16(_mm_unpackhi_epi64(scale_0, scale_0)), p16_1); + p16_2 = _mm_madd_epi16(_mm_cvtepi8_epi16(scale_1), p16_2); + p16_3 = _mm_madd_epi16(_mm_cvtepi8_epi16(_mm_unpackhi_epi64(scale_1, scale_1)), p16_3); + p16_4 = _mm_madd_epi16(_mm_cvtepi8_epi16(scale_2), p16_4); + p16_5 = _mm_madd_epi16(_mm_cvtepi8_epi16(_mm_unpackhi_epi64(scale_2, scale_2)), p16_5); + p16_6 = _mm_madd_epi16(_mm_cvtepi8_epi16(scale_3), p16_6); + p16_7 = _mm_madd_epi16(_mm_cvtepi8_epi16(_mm_unpackhi_epi64(scale_3, scale_3)), p16_7); + + sumi_0 = _mm_add_epi32(sumi_0, _mm_add_epi32(p16_0, p16_2)); + sumi_1 = _mm_add_epi32(sumi_1, _mm_add_epi32(p16_1, p16_3)); + sumi_0 = _mm_add_epi32(sumi_0, _mm_add_epi32(p16_4, p16_6)); + 
sumi_1 = _mm_add_epi32(sumi_1, _mm_add_epi32(p16_5, p16_7)); + + } + + __m256i sumi = MM256_SET_M128I(sumi_1, sumi_0); + acc = _mm256_add_ps(_mm256_mul_ps(_mm256_broadcast_ss(&d), _mm256_cvtepi32_ps(sumi)), acc); + } + + *s = hsum_float_8(acc); + +#elif defined __riscv_v_intrinsic + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + + const uint8_t * restrict q6 = x[i].ql; + const uint8_t * restrict qh = x[i].qh; + const int8_t * restrict q8 = y[i].qs; + + const int8_t * restrict scale = x[i].scales; + + size_t vl; + + vint32m1_t vzero = __riscv_vmv_v_x_i32m1(0, 1); + + int sum_t = 0; + int is = 0; + + for (int j = 0; j < QK_K/128; ++j) { + + vl = 32; + + // load qh + vuint8m1_t qh_x = __riscv_vle8_v_u8m1(qh, vl); + + // load Q6 + vuint8m1_t q6_0 = __riscv_vle8_v_u8m1(q6, vl); + vuint8m1_t q6_1 = __riscv_vle8_v_u8m1(q6+32, vl); + + vuint8m1_t q6a_0 = __riscv_vand_vx_u8m1(q6_0, 0x0F, vl); + vuint8m1_t q6a_1 = __riscv_vand_vx_u8m1(q6_1, 0x0F, vl); + vuint8m1_t q6s_0 = __riscv_vsrl_vx_u8m1(q6_0, 0x04, vl); + vuint8m1_t q6s_1 = __riscv_vsrl_vx_u8m1(q6_1, 0x04, vl); + + vuint8m1_t qh_0 = __riscv_vand_vx_u8m1(qh_x, 0x03, vl); + vuint8m1_t qh_1 = __riscv_vand_vx_u8m1(__riscv_vsrl_vx_u8m1(qh_x, 0x2, vl), 0x03 , vl); + vuint8m1_t qh_2 = __riscv_vand_vx_u8m1(__riscv_vsrl_vx_u8m1(qh_x, 0x4, vl), 0x03 , vl); + vuint8m1_t qh_3 = __riscv_vand_vx_u8m1(__riscv_vsrl_vx_u8m1(qh_x, 0x6, vl), 0x03 , vl); + + vuint8m1_t qhi_0 = __riscv_vor_vv_u8m1(q6a_0, __riscv_vsll_vx_u8m1(qh_0, 0x04, vl), vl); + vuint8m1_t qhi_1 = __riscv_vor_vv_u8m1(q6a_1, __riscv_vsll_vx_u8m1(qh_1, 0x04, vl), vl); + vuint8m1_t qhi_2 = __riscv_vor_vv_u8m1(q6s_0, __riscv_vsll_vx_u8m1(qh_2, 0x04, vl), vl); + vuint8m1_t qhi_3 = __riscv_vor_vv_u8m1(q6s_1, __riscv_vsll_vx_u8m1(qh_3, 0x04, vl), vl); + + vint8m1_t a_0 = __riscv_vsub_vx_i8m1(__riscv_vreinterpret_v_u8m1_i8m1(qhi_0), 32, vl); + vint8m1_t a_1 = 
__riscv_vsub_vx_i8m1(__riscv_vreinterpret_v_u8m1_i8m1(qhi_1), 32, vl); + vint8m1_t a_2 = __riscv_vsub_vx_i8m1(__riscv_vreinterpret_v_u8m1_i8m1(qhi_2), 32, vl); + vint8m1_t a_3 = __riscv_vsub_vx_i8m1(__riscv_vreinterpret_v_u8m1_i8m1(qhi_3), 32, vl); + + // load Q8 and take product + vint16m2_t va_q_0 = __riscv_vwmul_vv_i16m2(a_0, __riscv_vle8_v_i8m1(q8, vl), vl); + vint16m2_t va_q_1 = __riscv_vwmul_vv_i16m2(a_1, __riscv_vle8_v_i8m1(q8+32, vl), vl); + vint16m2_t va_q_2 = __riscv_vwmul_vv_i16m2(a_2, __riscv_vle8_v_i8m1(q8+64, vl), vl); + vint16m2_t va_q_3 = __riscv_vwmul_vv_i16m2(a_3, __riscv_vle8_v_i8m1(q8+96, vl), vl); + + vl = 16; + + vint32m2_t vaux_0 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(va_q_0, 0), scale[is+0], vl); + vint32m2_t vaux_1 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(va_q_0, 1), scale[is+1], vl); + vint32m2_t vaux_2 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(va_q_1, 0), scale[is+2], vl); + vint32m2_t vaux_3 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(va_q_1, 1), scale[is+3], vl); + vint32m2_t vaux_4 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(va_q_2, 0), scale[is+4], vl); + vint32m2_t vaux_5 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(va_q_2, 1), scale[is+5], vl); + vint32m2_t vaux_6 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(va_q_3, 0), scale[is+6], vl); + vint32m2_t vaux_7 = __riscv_vwmul_vx_i32m2(__riscv_vget_v_i16m2_i16m1(va_q_3, 1), scale[is+7], vl); + + vint32m1_t isum0 = __riscv_vredsum_vs_i32m2_i32m1(__riscv_vadd_vv_i32m2(vaux_0, vaux_1, vl), vzero, vl); + vint32m1_t isum1 = __riscv_vredsum_vs_i32m2_i32m1(__riscv_vadd_vv_i32m2(vaux_2, vaux_3, vl), isum0, vl); + vint32m1_t isum2 = __riscv_vredsum_vs_i32m2_i32m1(__riscv_vadd_vv_i32m2(vaux_4, vaux_5, vl), isum1, vl); + vint32m1_t isum3 = __riscv_vredsum_vs_i32m2_i32m1(__riscv_vadd_vv_i32m2(vaux_6, vaux_7, vl), isum2, vl); + + sum_t += __riscv_vmv_x_s_i32m1_i32(isum3); + + q6 += 64; qh += 32; q8 += 128; is=8; + + } + + sumf += d * 
sum_t; + + } + + *s = sumf; + +#elif defined(__POWER9_VECTOR__) + const vector signed char lowMask = vec_splats((signed char)0xF); + const vector int v0 = vec_splats((int32_t)0); + const vector unsigned char v2 = vec_splats((unsigned char)0x2); + const vector unsigned char v3 = vec_splats((unsigned char)0x3); + const vector unsigned char v4 = vec_splats((unsigned char)0x4); + const vector unsigned char v6 = vec_splats((unsigned char)0x6); + const vector signed char off = vec_splats((signed char)0x20); + + vector float vsumf0 = vec_splats(0.0f); + vector float vsumf1 = vec_splats(0.0f); + vector float vsumf2 = vec_splats(0.0f); + vector float vsumf3 = vec_splats(0.0f); + + for (int i = 0; i < nb; ++i) { + vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[i].d)); + vector float vyd = vec_splats(y[i].d); + vector float vd = vec_mul(vxd, vyd); + + vector signed int vsumi0 = v0; + vector signed int vsumi1 = v0; + vector signed int vsumi2 = v0; + vector signed int vsumi3 = v0; + vector signed int vsumi4 = v0; + vector signed int vsumi5 = v0; + vector signed int vsumi6 = v0; + vector signed int vsumi7 = v0; + + const uint8_t * restrict q6 = x[i].ql; + const uint8_t * restrict qh = x[i].qh; + const int8_t * restrict qs = x[i].scales; + const int8_t * restrict q8 = y[i].qs; + + for (int j = 0; j < QK_K/128; ++j) { + __builtin_prefetch(q6, 0, 0); + __builtin_prefetch(qh, 0, 0); + __builtin_prefetch(q8, 0, 0); + + vector signed char qxs0 = (vector signed char)vec_xl( 0, q6); + vector signed char qxs1 = (vector signed char)vec_xl(16, q6); + vector signed char qxs2 = (vector signed char)vec_xl(32, q6); + vector signed char qxs3 = (vector signed char)vec_xl(48, q6); + q6 += 64; + + vector signed char qxs00 = vec_and(qxs0, lowMask); + vector signed char qxs01 = vec_sr(qxs0, v4); + vector signed char qxs10 = vec_and(qxs1, lowMask); + vector signed char qxs11 = vec_sr(qxs1, v4); + vector signed char qxs20 = vec_and(qxs2, lowMask); + vector signed char qxs21 = vec_sr(qxs2, v4); + 
vector signed char qxs30 = vec_and(qxs3, lowMask); + vector signed char qxs31 = vec_sr(qxs3, v4); + + vector signed char qxhs0 = (vector signed char)vec_xl( 0, qh); + vector signed char qxhs1 = (vector signed char)vec_xl(16, qh); + qh += 32; + + vector signed char qxh00 = vec_sl(vec_and((vector signed char)v3, qxhs0), v4); + vector signed char qxh01 = vec_sl(vec_and((vector signed char)v3, vec_sr(qxhs0, v4)), v4); + vector signed char qxh10 = vec_sl(vec_and((vector signed char)v3, qxhs1), v4); + vector signed char qxh11 = vec_sl(vec_and((vector signed char)v3, vec_sr(qxhs1, v4)), v4); + vector signed char qxh20 = vec_sl(vec_and((vector signed char)v3, vec_sr(qxhs0, v2)), v4); + vector signed char qxh21 = vec_sl(vec_and((vector signed char)v3, vec_sr(qxhs0, v6)), v4); + vector signed char qxh30 = vec_sl(vec_and((vector signed char)v3, vec_sr(qxhs1, v2)), v4); + vector signed char qxh31 = vec_sl(vec_and((vector signed char)v3, vec_sr(qxhs1, v6)), v4); + + vector signed char q6x00 = vec_sub(vec_or(qxh00, qxs00), off); + vector signed char q6x01 = vec_sub(vec_or(qxh01, qxs01), off); + vector signed char q6x10 = vec_sub(vec_or(qxh10, qxs10), off); + vector signed char q6x11 = vec_sub(vec_or(qxh11, qxs11), off); + vector signed char q6x20 = vec_sub(vec_or(qxh20, qxs20), off); + vector signed char q6x21 = vec_sub(vec_or(qxh21, qxs21), off); + vector signed char q6x30 = vec_sub(vec_or(qxh30, qxs30), off); + vector signed char q6x31 = vec_sub(vec_or(qxh31, qxs31), off); + + vector signed char q8y00 = vec_xl( 0, q8); + vector signed char q8y10 = vec_xl( 16, q8); + vector signed char q8y20 = vec_xl( 32, q8); + vector signed char q8y30 = vec_xl( 48, q8); + vector signed char q8y01 = vec_xl( 64, q8); + vector signed char q8y11 = vec_xl( 80, q8); + vector signed char q8y21 = vec_xl( 96, q8); + vector signed char q8y31 = vec_xl(112, q8); + q8 += 128; + + vector signed short qv00 = vec_add(vec_mule(q6x00, q8y00), vec_mulo(q6x00, q8y00)); + vector signed short qv10 = 
vec_add(vec_mule(q6x10, q8y10), vec_mulo(q6x10, q8y10)); + vector signed short qv20 = vec_add(vec_mule(q6x20, q8y20), vec_mulo(q6x20, q8y20)); + vector signed short qv30 = vec_add(vec_mule(q6x30, q8y30), vec_mulo(q6x30, q8y30)); + vector signed short qv01 = vec_add(vec_mule(q6x01, q8y01), vec_mulo(q6x01, q8y01)); + vector signed short qv11 = vec_add(vec_mule(q6x11, q8y11), vec_mulo(q6x11, q8y11)); + vector signed short qv21 = vec_add(vec_mule(q6x21, q8y21), vec_mulo(q6x21, q8y21)); + vector signed short qv31 = vec_add(vec_mule(q6x31, q8y31), vec_mulo(q6x31, q8y31)); + + vector signed short vscales = vec_unpackh(vec_xl_len(qs, 8)); + qs += 8; + + vector signed short vs0 = vec_splat(vscales, 0); + vector signed short vs1 = vec_splat(vscales, 1); + vector signed short vs2 = vec_splat(vscales, 2); + vector signed short vs3 = vec_splat(vscales, 3); + vector signed short vs4 = vec_splat(vscales, 4); + vector signed short vs5 = vec_splat(vscales, 5); + vector signed short vs6 = vec_splat(vscales, 6); + vector signed short vs7 = vec_splat(vscales, 7); + + vsumi0 = vec_msum(qv00, vs0, vsumi0); + vsumi1 = vec_msum(qv01, vs4, vsumi1); + vsumi2 = vec_msum(qv10, vs1, vsumi2); + vsumi3 = vec_msum(qv11, vs5, vsumi3); + vsumi4 = vec_msum(qv20, vs2, vsumi4); + vsumi5 = vec_msum(qv21, vs6, vsumi5); + vsumi6 = vec_msum(qv30, vs3, vsumi6); + vsumi7 = vec_msum(qv31, vs7, vsumi7); + } + + vsumi0 = vec_add(vsumi0, vsumi4); + vsumi1 = vec_add(vsumi1, vsumi5); + vsumi2 = vec_add(vsumi2, vsumi6); + vsumi3 = vec_add(vsumi3, vsumi7); + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0); + vsumf1 = vec_madd(vec_ctf(vsumi1, 0), vd, vsumf1); + vsumf2 = vec_madd(vec_ctf(vsumi2, 0), vd, vsumf2); + vsumf3 = vec_madd(vec_ctf(vsumi3, 0), vd, vsumf3); + } + + vsumf0 = vec_add(vsumf0, vsumf2); + vsumf1 = vec_add(vsumf1, vsumf3); + + vsumf0 = vec_add(vsumf0, vsumf1); + + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8)); + + *s = 
vec_extract(vsumf0, 0); + +#elif defined __loongarch_asx + + const __m256i m4 = __lasx_xvreplgr2vr_b(0xF); + const __m256i m2 = __lasx_xvreplgr2vr_b(3); + const __m256i m32s = __lasx_xvreplgr2vr_b(32); + + __m256 acc = (__m256)__lasx_xvldi(0); + + for (int i = 0; i < nb; ++i) { + + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + + const uint8_t * restrict q4 = x[i].ql; + const uint8_t * restrict qh = x[i].qh; + const int8_t * restrict q8 = y[i].qs; + + const __m128i scales = __lsx_vld((const __m128i*)x[i].scales, 0); + + __m256i sumi = __lasx_xvldi(0); + + int is = 0; + + for (int j = 0; j < QK_K/128; ++j) { + + const __m128i scale_0 = lsx_shuffle_b(scales, get_scale_shuffle(is + 0)); + const __m128i scale_1 = lsx_shuffle_b(scales, get_scale_shuffle(is + 1)); + const __m128i scale_2 = lsx_shuffle_b(scales, get_scale_shuffle(is + 2)); + const __m128i scale_3 = lsx_shuffle_b(scales, get_scale_shuffle(is + 3)); + is += 4; + + const __m256i q4bits1 = __lasx_xvld((const __m256i*)q4, 0); q4 += 32; + const __m256i q4bits2 = __lasx_xvld((const __m256i*)q4, 0); q4 += 32; + const __m256i q4bitsH = __lasx_xvld((const __m256i*)qh, 0); qh += 32; + + const __m256i q4h_0 = __lasx_xvslli_h(__lasx_xvand_v(q4bitsH, m2), 4); + const __m256i q4h_1 = __lasx_xvslli_h(__lasx_xvand_v(__lasx_xvsrli_h(q4bitsH, 2), m2), 4); + const __m256i q4h_2 = __lasx_xvslli_h(__lasx_xvand_v(__lasx_xvsrli_h(q4bitsH, 4), m2), 4); + const __m256i q4h_3 = __lasx_xvslli_h(__lasx_xvand_v(__lasx_xvsrli_h(q4bitsH, 6), m2), 4); + + const __m256i q4_0 = __lasx_xvor_v(__lasx_xvand_v(q4bits1, m4), q4h_0); + const __m256i q4_1 = __lasx_xvor_v(__lasx_xvand_v(q4bits2, m4), q4h_1); + const __m256i q4_2 = __lasx_xvor_v(__lasx_xvand_v(__lasx_xvsrli_h(q4bits1, 4), m4), q4h_2); + const __m256i q4_3 = __lasx_xvor_v(__lasx_xvand_v(__lasx_xvsrli_h(q4bits2, 4), m4), q4h_3); + + const __m256i q8_0 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32; + const __m256i q8_1 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32; + const 
__m256i q8_2 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32; + const __m256i q8_3 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32; + + __m256i q8s_0 = lasx_maddubs_h(m32s, q8_0); + __m256i q8s_1 = lasx_maddubs_h(m32s, q8_1); + __m256i q8s_2 = lasx_maddubs_h(m32s, q8_2); + __m256i q8s_3 = lasx_maddubs_h(m32s, q8_3); + + __m256i p16_0 = lasx_maddubs_h(q4_0, q8_0); + __m256i p16_1 = lasx_maddubs_h(q4_1, q8_1); + __m256i p16_2 = lasx_maddubs_h(q4_2, q8_2); + __m256i p16_3 = lasx_maddubs_h(q4_3, q8_3); + + p16_0 = __lasx_xvsub_h(p16_0, q8s_0); + p16_1 = __lasx_xvsub_h(p16_1, q8s_1); + p16_2 = __lasx_xvsub_h(p16_2, q8s_2); + p16_3 = __lasx_xvsub_h(p16_3, q8s_3); + + p16_0 = lasx_madd_h(lasx_ext8_16(scale_0), p16_0); + p16_1 = lasx_madd_h(lasx_ext8_16(scale_1), p16_1); + p16_2 = lasx_madd_h(lasx_ext8_16(scale_2), p16_2); + p16_3 = lasx_madd_h(lasx_ext8_16(scale_3), p16_3); + + sumi = __lasx_xvadd_w(sumi, __lasx_xvadd_w(p16_0, p16_1)); + sumi = __lasx_xvadd_w(sumi, __lasx_xvadd_w(p16_2, p16_3)); + } + + acc = __lasx_xvfmadd_s((__m256)__lasx_xvreplfr2vr_s(d), __lasx_xvffint_s_w(sumi), acc); + } + + *s = hsum_float_8(acc); + +#else + + int8_t aux8[QK_K]; + int16_t aux16[8]; + float sums [8]; + int32_t aux32[8]; + memset(sums, 0, 8*sizeof(float)); + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + const uint8_t * restrict q4 = x[i].ql; + const uint8_t * restrict qh = x[i].qh; + const int8_t * restrict q8 = y[i].qs; + memset(aux32, 0, 8*sizeof(int32_t)); + int8_t * restrict a = aux8; + for (int j = 0; j < QK_K; j += 128) { + for (int l = 0; l < 32; ++l) { + a[l + 0] = (int8_t)((q4[l + 0] & 0xF) | (((qh[l] >> 0) & 3) << 4)) - 32; + a[l + 32] = (int8_t)((q4[l + 32] & 0xF) | (((qh[l] >> 2) & 3) << 4)) - 32; + a[l + 64] = (int8_t)((q4[l + 0] >> 4) | (((qh[l] >> 4) & 3) << 4)) - 32; + a[l + 96] = (int8_t)((q4[l + 32] >> 4) | (((qh[l] >> 6) & 3) << 4)) - 32; + } + a += 128; + q4 += 64; + qh += 32; + } + a = aux8; + int is = 0; + for (int j = 0; j < QK_K/16; ++j) { + int scale = 
x[i].scales[is++]; + for (int l = 0; l < 8; ++l) aux16[l] = q8[l] * a[l]; + for (int l = 0; l < 8; ++l) aux32[l] += scale * aux16[l]; + q8 += 8; a += 8; + for (int l = 0; l < 8; ++l) aux16[l] = q8[l] * a[l]; + for (int l = 0; l < 8; ++l) aux32[l] += scale * aux16[l]; + q8 += 8; a += 8; + } + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + for (int l = 0; l < 8; ++l) sums[l] += d * aux32[l]; + } + for (int l = 0; l < 8; ++l) sumf += sums[l]; + *s = sumf; +#endif +} + +#if defined (__AVX__) || defined (__AVX2__) || defined (__ARM_NEON) || defined (__POWER9_VECTOR__) || defined(__loongarch_asx) +static const int8_t keven_signs_q2xs[1024] = { + 1, 1, 1, 1, 1, 1, 1, 1, -1, 1, 1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, 1, + 1, 1, -1, 1, 1, 1, 1, -1, -1, 1, -1, 1, 1, 1, 1, 1, 1, -1, -1, 1, 1, 1, 1, 1, -1, -1, -1, 1, 1, 1, 1, -1, + 1, 1, 1, -1, 1, 1, 1, -1, -1, 1, 1, -1, 1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 1, -1, -1, 1, -1, 1, 1, 1, -1, + 1, 1, -1, -1, 1, 1, 1, 1, -1, 1, -1, -1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, -1, -1, 1, 1, 1, 1, + 1, 1, 1, 1, -1, 1, 1, -1, -1, 1, 1, 1, -1, 1, 1, 1, 1, -1, 1, 1, -1, 1, 1, 1, -1, -1, 1, 1, -1, 1, 1, -1, + 1, 1, -1, 1, -1, 1, 1, 1, -1, 1, -1, 1, -1, 1, 1, -1, 1, -1, -1, 1, -1, 1, 1, -1, -1, -1, -1, 1, -1, 1, 1, 1, + 1, 1, 1, -1, -1, 1, 1, 1, -1, 1, 1, -1, -1, 1, 1, -1, 1, -1, 1, -1, -1, 1, 1, -1, -1, -1, 1, -1, -1, 1, 1, 1, + 1, 1, -1, -1, -1, 1, 1, -1, -1, 1, -1, -1, -1, 1, 1, 1, 1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1, -1, -1, 1, 1, -1, + 1, 1, 1, 1, 1, -1, 1, -1, -1, 1, 1, 1, 1, -1, 1, 1, 1, -1, 1, 1, 1, -1, 1, 1, -1, -1, 1, 1, 1, -1, 1, -1, + 1, 1, -1, 1, 1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 1, -1, 1, -1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, -1, 1, 1, + 1, 1, 1, -1, 1, -1, 1, 1, -1, 1, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1, -1, -1, 1, -1, 1, -1, 1, 1, + 1, 1, -1, -1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, 1, -1, 1, 1, -1, -1, -1, -1, 1, -1, 1, -1, + 1, 1, 1, 1, -1, 
-1, 1, 1, -1, 1, 1, 1, -1, -1, 1, -1, 1, -1, 1, 1, -1, -1, 1, -1, -1, -1, 1, 1, -1, -1, 1, 1, + 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, 1, -1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 1, 1, -1, -1, -1, 1, -1, -1, 1, -1, + 1, 1, 1, -1, -1, -1, 1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, -1, -1, 1, -1, -1, -1, 1, -1, + 1, 1, -1, -1, -1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, -1, 1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, 1, + 1, 1, 1, 1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, -1, 1, 1, -1, 1, 1, 1, 1, -1, 1, -1, -1, 1, 1, 1, 1, -1, -1, + 1, 1, -1, 1, 1, 1, -1, 1, -1, 1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 1, 1, 1, -1, -1, -1, -1, -1, 1, 1, 1, -1, 1, + 1, 1, 1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 1, 1, -1, -1, 1, -1, 1, -1, 1, 1, -1, -1, -1, -1, 1, -1, 1, 1, -1, 1, + 1, 1, -1, -1, 1, 1, -1, -1, -1, 1, -1, -1, 1, 1, -1, 1, 1, -1, -1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, -1, -1, + 1, 1, 1, 1, -1, 1, -1, 1, -1, 1, 1, 1, -1, 1, -1, -1, 1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, -1, 1, -1, 1, + 1, 1, -1, 1, -1, 1, -1, -1, -1, 1, -1, 1, -1, 1, -1, 1, 1, -1, -1, 1, -1, 1, -1, 1, -1, -1, -1, 1, -1, 1, -1, -1, + 1, 1, 1, -1, -1, 1, -1, -1, -1, 1, 1, -1, -1, 1, -1, 1, 1, -1, 1, -1, -1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, + 1, 1, -1, -1, -1, 1, -1, 1, -1, 1, -1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, 1, + 1, 1, 1, 1, 1, -1, -1, 1, -1, 1, 1, 1, 1, -1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, -1, -1, 1, 1, 1, -1, -1, 1, + 1, 1, -1, 1, 1, -1, -1, -1, -1, 1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, -1, -1, -1, 1, 1, -1, -1, -1, + 1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, -1, 1, -1, -1, 1, 1, -1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, 1, -1, -1, -1, + 1, 1, -1, -1, 1, -1, -1, 1, -1, 1, -1, -1, 1, -1, -1, -1, 1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, 1, + 1, 1, 1, 1, -1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, -1, 1, 1, -1, -1, -1, 1, -1, -1, 1, 1, -1, -1, -1, -1, + 1, 1, -1, 1, -1, -1, -1, 1, -1, 1, -1, 1, -1, -1, -1, -1, 
1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, 1, + 1, 1, 1, -1, -1, -1, -1, 1, -1, 1, 1, -1, -1, -1, -1, -1, 1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, 1, + 1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, 1, 1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, -1, +}; +#endif + +void ggml_vec_dot_iq2_xxs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_iq2_xxs * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined(__ARM_NEON) + + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + uint32_t aux32[4]; + const uint8_t * aux8 = (const uint8_t *)aux32; + + ggml_int8x16x4_t q2u; + ggml_int8x16x4_t q2s; + ggml_int8x16x4_t q8b; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + float sumf1 = 0, sumf2 = 0; + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; + memcpy(aux32, q2, 4*sizeof(uint32_t)); q2 += 8; + q2u.val[0] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[ 0])), vld1_s8((const void *)(iq2xxs_grid + aux8[ 1]))); + q2u.val[1] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[ 2])), vld1_s8((const void *)(iq2xxs_grid + aux8[ 3]))); + q2u.val[2] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[ 8])), vld1_s8((const void *)(iq2xxs_grid + aux8[ 9]))); + q2u.val[3] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[10])), vld1_s8((const void *)(iq2xxs_grid + aux8[11]))); + q2s.val[0] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[1] >> 0) & 127))), vld1_s8((const void *)(signs64 + ((aux32[1] >> 7) & 127)))); + q2s.val[1] = vcombine_s8(vld1_s8((const void *)(signs64 + 
((aux32[1] >> 14) & 127))), vld1_s8((const void *)(signs64 + ((aux32[1] >> 21) & 127)))); + q2s.val[2] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[3] >> 0) & 127))), vld1_s8((const void *)(signs64 + ((aux32[3] >> 7) & 127)))); + q2s.val[3] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[3] >> 14) & 127))), vld1_s8((const void *)(signs64 + ((aux32[3] >> 21) & 127)))); + q2u.val[0] = vmulq_s8(q2u.val[0], q2s.val[0]); + q2u.val[1] = vmulq_s8(q2u.val[1], q2s.val[1]); + q2u.val[2] = vmulq_s8(q2u.val[2], q2s.val[2]); + q2u.val[3] = vmulq_s8(q2u.val[3], q2s.val[3]); + const int32x4_t p1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[0], q8b.val[0]), q2u.val[1], q8b.val[1]); + const int32x4_t p2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[2], q8b.val[2]), q2u.val[3], q8b.val[3]); + sumf1 += vaddvq_s32(p1) * (0.5f + (aux32[1] >> 28)); + sumf2 += vaddvq_s32(p2) * (0.5f + (aux32[3] >> 28)); + } + sumf += d*(sumf1 + sumf2); + } + *s = 0.25f * sumf; + +#elif defined(__AVX2__) + + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + uint32_t aux32[4]; + const uint8_t * aux8 = (const uint8_t *)aux32; + + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + __m256i sumi1 = _mm256_setzero_si256(); + __m256i sumi2 = _mm256_setzero_si256(); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m256i q8_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q8_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + memcpy(aux32, q2, 4*sizeof(uint32_t)); q2 += 8; + const __m256i q2_1 = _mm256_set_epi64x(iq2xxs_grid[aux8[ 3]], iq2xxs_grid[aux8[ 2]], iq2xxs_grid[aux8[1]], iq2xxs_grid[aux8[0]]); + const __m256i q2_2 = _mm256_set_epi64x(iq2xxs_grid[aux8[11]], iq2xxs_grid[aux8[10]], iq2xxs_grid[aux8[9]], iq2xxs_grid[aux8[8]]); + const __m256i s2_1 = 
_mm256_set_epi64x(signs64[(aux32[1] >> 21) & 127], signs64[(aux32[1] >> 14) & 127], + signs64[(aux32[1] >> 7) & 127], signs64[(aux32[1] >> 0) & 127]); + const __m256i s2_2 = _mm256_set_epi64x(signs64[(aux32[3] >> 21) & 127], signs64[(aux32[3] >> 14) & 127], + signs64[(aux32[3] >> 7) & 127], signs64[(aux32[3] >> 0) & 127]); + const __m256i q8s_1 = _mm256_sign_epi8(q8_1, s2_1); + const __m256i q8s_2 = _mm256_sign_epi8(q8_2, s2_2); + const __m256i dot1 = _mm256_maddubs_epi16(q2_1, q8s_1); + const __m256i dot2 = _mm256_maddubs_epi16(q2_2, q8s_2); + const uint16_t ls1 = aux32[1] >> 28; + const uint16_t ls2 = aux32[3] >> 28; + const __m256i p1 = _mm256_madd_epi16(dot1, _mm256_set1_epi16(2*ls1+1)); + const __m256i p2 = _mm256_madd_epi16(dot2, _mm256_set1_epi16(2*ls2+1)); + sumi1 = _mm256_add_epi32(sumi1, p1); + sumi2 = _mm256_add_epi32(sumi2, p2); + } + + accumf = _mm256_fmadd_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(_mm256_add_epi32(sumi1, sumi2)), accumf); + + } + + *s = 0.125f * hsum_float_8(accumf); + +#elif defined(__AVX__) + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + uint32_t aux32[4]; + const uint8_t * aux8 = (const uint8_t *)aux32; + + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + __m128i sumi1_0 = _mm_setzero_si128(); + __m128i sumi1_1 = _mm_setzero_si128(); + __m128i sumi2_0 = _mm_setzero_si128(); + __m128i sumi2_1 = _mm_setzero_si128(); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m128i q8_1_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_1_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_2_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_2_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + memcpy(aux32, q2, 4*sizeof(uint32_t)); q2 += 8; + const __m128i q2_1_0 = 
_mm_set_epi64x(iq2xxs_grid[aux8[1]], iq2xxs_grid[aux8[0]]); + const __m128i q2_1_1 = _mm_set_epi64x(iq2xxs_grid[aux8[3]], iq2xxs_grid[aux8[2]]); + const __m128i q2_2_0 = _mm_set_epi64x(iq2xxs_grid[aux8[9]], iq2xxs_grid[aux8[8]]); + const __m128i q2_2_1 = _mm_set_epi64x(iq2xxs_grid[aux8[11]], iq2xxs_grid[aux8[10]]); + const __m128i s2_1_0 = _mm_set_epi64x(signs64[(aux32[1] >> 7) & 127], signs64[(aux32[1] >> 0) & 127]); + const __m128i s2_1_1 = _mm_set_epi64x(signs64[(aux32[1] >> 21) & 127], signs64[(aux32[1] >> 14) & 127]); + const __m128i s2_2_0 = _mm_set_epi64x(signs64[(aux32[3] >> 7) & 127], signs64[(aux32[3] >> 0) & 127]); + const __m128i s2_2_1 = _mm_set_epi64x(signs64[(aux32[3] >> 21) & 127], signs64[(aux32[3] >> 14) & 127]); + const __m128i q8s_1_0 = _mm_sign_epi8(q8_1_0, s2_1_0); + const __m128i q8s_1_1 = _mm_sign_epi8(q8_1_1, s2_1_1); + const __m128i q8s_2_0 = _mm_sign_epi8(q8_2_0, s2_2_0); + const __m128i q8s_2_1 = _mm_sign_epi8(q8_2_1, s2_2_1); + const __m128i dot1_0 = _mm_maddubs_epi16(q2_1_0, q8s_1_0); + const __m128i dot1_1 = _mm_maddubs_epi16(q2_1_1, q8s_1_1); + const __m128i dot2_0 = _mm_maddubs_epi16(q2_2_0, q8s_2_0); + const __m128i dot2_1 = _mm_maddubs_epi16(q2_2_1, q8s_2_1); + const uint16_t ls1 = aux32[1] >> 28; + const uint16_t ls2 = aux32[3] >> 28; + const __m128i p1_0 = _mm_madd_epi16(dot1_0, _mm_set1_epi16(2*ls1+1)); + const __m128i p1_1 = _mm_madd_epi16(dot1_1, _mm_set1_epi16(2*ls1+1)); + const __m128i p2_0 = _mm_madd_epi16(dot2_0, _mm_set1_epi16(2*ls2+1)); + const __m128i p2_1 = _mm_madd_epi16(dot2_1, _mm_set1_epi16(2*ls2+1)); + sumi1_0 = _mm_add_epi32(sumi1_0, p1_0); + sumi1_1 = _mm_add_epi32(sumi1_1, p1_1); + sumi2_0 = _mm_add_epi32(sumi2_0, p2_0); + sumi2_1 = _mm_add_epi32(sumi2_1, p2_1); + } + + accumf = _mm256_add_ps(_mm256_mul_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(MM256_SET_M128I(_mm_add_epi32(sumi1_1, sumi2_1), _mm_add_epi32(sumi1_0, sumi2_0)))), accumf); + + } + + *s = 0.125f * hsum_float_8(accumf); + +#elif 
defined(__POWER9_VECTOR__) + const vector int v0 = vec_splats((int32_t)0); + vector float vsumf0 = vec_splats(0.0f); + vector float vsumf1 = vec_splats(0.0f); + vector float vsumf2 = vec_splats(0.0f); + vector float vsumf3 = vec_splats(0.0f); + + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + for (int i = 0; i < nb; ++i) { + vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[i].d)); + vector float vyd = vec_splats(y[i].d); + vector float vd = vec_mul(vxd, vyd); + + vector signed int vsumi0 = v0; + vector signed int vsumi1 = v0; + vector signed int vsumi2 = v0; + vector signed int vsumi3 = v0; + + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + + for (int j = 0; j < QK_K/32; j += 2) { + __builtin_prefetch(q2, 0, 1); + __builtin_prefetch(q8, 0, 1); + + uint32_t aux32[4]; + const uint8_t * aux8 = (const uint8_t *)aux32; + + memcpy(aux32, q2, 4*sizeof(uint32_t)); + q2 += 8; + + vector signed long long aux64x2_0 = {*(const int64_t *)(iq2xxs_grid + aux8[ 0]), *(const int64_t *)(iq2xxs_grid + aux8[ 1])}; + vector signed long long aux64x2_1 = {*(const int64_t *)(iq2xxs_grid + aux8[ 2]), *(const int64_t *)(iq2xxs_grid + aux8[ 3])}; + vector signed long long aux64x2_2 = {*(const int64_t *)(iq2xxs_grid + aux8[ 8]), *(const int64_t *)(iq2xxs_grid + aux8[ 9])}; + vector signed long long aux64x2_3 = {*(const int64_t *)(iq2xxs_grid + aux8[10]), *(const int64_t *)(iq2xxs_grid + aux8[11])}; + + vector signed long long vsigns0 = {*(const int64_t *)(signs64 + ((aux32[1] >> 0) & 127)), *(const int64_t *)(signs64 + ((aux32[1] >> 7) & 127))}; + vector signed long long vsigns1 = {*(const int64_t *)(signs64 + ((aux32[1] >> 14) & 127)), *(const int64_t *)(signs64 + ((aux32[1] >> 21) & 127))}; + vector signed long long vsigns2 = {*(const int64_t *)(signs64 + ((aux32[3] >> 0) & 127)), *(const int64_t *)(signs64 + ((aux32[3] >> 7) & 127))}; + vector signed long long vsigns3 = {*(const int64_t *)(signs64 + ((aux32[3] >> 14) & 127)), *(const 
int64_t *)(signs64 + ((aux32[3] >> 21) & 127))}; + + vector signed char q2x0 = (vector signed char)vec_mul((vector signed char)vsigns0, (vector signed char)aux64x2_0); + vector signed char q2x1 = (vector signed char)vec_mul((vector signed char)vsigns1, (vector signed char)aux64x2_1); + vector signed char q2x2 = (vector signed char)vec_mul((vector signed char)vsigns2, (vector signed char)aux64x2_2); + vector signed char q2x3 = (vector signed char)vec_mul((vector signed char)vsigns3, (vector signed char)aux64x2_3); + + vector signed char q8y0 = vec_xl( 0, q8); + vector signed char q8y1 = vec_xl(16, q8); + vector signed char q8y2 = vec_xl(32, q8); + vector signed char q8y3 = vec_xl(48, q8); + q8 += 64; + + vector signed short qv0 = vec_add(vec_mule(q2x0, q8y0), vec_mulo(q2x0, q8y0)); + vector signed short qv1 = vec_add(vec_mule(q2x1, q8y1), vec_mulo(q2x1, q8y1)); + vector signed short qv2 = vec_add(vec_mule(q2x2, q8y2), vec_mulo(q2x2, q8y2)); + vector signed short qv3 = vec_add(vec_mule(q2x3, q8y3), vec_mulo(q2x3, q8y3)); + + const uint16_t ls0 = aux32[1] >> 28; + const uint16_t ls1 = aux32[3] >> 28; + + vector signed short vscales01 = vec_splats((int16_t)(2*ls0+1)); + vector signed short vscales23 = vec_splats((int16_t)(2*ls1+1)); + + vsumi0 = vec_msum(qv0, vscales01, vsumi0); + vsumi1 = vec_msum(qv1, vscales01, vsumi1); + vsumi2 = vec_msum(qv2, vscales23, vsumi2); + vsumi3 = vec_msum(qv3, vscales23, vsumi3); + } + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0); + vsumf1 = vec_madd(vec_ctf(vsumi1, 0), vd, vsumf1); + vsumf2 = vec_madd(vec_ctf(vsumi2, 0), vd, vsumf2); + vsumf3 = vec_madd(vec_ctf(vsumi3, 0), vd, vsumf3); + } + + vsumf0 = vec_add(vsumf0, vsumf2); + vsumf1 = vec_add(vsumf1, vsumf3); + + vsumf0 = vec_add(vsumf0, vsumf1); + + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8)); + + *s = 0.125f * vec_extract(vsumf0, 0); + +#elif defined(__loongarch_asx) + + const uint64_t * signs64 = (const 
uint64_t *)keven_signs_q2xs; + + uint32_t aux32[4]; + const uint8_t * aux8 = (const uint8_t *)aux32; + + __m256 accumf = (__m256)__lasx_xvldi(0); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + __m256i sumi1 = __lasx_xvldi(0); + __m256i sumi2 = __lasx_xvldi(0); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m256i q8_1 = __lasx_xvld((const __m256i *)q8, 0); q8 += 32; + const __m256i q8_2 = __lasx_xvld((const __m256i *)q8, 0); q8 += 32; + memcpy(aux32, q2, 4*sizeof(uint32_t)); q2 += 8; + + const __m256i q2_1 = lasx_set_d(iq2xxs_grid[aux8[ 3]], iq2xxs_grid[aux8[ 2]], iq2xxs_grid[aux8[1]], iq2xxs_grid[aux8[0]]); + const __m256i q2_2 = lasx_set_d(iq2xxs_grid[aux8[11]], iq2xxs_grid[aux8[10]], iq2xxs_grid[aux8[9]], iq2xxs_grid[aux8[8]]); + const __m256i s2_1 = lasx_set_d(signs64[(aux32[1] >> 21) & 127], signs64[(aux32[1] >> 14) & 127], + signs64[(aux32[1] >> 7) & 127], signs64[(aux32[1] >> 0) & 127]); + const __m256i s2_2 = lasx_set_d(signs64[(aux32[3] >> 21) & 127], signs64[(aux32[3] >> 14) & 127], + signs64[(aux32[3] >> 7) & 127], signs64[(aux32[3] >> 0) & 127]); + const __m256i q8s_1 = __lasx_xvsigncov_b(s2_1, q8_1); + const __m256i q8s_2 = __lasx_xvsigncov_b(s2_2, q8_2); + const __m256i dot1 = lasx_maddubs_h(q2_1, q8s_1); + const __m256i dot2 = lasx_maddubs_h(q2_2, q8s_2); + const uint16_t ls1 = aux32[1] >> 28; + const uint16_t ls2 = aux32[3] >> 28; + const __m256i p1 = lasx_madd_h(dot1, __lasx_xvreplgr2vr_h(2*ls1+1)); + const __m256i p2 = lasx_madd_h(dot2, __lasx_xvreplgr2vr_h(2*ls2+1)); + sumi1 = __lasx_xvadd_w(sumi1, p1); + sumi2 = __lasx_xvadd_w(sumi2, p2); + } + + accumf = __lasx_xvfmadd_s(__lasx_xvreplfr2vr_s(d), __lasx_xvffint_s_w(__lasx_xvadd_w(sumi1, sumi2)), accumf); + } + + *s = 0.125f * hsum_float_8(accumf); + +#else + + uint32_t aux32[2]; + const uint8_t * aux8 = (const uint8_t *)aux32; + + float sumf = 0.f; + for (int i 
= 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + int32_t bsum = 0; + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + memcpy(aux32, q2, 2*sizeof(uint32_t)); + q2 += 4; + const uint32_t ls = 2*(aux32[1] >> 28) + 1; + int32_t sumi = 0; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xxs_grid + aux8[l]); + const uint8_t signs = ksigns_iq2xs[(aux32[1] >> 7*l) & 127]; + for (int j = 0; j < 8; ++j) { + sumi += grid[j] * q8[j] * (signs & kmask_iq2xs[j] ? -1 : 1); + } + q8 += 8; + } + bsum += sumi * ls; + } + sumf += d * bsum; + } + *s = 0.125f * sumf; +#endif +} + +void ggml_vec_dot_iq2_xs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_iq2_xs * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined(__ARM_NEON) + + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + ggml_int8x16x4_t q2u; + ggml_int8x16x4_t q2s; + ggml_int8x16x4_t q8b; + + int32x4x4_t scales32; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + const uint8x8_t scales8 = vld1_u8(x[i].scales); + const uint8x8_t scales_l = vand_u8(scales8, vdup_n_u8(0xf)); + const uint8x8_t scales_h = vshr_n_u8(scales8, 4); + uint8x16_t scales = vcombine_u8(vzip1_u8(scales_l, scales_h), vzip2_u8(scales_l, scales_h)); + scales = vaddq_u8(vshlq_n_u8(scales, 1), vdupq_n_u8(1)); + const uint16x8_t scales1 = vmovl_u8(vget_low_u8(scales)); + const uint16x8_t scales2 = vmovl_u8(vget_high_u8(scales)); + scales32.val[0] = vreinterpretq_s32_u32(vmovl_u16(vget_low_u16(scales1))); + scales32.val[1] = 
vreinterpretq_s32_u32(vmovl_u16(vget_high_u16(scales1))); + scales32.val[2] = vreinterpretq_s32_u32(vmovl_u16(vget_low_u16(scales2))); + scales32.val[3] = vreinterpretq_s32_u32(vmovl_u16(vget_high_u16(scales2))); + int32x4_t sumi = vdupq_n_s32(0); + for (int ib64 = 0; ib64 < QK_K/64; ++ib64) { + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; + q2u.val[0] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[0] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[1] & 511)))); + q2u.val[1] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[2] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[3] & 511)))); + q2u.val[2] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[4] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[5] & 511)))); + q2u.val[3] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[6] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[7] & 511)))); + q2s.val[0] = vcombine_s8(vld1_s8((const void *)(signs64 + (q2[0] >> 9))), vld1_s8((const void *)(signs64 + (q2[1] >> 9)))); + q2s.val[1] = vcombine_s8(vld1_s8((const void *)(signs64 + (q2[2] >> 9))), vld1_s8((const void *)(signs64 + (q2[3] >> 9)))); + q2s.val[2] = vcombine_s8(vld1_s8((const void *)(signs64 + (q2[4] >> 9))), vld1_s8((const void *)(signs64 + (q2[5] >> 9)))); + q2s.val[3] = vcombine_s8(vld1_s8((const void *)(signs64 + (q2[6] >> 9))), vld1_s8((const void *)(signs64 + (q2[7] >> 9)))); + q2u.val[0] = vmulq_s8(q2u.val[0], q2s.val[0]); + q2u.val[1] = vmulq_s8(q2u.val[1], q2s.val[1]); + q2u.val[2] = vmulq_s8(q2u.val[2], q2s.val[2]); + q2u.val[3] = vmulq_s8(q2u.val[3], q2s.val[3]); + const int32x4_t p1 = ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[0], q8b.val[0]); + const int32x4_t p2 = ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[1], q8b.val[1]); + const int32x4_t p3 = ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[2], q8b.val[2]); + const int32x4_t p4 = ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[3], q8b.val[3]); + const int32x4_t p = vpaddq_s32(vpaddq_s32(p1, p2), vpaddq_s32(p3, p4)); + sumi = vmlaq_s32(sumi, p, 
scales32.val[ib64]); + q2 += 8; + } + sumf += d*vaddvq_s32(sumi); + } + *s = 0.125f * sumf; + +#elif defined(__AVX2__) + + const __m256i mone = _mm256_set1_epi8(1); + static const char block_sign_shuffle_mask_1[32] = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, + 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, + }; + static const char block_sign_shuffle_mask_2[32] = { + 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, + 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0e, 0x0e, 0x0e, 0x0e, 0x0e, 0x0e, 0x0e, 0x0e, + }; + static const uint8_t bit_selector_mask_bytes[32] = { + 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + }; + + const __m256i bit_selector_mask = _mm256_loadu_si256((const __m256i*)bit_selector_mask_bytes); + const __m256i block_sign_shuffle_1 = _mm256_loadu_si256((const __m256i*)block_sign_shuffle_mask_1); + const __m256i block_sign_shuffle_2 = _mm256_loadu_si256((const __m256i*)block_sign_shuffle_mask_2); + + static const uint8_t k_bit_helper[32] = { + 0x00, 0x80, 0x80, 0x00, 0x80, 0x00, 0x00, 0x80, 0x80, 0x00, 0x00, 0x80, 0x00, 0x80, 0x80, 0x00, + 0x00, 0x80, 0x80, 0x00, 0x80, 0x00, 0x00, 0x80, 0x80, 0x00, 0x00, 0x80, 0x00, 0x80, 0x80, 0x00, + }; + const __m256i bit_helper = _mm256_loadu_si256((const __m256i*)k_bit_helper); + const __m256i m511 = _mm256_set1_epi16(511); + const __m128i m4 = _mm_set1_epi8(0xf); + const __m128i m1 = _mm_set1_epi8(1); + + uint64_t aux64; + + // somewhat hacky, but gives a significant boost in performance + __m256i aux_gindex; + const uint16_t * gindex = (const uint16_t *)&aux_gindex; + + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const 
uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + + memcpy(&aux64, x[i].scales, 8); + __m128i stmp = _mm_set1_epi64x(aux64); + stmp = _mm_unpacklo_epi8(_mm_and_si128(stmp, m4), _mm_and_si128(_mm_srli_epi16(stmp, 4), m4)); + const __m128i scales = _mm_add_epi8(_mm_slli_epi16(stmp, 1), m1); + + __m256i sumi1 = _mm256_setzero_si256(); + __m256i sumi2 = _mm256_setzero_si256(); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 4) { + + const __m256i q2_data = _mm256_loadu_si256((const __m256i*)q2); q2 += 16; + aux_gindex = _mm256_and_si256(q2_data, m511); + + const __m256i partial_sign_bits = _mm256_srli_epi16(q2_data, 9); + const __m256i partial_sign_bits_upper = _mm256_srli_epi16(q2_data, 13); + const __m256i partial_sign_bits_for_counting = _mm256_xor_si256(partial_sign_bits, partial_sign_bits_upper); + + const __m256i odd_bits = _mm256_shuffle_epi8(bit_helper, partial_sign_bits_for_counting); + const __m256i full_sign_bits = _mm256_or_si256(partial_sign_bits, odd_bits); + + const __m256i q8_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q8_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q8_3 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q8_4 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + + const __m256i q2_1 = _mm256_set_epi64x(iq2xs_grid[gindex[ 3]], iq2xs_grid[gindex[ 2]], + iq2xs_grid[gindex[ 1]], iq2xs_grid[gindex[ 0]]); + const __m256i q2_2 = _mm256_set_epi64x(iq2xs_grid[gindex[ 7]], iq2xs_grid[gindex[ 6]], + iq2xs_grid[gindex[ 5]], iq2xs_grid[gindex[ 4]]); + const __m256i q2_3 = _mm256_set_epi64x(iq2xs_grid[gindex[11]], iq2xs_grid[gindex[10]], + iq2xs_grid[gindex[ 9]], iq2xs_grid[gindex[ 8]]); + const __m256i q2_4 = _mm256_set_epi64x(iq2xs_grid[gindex[15]], iq2xs_grid[gindex[14]], + iq2xs_grid[gindex[13]], iq2xs_grid[gindex[12]]); + + const __m128i full_signs_l = _mm256_castsi256_si128(full_sign_bits); + const __m128i full_signs_h = 
_mm256_extractf128_si256(full_sign_bits, 1); + const __m256i full_signs_1 = MM256_SET_M128I(full_signs_l, full_signs_l); + const __m256i full_signs_2 = MM256_SET_M128I(full_signs_h, full_signs_h); + + __m256i signs; + signs = _mm256_shuffle_epi8(full_signs_1, block_sign_shuffle_1); + signs = _mm256_cmpeq_epi8(_mm256_and_si256(signs, bit_selector_mask), bit_selector_mask); + const __m256i q8s_1 = _mm256_sign_epi8(q8_1, _mm256_or_si256(signs, mone)); + + signs = _mm256_shuffle_epi8(full_signs_1, block_sign_shuffle_2); + signs = _mm256_cmpeq_epi8(_mm256_and_si256(signs, bit_selector_mask), bit_selector_mask); + const __m256i q8s_2 = _mm256_sign_epi8(q8_2, _mm256_or_si256(signs, mone)); + + signs = _mm256_shuffle_epi8(full_signs_2, block_sign_shuffle_1); + signs = _mm256_cmpeq_epi8(_mm256_and_si256(signs, bit_selector_mask), bit_selector_mask); + const __m256i q8s_3 = _mm256_sign_epi8(q8_3, _mm256_or_si256(signs, mone)); + + signs = _mm256_shuffle_epi8(full_signs_2, block_sign_shuffle_2); + signs = _mm256_cmpeq_epi8(_mm256_and_si256(signs, bit_selector_mask), bit_selector_mask); + const __m256i q8s_4 = _mm256_sign_epi8(q8_4, _mm256_or_si256(signs, mone)); + + const __m256i dot1 = _mm256_maddubs_epi16(q2_1, q8s_1); + const __m256i dot2 = _mm256_maddubs_epi16(q2_2, q8s_2); + const __m256i dot3 = _mm256_maddubs_epi16(q2_3, q8s_3); + const __m256i dot4 = _mm256_maddubs_epi16(q2_4, q8s_4); + + const __m256i sc1 = _mm256_cvtepi8_epi16(_mm_shuffle_epi8(scales, get_scale_shuffle(ib32+0))); + const __m256i sc2 = _mm256_cvtepi8_epi16(_mm_shuffle_epi8(scales, get_scale_shuffle(ib32+1))); + const __m256i sc3 = _mm256_cvtepi8_epi16(_mm_shuffle_epi8(scales, get_scale_shuffle(ib32+2))); + const __m256i sc4 = _mm256_cvtepi8_epi16(_mm_shuffle_epi8(scales, get_scale_shuffle(ib32+3))); + + sumi1 = _mm256_add_epi32(sumi1, _mm256_madd_epi16(dot1, sc1)); + sumi2 = _mm256_add_epi32(sumi2, _mm256_madd_epi16(dot2, sc2)); + sumi1 = _mm256_add_epi32(sumi1, _mm256_madd_epi16(dot3, sc3)); + sumi2 
= _mm256_add_epi32(sumi2, _mm256_madd_epi16(dot4, sc4)); + } + + accumf = _mm256_fmadd_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(_mm256_add_epi32(sumi1, sumi2)), accumf); + + } + + *s = 0.125f * hsum_float_8(accumf); + +#elif defined(__AVX__) + const __m128i mone = _mm_set1_epi8(1); + static const char block_sign_shuffle_mask_1[32] = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, + 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, + }; + static const char block_sign_shuffle_mask_2[32] = { + 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, + 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0e, 0x0e, 0x0e, 0x0e, 0x0e, 0x0e, 0x0e, 0x0e, + }; + static const uint8_t bit_selector_mask_bytes[32] = { + 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + }; + + const __m128i bit_selector_mask_0 = _mm_loadu_si128((const __m128i*)bit_selector_mask_bytes); + const __m128i bit_selector_mask_1 = _mm_loadu_si128((const __m128i*)bit_selector_mask_bytes + 1); + const __m128i block_sign_shuffle_1_0 = _mm_loadu_si128((const __m128i*)block_sign_shuffle_mask_1); + const __m128i block_sign_shuffle_1_1 = _mm_loadu_si128((const __m128i*)block_sign_shuffle_mask_1 + 1); + const __m128i block_sign_shuffle_2_0 = _mm_loadu_si128((const __m128i*)block_sign_shuffle_mask_2); + const __m128i block_sign_shuffle_2_1 = _mm_loadu_si128((const __m128i*)block_sign_shuffle_mask_2 + 1); + + static const uint8_t k_bit_helper[32] = { + 0x00, 0x80, 0x80, 0x00, 0x80, 0x00, 0x00, 0x80, 0x80, 0x00, 0x00, 0x80, 0x00, 0x80, 0x80, 0x00, + 0x00, 0x80, 0x80, 0x00, 0x80, 0x00, 0x00, 0x80, 0x80, 0x00, 0x00, 0x80, 0x00, 0x80, 0x80, 0x00, + }; + const __m128i bit_helper_0 = _mm_loadu_si128((const __m128i*)k_bit_helper); + const __m128i 
bit_helper_1 = _mm_loadu_si128((const __m128i*)k_bit_helper + 1); + const __m128i m511 = _mm_set1_epi16(511); + const __m128i m4 = _mm_set1_epi8(0xf); + const __m128i m1 = _mm_set1_epi8(1); + + uint64_t aux64; + + // somewhat hacky, but gives a significant boost in performance + __m256i aux_gindex; + const uint16_t * gindex = (const uint16_t *)&aux_gindex; + + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + + memcpy(&aux64, x[i].scales, 8); + __m128i stmp = _mm_set1_epi64x(aux64); + stmp = _mm_unpacklo_epi8(_mm_and_si128(stmp, m4), _mm_and_si128(_mm_srli_epi16(stmp, 4), m4)); + const __m128i scales = _mm_add_epi8(_mm_slli_epi16(stmp, 1), m1); + + __m128i sumi1_0 = _mm_setzero_si128(); + __m128i sumi1_1 = _mm_setzero_si128(); + __m128i sumi2_0 = _mm_setzero_si128(); + __m128i sumi2_1 = _mm_setzero_si128(); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 4) { + + const __m128i q2_data_0 = _mm_loadu_si128((const __m128i*)q2); + const __m128i q2_data_1 = _mm_loadu_si128((const __m128i*)q2 + 1); q2 += 16; + aux_gindex = MM256_SET_M128I(_mm_and_si128(q2_data_1, m511), _mm_and_si128(q2_data_0, m511)); + + const __m128i partial_sign_bits_0 = _mm_srli_epi16(q2_data_0, 9); + const __m128i partial_sign_bits_1 = _mm_srli_epi16(q2_data_1, 9); + const __m128i partial_sign_bits_upper_0 = _mm_srli_epi16(q2_data_0, 13); + const __m128i partial_sign_bits_upper_1 = _mm_srli_epi16(q2_data_1, 13); + const __m128i partial_sign_bits_for_counting_0 = _mm_xor_si128(partial_sign_bits_0, partial_sign_bits_upper_0); + const __m128i partial_sign_bits_for_counting_1 = _mm_xor_si128(partial_sign_bits_1, partial_sign_bits_upper_1); + + const __m128i odd_bits_0 = _mm_shuffle_epi8(bit_helper_0, partial_sign_bits_for_counting_0); + const __m128i odd_bits_1 = _mm_shuffle_epi8(bit_helper_1, partial_sign_bits_for_counting_1); + const __m128i 
full_sign_bits_0 = _mm_or_si128(partial_sign_bits_0, odd_bits_0); + const __m128i full_sign_bits_1 = _mm_or_si128(partial_sign_bits_1, odd_bits_1); + + const __m128i q8_1_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_1_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_2_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_2_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_3_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_3_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_4_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_4_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + + const __m128i q2_1_0 = _mm_set_epi64x(iq2xs_grid[gindex[1]], iq2xs_grid[gindex[0]]); + const __m128i q2_1_1 = _mm_set_epi64x(iq2xs_grid[gindex[3]], iq2xs_grid[gindex[2]]); + const __m128i q2_2_0 = _mm_set_epi64x(iq2xs_grid[gindex[5]], iq2xs_grid[gindex[4]]); + const __m128i q2_2_1 = _mm_set_epi64x(iq2xs_grid[gindex[7]], iq2xs_grid[gindex[6]]); + const __m128i q2_3_0 = _mm_set_epi64x(iq2xs_grid[gindex[9]], iq2xs_grid[gindex[8]]); + const __m128i q2_3_1 = _mm_set_epi64x(iq2xs_grid[gindex[11]], iq2xs_grid[gindex[10]]); + const __m128i q2_4_0 = _mm_set_epi64x(iq2xs_grid[gindex[13]], iq2xs_grid[gindex[12]]); + const __m128i q2_4_1 = _mm_set_epi64x(iq2xs_grid[gindex[15]], iq2xs_grid[gindex[14]]); + + // AVX2 full_signs_1 is full_sign_bits_0 here + // AVX2 full_signs_2 is full_sign_bits_1 here + __m128i signs_0, signs_1; + signs_0 = _mm_shuffle_epi8(full_sign_bits_0, block_sign_shuffle_1_0); + signs_1 = _mm_shuffle_epi8(full_sign_bits_0, block_sign_shuffle_1_1); + signs_0 = _mm_cmpeq_epi8(_mm_and_si128(signs_0, bit_selector_mask_0), bit_selector_mask_0); + signs_1 = _mm_cmpeq_epi8(_mm_and_si128(signs_1, bit_selector_mask_1), bit_selector_mask_1); + const __m128i q8s_1_0 = _mm_sign_epi8(q8_1_0, _mm_or_si128(signs_0, mone)); + const __m128i q8s_1_1 = 
_mm_sign_epi8(q8_1_1, _mm_or_si128(signs_1, mone)); + + signs_0 = _mm_shuffle_epi8(full_sign_bits_0, block_sign_shuffle_2_0); + signs_1 = _mm_shuffle_epi8(full_sign_bits_0, block_sign_shuffle_2_1); + signs_0 = _mm_cmpeq_epi8(_mm_and_si128(signs_0, bit_selector_mask_0), bit_selector_mask_0); + signs_1 = _mm_cmpeq_epi8(_mm_and_si128(signs_1, bit_selector_mask_1), bit_selector_mask_1); + const __m128i q8s_2_0 = _mm_sign_epi8(q8_2_0, _mm_or_si128(signs_0, mone)); + const __m128i q8s_2_1 = _mm_sign_epi8(q8_2_1, _mm_or_si128(signs_1, mone)); + + signs_0 = _mm_shuffle_epi8(full_sign_bits_1, block_sign_shuffle_1_0); + signs_1 = _mm_shuffle_epi8(full_sign_bits_1, block_sign_shuffle_1_1); + signs_0 = _mm_cmpeq_epi8(_mm_and_si128(signs_0, bit_selector_mask_0), bit_selector_mask_0); + signs_1 = _mm_cmpeq_epi8(_mm_and_si128(signs_1, bit_selector_mask_1), bit_selector_mask_1); + const __m128i q8s_3_0 = _mm_sign_epi8(q8_3_0, _mm_or_si128(signs_0, mone)); + const __m128i q8s_3_1 = _mm_sign_epi8(q8_3_1, _mm_or_si128(signs_1, mone)); + + signs_0 = _mm_shuffle_epi8(full_sign_bits_1, block_sign_shuffle_2_0); + signs_1 = _mm_shuffle_epi8(full_sign_bits_1, block_sign_shuffle_2_1); + signs_0 = _mm_cmpeq_epi8(_mm_and_si128(signs_0, bit_selector_mask_0), bit_selector_mask_0); + signs_1 = _mm_cmpeq_epi8(_mm_and_si128(signs_1, bit_selector_mask_1), bit_selector_mask_1); + const __m128i q8s_4_0 = _mm_sign_epi8(q8_4_0, _mm_or_si128(signs_0, mone)); + const __m128i q8s_4_1 = _mm_sign_epi8(q8_4_1, _mm_or_si128(signs_1, mone)); + + const __m128i dot1_0 = _mm_maddubs_epi16(q2_1_0, q8s_1_0); + const __m128i dot1_1 = _mm_maddubs_epi16(q2_1_1, q8s_1_1); + const __m128i dot2_0 = _mm_maddubs_epi16(q2_2_0, q8s_2_0); + const __m128i dot2_1 = _mm_maddubs_epi16(q2_2_1, q8s_2_1); + const __m128i dot3_0 = _mm_maddubs_epi16(q2_3_0, q8s_3_0); + const __m128i dot3_1 = _mm_maddubs_epi16(q2_3_1, q8s_3_1); + const __m128i dot4_0 = _mm_maddubs_epi16(q2_4_0, q8s_4_0); + const __m128i dot4_1 = 
_mm_maddubs_epi16(q2_4_1, q8s_4_1); + + __m128i sc_tmp = _mm_shuffle_epi8(scales, get_scale_shuffle(ib32+0)); + const __m128i sc1_0 = _mm_cvtepi8_epi16(sc_tmp); + const __m128i sc1_1 = _mm_cvtepi8_epi16(_mm_srli_si128(sc_tmp, 8)); + sc_tmp = _mm_shuffle_epi8(scales, get_scale_shuffle(ib32+1)); + const __m128i sc2_0 = _mm_cvtepi8_epi16(sc_tmp); + const __m128i sc2_1 = _mm_cvtepi8_epi16(_mm_srli_si128(sc_tmp, 8)); + sc_tmp = _mm_shuffle_epi8(scales, get_scale_shuffle(ib32+2)); + const __m128i sc3_0 = _mm_cvtepi8_epi16(sc_tmp); + const __m128i sc3_1 = _mm_cvtepi8_epi16(_mm_srli_si128(sc_tmp, 8)); + sc_tmp = _mm_shuffle_epi8(scales, get_scale_shuffle(ib32+3)); + const __m128i sc4_0 = _mm_cvtepi8_epi16(sc_tmp); + const __m128i sc4_1 = _mm_cvtepi8_epi16(_mm_srli_si128(sc_tmp, 8)); + + sumi1_0 = _mm_add_epi32(sumi1_0, _mm_madd_epi16(dot1_0, sc1_0)); + sumi1_1 = _mm_add_epi32(sumi1_1, _mm_madd_epi16(dot1_1, sc1_1)); + sumi2_0 = _mm_add_epi32(sumi2_0, _mm_madd_epi16(dot2_0, sc2_0)); + sumi2_1 = _mm_add_epi32(sumi2_1, _mm_madd_epi16(dot2_1, sc2_1)); + sumi1_0 = _mm_add_epi32(sumi1_0, _mm_madd_epi16(dot3_0, sc3_0)); + sumi1_1 = _mm_add_epi32(sumi1_1, _mm_madd_epi16(dot3_1, sc3_1)); + sumi2_0 = _mm_add_epi32(sumi2_0, _mm_madd_epi16(dot4_0, sc4_0)); + sumi2_1 = _mm_add_epi32(sumi2_1, _mm_madd_epi16(dot4_1, sc4_1)); + } + + accumf = _mm256_add_ps(_mm256_mul_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(MM256_SET_M128I(_mm_add_epi32(sumi1_1, sumi2_1), _mm_add_epi32(sumi1_0, sumi2_0)))), accumf); + + } + + *s = 0.125f * hsum_float_8(accumf); + +#elif defined(__loongarch_asx) + + const __m256i mone = __lasx_xvreplgr2vr_b(1); + static const char block_sign_shuffle_mask_1[32] = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, + 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, + }; + static const char block_sign_shuffle_mask_2[32] = { + 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x0a, 
0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, + 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0e, 0x0e, 0x0e, 0x0e, 0x0e, 0x0e, 0x0e, 0x0e, + }; + static const uint8_t bit_selector_mask_bytes[32] = { + 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + }; + + const __m256i bit_selector_mask = __lasx_xvld((const __m256i*)bit_selector_mask_bytes, 0); + const __m256i block_sign_shuffle_1 = __lasx_xvld((const __m256i*)block_sign_shuffle_mask_1, 0); + const __m256i block_sign_shuffle_2 = __lasx_xvld((const __m256i*)block_sign_shuffle_mask_2, 0); + + static const uint8_t k_bit_helper[32] = { + 0x00, 0x80, 0x80, 0x00, 0x80, 0x00, 0x00, 0x80, 0x80, 0x00, 0x00, 0x80, 0x00, 0x80, 0x80, 0x00, + 0x00, 0x80, 0x80, 0x00, 0x80, 0x00, 0x00, 0x80, 0x80, 0x00, 0x00, 0x80, 0x00, 0x80, 0x80, 0x00, + }; + const __m256i bit_helper = __lasx_xvld((const __m256i*)k_bit_helper, 0); + const __m256i m511 = __lasx_xvreplgr2vr_h(511); + const __m128i m4 = __lsx_vreplgr2vr_b(0xf); + const __m128i m1 = __lsx_vreplgr2vr_b(1); + + uint64_t aux64; + + // somewhat hacky, but gives a significant boost in performance + __m256i aux_gindex; + const uint16_t * gindex = (const uint16_t *)&aux_gindex; + + __m256 accumf = (__m256)__lasx_xvldi(0); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + + memcpy(&aux64, x[i].scales, 8); + __m128i stmp = __lsx_vreplgr2vr_d(aux64); + stmp = __lsx_vilvl_b( __lsx_vand_v(__lsx_vsrli_h(stmp, 4), m4), __lsx_vand_v(stmp, m4)); + const __m128i scales = __lsx_vadd_b(__lsx_vslli_h(stmp, 1), m1); + + __m256i sumi1 = __lasx_xvldi(0); + __m256i sumi2 = __lasx_xvldi(0); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 4) { + + const __m256i q2_data = __lasx_xvld((const __m256i*)q2, 0); q2 += 16; + aux_gindex = 
__lasx_xvand_v(q2_data, m511); + + const __m256i partial_sign_bits = __lasx_xvsrli_h(q2_data, 9); + const __m256i partial_sign_bits_upper = __lasx_xvsrli_h(q2_data, 13); + const __m256i partial_sign_bits_for_counting = __lasx_xvxor_v(partial_sign_bits, partial_sign_bits_upper); + + const __m256i odd_bits = lasx_shuffle_b(bit_helper, partial_sign_bits_for_counting); + const __m256i full_sign_bits = __lasx_xvor_v(partial_sign_bits, odd_bits); + + const __m256i q8_1 = __lasx_xvld((const __m256i *)q8, 0); q8 += 32; + const __m256i q8_2 = __lasx_xvld((const __m256i *)q8, 0); q8 += 32; + const __m256i q8_3 = __lasx_xvld((const __m256i *)q8, 0); q8 += 32; + const __m256i q8_4 = __lasx_xvld((const __m256i *)q8, 0); q8 += 32; + + const __m256i q2_1 = lasx_set_d(iq2xs_grid[gindex[ 3]], iq2xs_grid[gindex[ 2]], + iq2xs_grid[gindex[ 1]], iq2xs_grid[gindex[ 0]]); + const __m256i q2_2 = lasx_set_d(iq2xs_grid[gindex[ 7]], iq2xs_grid[gindex[ 6]], + iq2xs_grid[gindex[ 5]], iq2xs_grid[gindex[ 4]]); + const __m256i q2_3 = lasx_set_d(iq2xs_grid[gindex[11]], iq2xs_grid[gindex[10]], + iq2xs_grid[gindex[ 9]], iq2xs_grid[gindex[ 8]]); + const __m256i q2_4 = lasx_set_d(iq2xs_grid[gindex[15]], iq2xs_grid[gindex[14]], + iq2xs_grid[gindex[13]], iq2xs_grid[gindex[12]]); + + const __m128i full_signs_l = lasx_extracti128(full_sign_bits, 0); + const __m128i full_signs_h = lasx_extracti128(full_sign_bits, 1); + const __m256i full_signs_1 = lasx_insertf128(full_signs_l, full_signs_l); + const __m256i full_signs_2 = lasx_insertf128(full_signs_h, full_signs_h); + + __m256i signs; + signs = lasx_shuffle_b(full_signs_1, block_sign_shuffle_1); + signs = __lasx_xvseq_b(__lasx_xvand_v(signs, bit_selector_mask), bit_selector_mask); + const __m256i q8s_1 = __lasx_xvsigncov_b(__lasx_xvor_v(signs, mone), q8_1); + + signs = lasx_shuffle_b(full_signs_1, block_sign_shuffle_2); + signs = __lasx_xvseq_b(__lasx_xvand_v(signs, bit_selector_mask), bit_selector_mask); + const __m256i q8s_2 = 
__lasx_xvsigncov_b(__lasx_xvor_v(signs, mone), q8_2); + + signs = lasx_shuffle_b(full_signs_2, block_sign_shuffle_1); + signs = __lasx_xvseq_b(__lasx_xvand_v(signs, bit_selector_mask), bit_selector_mask); + const __m256i q8s_3 = __lasx_xvsigncov_b(__lasx_xvor_v(signs, mone), q8_3); + + signs = lasx_shuffle_b(full_signs_2, block_sign_shuffle_2); + signs = __lasx_xvseq_b(__lasx_xvand_v(signs, bit_selector_mask), bit_selector_mask); + const __m256i q8s_4 = __lasx_xvsigncov_b(__lasx_xvor_v(signs, mone), q8_4); + + const __m256i dot1 = lasx_maddubs_h(q2_1, q8s_1); + const __m256i dot2 = lasx_maddubs_h(q2_2, q8s_2); + const __m256i dot3 = lasx_maddubs_h(q2_3, q8s_3); + const __m256i dot4 = lasx_maddubs_h(q2_4, q8s_4); + + const __m256i sc1 = lasx_ext8_16(lsx_shuffle_b(scales, get_scale_shuffle(ib32+0))); + const __m256i sc2 = lasx_ext8_16(lsx_shuffle_b(scales, get_scale_shuffle(ib32+1))); + const __m256i sc3 = lasx_ext8_16(lsx_shuffle_b(scales, get_scale_shuffle(ib32+2))); + const __m256i sc4 = lasx_ext8_16(lsx_shuffle_b(scales, get_scale_shuffle(ib32+3))); + + sumi1 = __lasx_xvadd_w(sumi1, lasx_madd_h(dot1, sc1)); + sumi2 = __lasx_xvadd_w(sumi2, lasx_madd_h(dot2, sc2)); + sumi1 = __lasx_xvadd_w(sumi1, lasx_madd_h(dot3, sc3)); + sumi2 = __lasx_xvadd_w(sumi2, lasx_madd_h(dot4, sc4)); + } + + accumf = __lasx_xvfmadd_s(__lasx_xvreplfr2vr_s(d), __lasx_xvffint_s_w(__lasx_xvadd_w(sumi1, sumi2)), accumf); + + } + + *s = 0.125f * hsum_float_8(accumf); +#elif defined(__POWER9_VECTOR__) + const vector int v0 = vec_splats((int32_t)0); + vector float vsumf0 = vec_splats(0.0f); + vector float vsumf1 = vec_splats(0.0f); + vector float vsumf2 = vec_splats(0.0f); + vector float vsumf3 = vec_splats(0.0f); + + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + for (int i = 0; i < nb; ++i) { + vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[i].d)); + vector float vyd = vec_splats(y[i].d); + vector float vd = vec_mul(vxd, vyd); + + vector signed int vsumi0 = v0; + vector 
signed int vsumi1 = v0; + vector signed int vsumi2 = v0; + vector signed int vsumi3 = v0; + + const uint16_t * restrict q2 = x[i].qs; + const uint8_t * restrict sc = x[i].scales; + const int8_t * restrict q8 = y[i].qs; + + for (int j = 0; j < QK_K/64; ++j) { + __builtin_prefetch(q2, 0, 1); + __builtin_prefetch(q8, 0, 1); + + vector signed long long aux64x2_0 = {*(const int64_t *)(iq2xs_grid + (q2[0] & 511)), *(const int64_t *)(iq2xs_grid + (q2[1] & 511))}; + vector signed long long aux64x2_1 = {*(const int64_t *)(iq2xs_grid + (q2[2] & 511)), *(const int64_t *)(iq2xs_grid + (q2[3] & 511))}; + vector signed long long aux64x2_2 = {*(const int64_t *)(iq2xs_grid + (q2[4] & 511)), *(const int64_t *)(iq2xs_grid + (q2[5] & 511))}; + vector signed long long aux64x2_3 = {*(const int64_t *)(iq2xs_grid + (q2[6] & 511)), *(const int64_t *)(iq2xs_grid + (q2[7] & 511))}; + + vector signed long long vsigns0 = {*(const int64_t *)(signs64 + ((q2[0] >> 9))), *(const int64_t *)(signs64 + ((q2[1] >> 9)))}; + vector signed long long vsigns1 = {*(const int64_t *)(signs64 + ((q2[2] >> 9))), *(const int64_t *)(signs64 + ((q2[3] >> 9)))}; + vector signed long long vsigns2 = {*(const int64_t *)(signs64 + ((q2[4] >> 9))), *(const int64_t *)(signs64 + ((q2[5] >> 9)))}; + vector signed long long vsigns3 = {*(const int64_t *)(signs64 + ((q2[6] >> 9))), *(const int64_t *)(signs64 + ((q2[7] >> 9)))}; + q2 += 8; + + vector signed char q2x0 = (vector signed char)vec_mul((vector signed char)vsigns0, (vector signed char)aux64x2_0); + vector signed char q2x1 = (vector signed char)vec_mul((vector signed char)vsigns1, (vector signed char)aux64x2_1); + vector signed char q2x2 = (vector signed char)vec_mul((vector signed char)vsigns2, (vector signed char)aux64x2_2); + vector signed char q2x3 = (vector signed char)vec_mul((vector signed char)vsigns3, (vector signed char)aux64x2_3); + + vector signed char q8y0 = vec_xl( 0, q8); + vector signed char q8y1 = vec_xl(16, q8); + vector signed char q8y2 = 
vec_xl(32, q8); + vector signed char q8y3 = vec_xl(48, q8); + q8 += 64; + + vector signed short qv0 = vec_add(vec_mule(q2x0, q8y0), vec_mulo(q2x0, q8y0)); + vector signed short qv1 = vec_add(vec_mule(q2x1, q8y1), vec_mulo(q2x1, q8y1)); + vector signed short qv2 = vec_add(vec_mule(q2x2, q8y2), vec_mulo(q2x2, q8y2)); + vector signed short qv3 = vec_add(vec_mule(q2x3, q8y3), vec_mulo(q2x3, q8y3)); + + const uint16_t ls0 = (uint16_t)(sc[0] & 0xf); + const uint16_t ls1 = (uint16_t)(sc[0] >> 4); + const uint16_t ls2 = (uint16_t)(sc[1] & 0xf); + const uint16_t ls3 = (uint16_t)(sc[1] >> 4); + sc += 2; + + vector signed short vscales0 = vec_splats((int16_t)(2*ls0+1)); + vector signed short vscales1 = vec_splats((int16_t)(2*ls1+1)); + vector signed short vscales2 = vec_splats((int16_t)(2*ls2+1)); + vector signed short vscales3 = vec_splats((int16_t)(2*ls3+1)); + + vsumi0 = vec_msum(qv0, vscales0, vsumi0); + vsumi1 = vec_msum(qv1, vscales1, vsumi1); + vsumi2 = vec_msum(qv2, vscales2, vsumi2); + vsumi3 = vec_msum(qv3, vscales3, vsumi3); + } + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0); + vsumf1 = vec_madd(vec_ctf(vsumi1, 0), vd, vsumf1); + vsumf2 = vec_madd(vec_ctf(vsumi2, 0), vd, vsumf2); + vsumf3 = vec_madd(vec_ctf(vsumi3, 0), vd, vsumf3); + } + + vsumf0 = vec_add(vsumf0, vsumf2); + vsumf1 = vec_add(vsumf1, vsumf3); + + vsumf0 = vec_add(vsumf0, vsumf1); + + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8)); + + *s = 0.125f * vec_extract(vsumf0, 0); +#else + + float sumf = 0.f; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const uint8_t * restrict sc = x[i].scales; + const int8_t * restrict q8 = y[i].qs; + int32_t bsum = 0; + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + const uint16_t ls1 = 2*(sc[ib32] & 0xf) + 1; + const uint16_t ls2 = 2*(sc[ib32] >> 4) + 1; + int32_t sumi = 0; + for (int l = 0; l < 2; ++l) { + const 
uint8_t * grid = (const uint8_t *)(iq2xs_grid + (q2[l] & 511));
+ const uint8_t signs = ksigns_iq2xs[q2[l] >> 9];
+ for (int j = 0; j < 8; ++j) {
+ sumi += grid[j] * q8[j] * (signs & kmask_iq2xs[j] ? -1 : 1);
+ }
+ q8 += 8;
+ }
+ bsum += sumi * ls1;
+ sumi = 0;
+ for (int l = 2; l < 4; ++l) {
+ const uint8_t * grid = (const uint8_t *)(iq2xs_grid + (q2[l] & 511));
+ const uint8_t signs = ksigns_iq2xs[q2[l] >> 9];
+ for (int j = 0; j < 8; ++j) {
+ sumi += grid[j] * q8[j] * (signs & kmask_iq2xs[j] ? -1 : 1);
+ }
+ q8 += 8;
+ }
+ bsum += sumi * ls2;
+ q2 += 4;
+ }
+ sumf += d * bsum;
+ }
+ *s = 0.125f * sumf;
+#endif
+}
+
+// Dot product of one IQ2_S-quantized row (vx) with one Q8_K-quantized row (vy)
+// over n values (n must be a multiple of QK_K); the result is stored in *s.
+// As the scalar fallback at the bottom shows: each 32-value sub-block is rebuilt
+// from 8-bit grid indices in x[i].qs widened by high bits taken from x[i].qh,
+// per-byte signs come from the bit-packed words stored right after the QK_K/8
+// index bytes, each half sub-block is weighted by (1 + 2*scale_nibble) from
+// x[i].scales, and the total is scaled by 0.125f * d. The row-stride parameters
+// bs/bx/by and nrc are unused in this single-row implementation.
+void ggml_vec_dot_iq2_s_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) {
+ assert(n % QK_K == 0);
+ assert(nrc == 1);
+ UNUSED(nrc);
+ UNUSED(bx);
+ UNUSED(by);
+ UNUSED(bs);
+
+ const block_iq2_s * restrict x = vx;
+ const block_q8_K * restrict y = vy;
+
+ const int nb = n / QK_K;
+
+#if defined(__ARM_NEON)
+
+ static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
+ 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03
+ };
+
+ static const uint8_t k_mask2[16] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,};
+
+ const ggml_uint8x16x2_t mask1 = ggml_vld1q_u8_x2(k_mask1);
+ const uint8x16_t mask2 = vld1q_u8(k_mask2);
+ const uint8x16_t m1 = vdupq_n_u8(1);
+ const int32x4_t vzero = vdupq_n_s32(0);
+
+ uint8x16x2_t vs;
+ ggml_int8x16x4_t q2s;
+ ggml_int8x16x4_t q8b;
+
+ float sumf = 0;
+ for (int i = 0; i < nb; ++i) {
+
+ const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d;
+
+ const uint8_t * restrict qs = x[i].qs;
+ const uint8_t * restrict qh = x[i].qh;
+ const uint16_t * restrict signs = (const uint16_t *)(x[i].qs + QK_K/8);
+ const int8_t * restrict q8 = y[i].qs;
+
+ int sumi1 = 0, sumi2 = 0;
+ for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) {
+ q8b = ggml_vld1q_s8_x4(q8); q8 += 64;
+ q2s.val[0] = vcombine_s8(vld1_s8((const int8_t *)(iq2s_grid + (qs[0] | ((qh[ib32+0] << 8) & 0x300)))),
+ vld1_s8((const int8_t *)(iq2s_grid + (qs[1] | ((qh[ib32+0] << 6) & 0x300)))));
+ q2s.val[1] = vcombine_s8(vld1_s8((const int8_t *)(iq2s_grid + (qs[2] | ((qh[ib32+0] << 4) & 0x300)))),
+ vld1_s8((const int8_t *)(iq2s_grid + (qs[3] | ((qh[ib32+0] << 2) & 0x300)))));
+ q2s.val[2] = vcombine_s8(vld1_s8((const int8_t *)(iq2s_grid + (qs[4] | ((qh[ib32+1] << 8) & 0x300)))),
+ vld1_s8((const int8_t *)(iq2s_grid + (qs[5] | ((qh[ib32+1] << 6) & 0x300)))));
+ q2s.val[3] = vcombine_s8(vld1_s8((const int8_t *)(iq2s_grid + (qs[6] | ((qh[ib32+1] << 4) & 0x300)))),
+ vld1_s8((const int8_t *)(iq2s_grid + (qs[7] | ((qh[ib32+1] << 2) & 0x300)))));
+ qs += 8;
+
+// Expand 32 packed sign bits into per-byte masks via tbl + bit-select + compare:
+// a byte of vs becomes 0xFF where its sign bit is set, 0x00 otherwise.
+ vs.val[0] = vreinterpretq_u8_u32(vdupq_n_u32(signs[0] | ((uint32_t) signs[1] << 16)));
+ vs.val[1] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2);
+ vs.val[0] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2);
+ vs.val[0] = vceqq_u8(vs.val[0], mask2);
+ vs.val[1] = vceqq_u8(vs.val[1], mask2);
+
+// OR-ing the 0xFF/0x00 mask with 1 yields -1/+1 multipliers for the grid bytes.
+ q2s.val[0] = vmulq_s8(vreinterpretq_s8_u8(vorrq_u8(vs.val[0], m1)), q2s.val[0]);
+ q2s.val[1] = vmulq_s8(vreinterpretq_s8_u8(vorrq_u8(vs.val[1], m1)), q2s.val[1]);
+
+ vs.val[0] = vreinterpretq_u8_u32(vdupq_n_u32(signs[2] | ((uint32_t) signs[3] << 16)));
+ vs.val[1] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2);
+ vs.val[0] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2);
+ vs.val[0] = vceqq_u8(vs.val[0], mask2);
+ vs.val[1] = vceqq_u8(vs.val[1], mask2);
+
+ signs += 4;
+
+ q2s.val[2] = vmulq_s8(vreinterpretq_s8_u8(vorrq_u8(vs.val[0], m1)), q2s.val[2]);
+ q2s.val[3] = vmulq_s8(vreinterpretq_s8_u8(vorrq_u8(vs.val[1], m1)), q2s.val[3]);
+
+ const int32x4_t p1 = ggml_vdotq_s32(vzero, q2s.val[0], q8b.val[0]);
+ const int32x4_t p2 = ggml_vdotq_s32(vzero, q2s.val[1], q8b.val[1]);
+ const int32x4_t p3 = ggml_vdotq_s32(vzero, q2s.val[2], q8b.val[2]);
+ const int32x4_t p4 = ggml_vdotq_s32(vzero, q2s.val[3], q8b.val[3]);
+
+ sumi1 += vaddvq_s32(p1) * (1 + 2*(x[i].scales[ib32+0] & 0xf));
+ sumi2 += vaddvq_s32(p2) * (1 + 2*(x[i].scales[ib32+0] >> 4));
+ sumi1 += vaddvq_s32(p3) * (1 + 2*(x[i].scales[ib32+1] & 0xf));
+ sumi2 += vaddvq_s32(p4) * (1 + 2*(x[i].scales[ib32+1] >> 4));
+ }
+ sumf += d*(sumi1 + sumi2);
+ }
+
+ *s = 0.125f * sumf;
+
+#elif defined(__AVX2__)
+
+ static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
+ 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03
+ };
+
+ static const uint8_t k_mask2[32] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,
+ 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,
+ };
+
+ const __m128i m4 = _mm_set1_epi8(0xf);
+ const __m128i m1 = _mm_set1_epi8(1);
+
+ const __m256i mask1 = _mm256_loadu_si256((const __m256i*)k_mask1);
+ const __m256i mask2 = _mm256_loadu_si256((const __m256i*)k_mask2);
+
+ uint64_t aux64;
+
+ __m256 accumf = _mm256_setzero_ps();
+ for (int i = 0; i < nb; ++i) {
+ const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d;
+ const uint8_t * restrict qs = x[i].qs;
+ const uint8_t * restrict qh = x[i].qh;
+ const uint16_t * restrict signs = (const uint16_t *)(x[i].qs + QK_K/8);
+ const int8_t * restrict q8 = y[i].qs;
+
+ memcpy(&aux64, x[i].scales, 8);
+ const __m128i scales8 = _mm_add_epi8(_mm_slli_epi16(_mm_and_si128(_mm_set_epi64x(aux64 >> 4, aux64), m4), 1), m1);
+ const __m256i scales16 = _mm256_cvtepi8_epi16(scales8); // 0 2 4 6 8 10 12 14 1 3 5 7 9 11 13 15
+
+ __m256i sumi1 = _mm256_setzero_si256();
+ __m256i sumi2 = _mm256_setzero_si256();
+ for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) {
+ const __m256i q8_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32;
+ const __m256i q8_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32;
+ const __m256i q2_1 = _mm256_set_epi64x(iq2s_grid[qs[3] | ((qh[ib32+0] << 2) & 0x300)],
+ iq2s_grid[qs[2] | ((qh[ib32+0] << 4) & 0x300)],
+ iq2s_grid[qs[1] | ((qh[ib32+0] << 6) & 0x300)],
+ iq2s_grid[qs[0] | ((qh[ib32+0] << 8) & 0x300)]);
+ const __m256i q2_2 = _mm256_set_epi64x(iq2s_grid[qs[7] | ((qh[ib32+1] << 2) & 0x300)],
+ iq2s_grid[qs[6] | ((qh[ib32+1] << 4) & 0x300)],
+ iq2s_grid[qs[5] | ((qh[ib32+1] << 6) & 0x300)],
+ iq2s_grid[qs[4] | ((qh[ib32+1] << 8) & 0x300)]);
+ qs += 8;
+
+ __m256i aux256 = _mm256_set1_epi32(signs[0] | ((uint32_t) signs[1] << 16));
+ aux256 = _mm256_and_si256(_mm256_shuffle_epi8(aux256,mask1), mask2);
+ const __m256i s2_1 = _mm256_cmpeq_epi8(aux256, mask2);
+// (q8 ^ s) - s negates the bytes of q8 wherever the sign mask s is all-ones.
+ const __m256i q8s_1 = _mm256_sub_epi8(_mm256_xor_si256(s2_1, q8_1), s2_1);
+
+ aux256 = _mm256_set1_epi32(signs[2] | ((uint32_t) signs[3] << 16));
+ aux256 = _mm256_and_si256(_mm256_shuffle_epi8(aux256,mask1), mask2);
+ const __m256i s2_2 = _mm256_cmpeq_epi8(aux256, mask2);
+ const __m256i q8s_2 = _mm256_sub_epi8(_mm256_xor_si256(s2_2, q8_2), s2_2);
+
+ signs += 4;
+
+ const __m256i dot1 = _mm256_maddubs_epi16(q2_1, q8s_1); // blocks 2*ib32+0, 2*ib32+1
+ const __m256i dot2 = _mm256_maddubs_epi16(q2_2, q8s_2); // blocks 2*ib32+2, 2*ib32+3
+
+ const __m256i p1 = _mm256_madd_epi16(dot1, _mm256_shuffle_epi8(scales16, get_scale_shuffle_k4(ib32+0)));
+ const __m256i p2 = _mm256_madd_epi16(dot2, _mm256_shuffle_epi8(scales16, get_scale_shuffle_k4(ib32+1)));
+ sumi1 = _mm256_add_epi32(sumi1, p1);
+ sumi2 = _mm256_add_epi32(sumi2, p2);
+ }
+
+ accumf = _mm256_fmadd_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(_mm256_add_epi32(sumi1, sumi2)), accumf);
+
+ }
+
+ *s = 0.125f * hsum_float_8(accumf);
+
+#elif defined(__AVX__)
+ static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
+ 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03
+ };
+
+ static const uint8_t k_mask2[32] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,
+ 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,
+ };
+
+ const __m128i m4 = _mm_set1_epi8(0xf);
+ const __m128i m1 = _mm_set1_epi8(1);
+
+ const __m128i mask1_0 = _mm_loadu_si128((const __m128i*)k_mask1);
+ const __m128i mask1_1 = _mm_loadu_si128((const __m128i*)k_mask1 + 1);
+ const __m128i mask2_0 = _mm_loadu_si128((const __m128i*)k_mask2);
+ const __m128i mask2_1 = _mm_loadu_si128((const __m128i*)k_mask2 + 1);
+
+ uint64_t aux64;
+
+ __m256 accumf = _mm256_setzero_ps();
+ for (int i = 0; i < nb; ++i) {
+ const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d;
+ const uint8_t * restrict qs = x[i].qs;
+ const uint8_t * restrict qh = x[i].qh;
+ const uint16_t * restrict signs = (const uint16_t *)(x[i].qs + QK_K/8);
+ const int8_t * restrict q8 = y[i].qs;
+
+ memcpy(&aux64, x[i].scales, 8);
+ const __m128i scales8 = _mm_add_epi8(_mm_slli_epi16(_mm_and_si128(_mm_set_epi64x(aux64 >> 4, aux64), m4), 1), m1);
+ const __m128i scales16_0 = _mm_cvtepi8_epi16(scales8);
+ const __m128i scales16_1 = _mm_cvtepi8_epi16(_mm_srli_si128(scales8, 8));
+
+ __m128i sumi1_0 = _mm_setzero_si128();
+ __m128i sumi1_1 = _mm_setzero_si128();
+ __m128i sumi2_0 = _mm_setzero_si128();
+ __m128i sumi2_1 = _mm_setzero_si128();
+ for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) {
+ const __m128i q8_1_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16;
+ const __m128i q8_1_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16;
+ const __m128i q8_2_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16;
+ const __m128i q8_2_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16;
+ const __m128i q2_1_0 = _mm_set_epi64x(iq2s_grid[qs[1] | ((qh[ib32+0] << 6) & 0x300)],
+ iq2s_grid[qs[0] | ((qh[ib32+0] << 8) & 0x300)]);
+ const __m128i q2_1_1 = _mm_set_epi64x(iq2s_grid[qs[3] | ((qh[ib32+0] << 2) & 0x300)],
+ iq2s_grid[qs[2] | ((qh[ib32+0] << 4) & 0x300)]);
+ const __m128i q2_2_0 = _mm_set_epi64x(iq2s_grid[qs[5] | ((qh[ib32+1] << 6) & 0x300)],
+ iq2s_grid[qs[4] | ((qh[ib32+1] << 8) & 0x300)]);
+ const __m128i q2_2_1 = _mm_set_epi64x(iq2s_grid[qs[7] | ((qh[ib32+1] << 2) & 0x300)],
+ iq2s_grid[qs[6] | ((qh[ib32+1] << 4) & 0x300)]);
+ qs += 8;
+
+ __m128i aux128_0 = _mm_set1_epi32(signs[0] | ((uint32_t) signs[1] << 16));
+ __m128i aux128_1 = aux128_0;
+ aux128_0 = _mm_and_si128(_mm_shuffle_epi8(aux128_0,mask1_0), mask2_0);
+ aux128_1 = _mm_and_si128(_mm_shuffle_epi8(aux128_1,mask1_1), mask2_1);
+ const __m128i s2_1_0 = _mm_cmpeq_epi8(aux128_0, mask2_0);
+ const __m128i s2_1_1 = _mm_cmpeq_epi8(aux128_1, mask2_1);
+ const __m128i q8s_1_0 = _mm_sub_epi8(_mm_xor_si128(s2_1_0, q8_1_0), s2_1_0);
+ const __m128i q8s_1_1 = _mm_sub_epi8(_mm_xor_si128(s2_1_1, q8_1_1), s2_1_1);
+
+ aux128_0 = _mm_set1_epi32(signs[2] | ((uint32_t) signs[3] << 16));
+ aux128_1 = aux128_0;
+ aux128_0 = _mm_and_si128(_mm_shuffle_epi8(aux128_0,mask1_0), mask2_0);
+ aux128_1 = _mm_and_si128(_mm_shuffle_epi8(aux128_1,mask1_1), mask2_1);
+ const __m128i s2_2_0 = _mm_cmpeq_epi8(aux128_0, mask2_0);
+ const __m128i s2_2_1 = _mm_cmpeq_epi8(aux128_1, mask2_1);
+ const __m128i q8s_2_0 = _mm_sub_epi8(_mm_xor_si128(s2_2_0, q8_2_0), s2_2_0);
+ const __m128i q8s_2_1 = _mm_sub_epi8(_mm_xor_si128(s2_2_1, q8_2_1), s2_2_1);
+
+ signs += 4;
+
+ const __m128i dot1_0 = _mm_maddubs_epi16(q2_1_0, q8s_1_0);
+ const __m128i dot1_1 = _mm_maddubs_epi16(q2_1_1, q8s_1_1);
+ const __m128i dot2_0 = _mm_maddubs_epi16(q2_2_0, q8s_2_0);
+ const __m128i dot2_1 = _mm_maddubs_epi16(q2_2_1, q8s_2_1);
+
+ const __m128i p1_0 = _mm_madd_epi16(dot1_0, _mm_shuffle_epi8(scales16_0, _mm256_extractf128_si256(get_scale_shuffle_k4(ib32+0), 0)));
+ const __m128i p1_1 = _mm_madd_epi16(dot1_1, _mm_shuffle_epi8(scales16_1, _mm256_extractf128_si256(get_scale_shuffle_k4(ib32+0), 1)));
+ const __m128i p2_0 = _mm_madd_epi16(dot2_0, _mm_shuffle_epi8(scales16_0, _mm256_extractf128_si256(get_scale_shuffle_k4(ib32+1), 0)));
+ const __m128i p2_1 = _mm_madd_epi16(dot2_1, _mm_shuffle_epi8(scales16_1, _mm256_extractf128_si256(get_scale_shuffle_k4(ib32+1), 1)));
+ sumi1_0 = _mm_add_epi32(sumi1_0, p1_0);
+ sumi1_1 = _mm_add_epi32(sumi1_1, p1_1);
+ sumi2_0 = _mm_add_epi32(sumi2_0, p2_0);
+ sumi2_1 = _mm_add_epi32(sumi2_1, p2_1);
+ }
+
+ accumf = _mm256_add_ps(_mm256_mul_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(MM256_SET_M128I(_mm_add_epi32(sumi1_1, sumi2_1), _mm_add_epi32(sumi1_0, sumi2_0)))), accumf);
+
+ }
+
+ *s = 0.125f * hsum_float_8(accumf);
+
+#elif defined(__POWER9_VECTOR__)
+ static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
+ 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03
+ };
+
+ static const uint8_t k_mask2[16] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,};
+
+ const vector int v0 = vec_splats((int32_t)0);
+
+ vector float vsumf0 = vec_splats(0.0f);
+ vector float vsumf1 = vec_splats(0.0f);
+ vector float vsumf2 = vec_splats(0.0f);
+ vector float vsumf3 = vec_splats(0.0f);
+
+ const vector unsigned char mask0 = vec_xl( 0, k_mask1);
+ const vector unsigned char mask1 = vec_xl(16, k_mask1);
+ const vector signed char mask2 = (vector signed char)vec_xl( 0, k_mask2);
+
+ for (int i = 0; i < nb; ++i) {
+ vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[i].d));
+ vector float vyd = vec_splats(y[i].d);
+ vector float vd = vec_mul(vxd, vyd);
+
+ vector signed int vsumi0 = v0;
+ vector signed int vsumi1 = v0;
+ vector signed int vsumi2 = v0;
+ vector signed int vsumi3 = v0;
+
+ const uint8_t * restrict q2 = x[i].qs;
+ const uint8_t * restrict qh = x[i].qh;
+ const uint16_t * restrict signs = (const uint16_t *)(x[i].qs + QK_K/8);
+ const uint8_t * restrict sc = x[i].scales;
+ const int8_t * restrict q8 = y[i].qs;
+
+ for (int j = 0; j < QK_K/32; j += 2) {
+ __builtin_prefetch(q2, 0, 1);
+ __builtin_prefetch(q8, 0, 1);
+
+ vector signed long long aux64x2_0 = {*(const int64_t *)(iq2s_grid + (q2[0] | ((qh[0] << 8) & 0x300))), *(const int64_t *)(iq2s_grid + (q2[1] | ((qh[0] << 6) & 0x300)))};
+ vector signed long long aux64x2_1 = {*(const int64_t *)(iq2s_grid + (q2[2] | ((qh[0] << 4) & 0x300))), *(const int64_t *)(iq2s_grid + (q2[3] | ((qh[0] << 2) & 0x300)))};
+ vector signed long long aux64x2_2 = {*(const int64_t *)(iq2s_grid + (q2[4] | ((qh[1] << 8) & 0x300))), *(const int64_t *)(iq2s_grid + (q2[5] | ((qh[1] << 6) & 0x300)))};
+ vector signed long long aux64x2_3 = {*(const int64_t *)(iq2s_grid + (q2[6] | ((qh[1] << 4) & 0x300))), *(const int64_t *)(iq2s_grid + (q2[7] | ((qh[1] << 2) & 0x300)))};
+ q2 += 8;
+ qh += 2;
+
+ vector signed char vsigns01 = (vector signed char)vec_splats(*(const uint32_t *)&signs[0]);
+ vector signed char vsigns23 = (vector signed char)vec_splats(*(const uint32_t *)&signs[2]);
+ signs += 4;
+
+ vector signed char vsigns0 = vec_perm(vsigns01, vsigns01, mask0);
+ vector signed char vsigns1 = vec_perm(vsigns01, vsigns01, mask1);
+ vector signed char vsigns2 = vec_perm(vsigns23, vsigns23, mask0);
+ vector signed char vsigns3 = vec_perm(vsigns23, vsigns23, mask1);
+
+ vsigns0 = (vector signed char)vec_cmpeq(vec_and(vsigns0, mask2), mask2);
+ vsigns1 = (vector signed char)vec_cmpeq(vec_and(vsigns1, mask2), mask2);
+ vsigns2 = (vector signed char)vec_cmpeq(vec_and(vsigns2, mask2), mask2);
+ vsigns3 = (vector signed char)vec_cmpeq(vec_and(vsigns3, mask2), mask2);
+
+ vector signed char q2x0 = vec_sub(vec_xor(vsigns0, (vector signed char)aux64x2_0), vsigns0);
+ vector signed char q2x1 = vec_sub(vec_xor(vsigns1, (vector signed char)aux64x2_1), vsigns1);
+ vector signed char q2x2 = vec_sub(vec_xor(vsigns2, (vector signed char)aux64x2_2), vsigns2);
+ vector signed char q2x3 = vec_sub(vec_xor(vsigns3, (vector signed char)aux64x2_3), vsigns3);
+
+ vector signed char q8y0 = vec_xl( 0, q8);
+ vector signed char q8y1 = vec_xl(16, q8);
+ vector signed char q8y2 = vec_xl(32, q8);
+ vector signed char q8y3 = vec_xl(48, q8);
+ q8 += 64;
+
+ vector signed short qv0 = vec_add(vec_mule(q2x0, q8y0), vec_mulo(q2x0, q8y0));
+ vector signed short qv1 = vec_add(vec_mule(q2x1, q8y1), vec_mulo(q2x1, q8y1));
+ vector signed short qv2 = vec_add(vec_mule(q2x2, q8y2), vec_mulo(q2x2, q8y2));
+ vector signed short qv3 = vec_add(vec_mule(q2x3, q8y3), vec_mulo(q2x3, q8y3));
+
+ const uint16_t ls0 = (uint16_t)(sc[0] & 0xf);
+ const uint16_t ls1 = (uint16_t)(sc[0] >> 4);
+ const uint16_t ls2 = (uint16_t)(sc[1] & 0xf);
+ const uint16_t ls3 = (uint16_t)(sc[1] >> 4);
+ sc += 2;
+
+ vector signed short vscales0 = vec_splats((int16_t)(2*ls0+1));
+ vector signed short vscales1 = vec_splats((int16_t)(2*ls1+1));
+ vector signed short vscales2 = vec_splats((int16_t)(2*ls2+1));
+ vector signed short vscales3 = vec_splats((int16_t)(2*ls3+1));
+
+ vsumi0 = vec_msum(qv0, vscales0, vsumi0);
+ vsumi1 = vec_msum(qv1, vscales1, vsumi1);
+ vsumi2 = vec_msum(qv2, vscales2, vsumi2);
+ vsumi3 = vec_msum(qv3, vscales3, vsumi3);
+ }
+
+ vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0);
+ vsumf1 = vec_madd(vec_ctf(vsumi1, 0), vd, vsumf1);
+ vsumf2 = vec_madd(vec_ctf(vsumi2, 0), vd, vsumf2);
+ vsumf3 = vec_madd(vec_ctf(vsumi3, 0), vd, vsumf3);
+ }
+
+ vsumf0 = vec_add(vsumf0, vsumf2);
+ vsumf1 = vec_add(vsumf1, vsumf3);
+
+ vsumf0 = vec_add(vsumf0, vsumf1);
+
+ vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4));
+ vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8));
+
+ *s = 0.125f * vec_extract(vsumf0, 0);
+
+#elif defined(__loongarch_asx)
+
+ static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
+ 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03
+ };
+
+ static const uint8_t k_mask2[32] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,
+ 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,
+ };
+
+
+ const __m128i m4 = __lsx_vreplgr2vr_b(0xf);
+ const __m128i m1 = __lsx_vreplgr2vr_b(1);
+
+ const __m256i mask1 = __lasx_xvld((const __m256i*)k_mask1, 0);
+ const __m256i mask2 = __lasx_xvld((const __m256i*)k_mask2, 0);
+ uint64_t aux64;
+
+ __m256 accumf = (__m256)__lasx_xvldi(0);
+ for (int i = 0; i < nb; ++i) {
+ const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d;
+ const uint8_t * restrict qs = x[i].qs;
+ const uint8_t * restrict qh = x[i].qh;
+ const uint16_t * restrict signs = (const uint16_t *)(x[i].qs + QK_K/8);
+ const int8_t * restrict q8 = y[i].qs;
+
+ __m128i tmp1;
+ memcpy(&aux64, x[i].scales, 8);
+ tmp1 = __lsx_vinsgr2vr_d(tmp1, aux64, 0);
+ tmp1 = __lsx_vinsgr2vr_d(tmp1, aux64 >> 4, 1);
+ const __m128i scales8 = __lsx_vadd_b(__lsx_vslli_h(__lsx_vand_v(tmp1, m4), 1), m1);
+ const __m256i scales16 = lasx_ext8_16(scales8); // 0 2 4 6 8 10 12 14 1 3 5 7 9 11 13 15
+
+ __m256i sumi1 = __lasx_xvldi(0);
+ __m256i sumi2 = __lasx_xvldi(0);
+ for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) {
+ const __m256i q8_1 = __lasx_xvld((const __m256i *)q8, 0); q8 += 32;
+ const __m256i q8_2 = __lasx_xvld((const __m256i *)q8, 0); q8 += 32;
+ const __m256i q2_1 = lasx_set_d(iq2s_grid[qs[3] | ((qh[ib32+0] << 2) & 0x300)],
+ iq2s_grid[qs[2] | ((qh[ib32+0] << 4) & 0x300)],
+ iq2s_grid[qs[1] | ((qh[ib32+0] << 6) & 0x300)],
+ iq2s_grid[qs[0] | ((qh[ib32+0] << 8) & 0x300)]);
+ const __m256i q2_2 = lasx_set_d(iq2s_grid[qs[7] | ((qh[ib32+1] << 2) & 0x300)],
+ iq2s_grid[qs[6] | ((qh[ib32+1] << 4) & 0x300)],
+ iq2s_grid[qs[5] | ((qh[ib32+1] << 6) & 0x300)],
+ iq2s_grid[qs[4] | ((qh[ib32+1] << 8) & 0x300)]);
+ qs += 8;
+
+ __m256i aux256 = __lasx_xvreplgr2vr_w(signs[0] | ((uint32_t) signs[1] << 16));
+ aux256 = __lasx_xvand_v(lasx_shuffle_b(aux256,mask1), mask2);
+ const __m256i s2_1 = __lasx_xvseq_b(aux256, mask2);
+ const __m256i q8s_1 = __lasx_xvsub_b(__lasx_xvxor_v(s2_1, q8_1), s2_1);
+
+ aux256 = __lasx_xvreplgr2vr_w(signs[2] | ((uint32_t) signs[3] << 16));
+ aux256 = __lasx_xvand_v(lasx_shuffle_b(aux256,mask1), mask2);
+ const __m256i s2_2 = __lasx_xvseq_b(aux256, mask2);
+ const __m256i q8s_2 = __lasx_xvsub_b(__lasx_xvxor_v(s2_2, q8_2), s2_2);
+
+ signs += 4;
+
+ const __m256i dot1 = lasx_maddubs_h(q2_1, q8s_1); // blocks 2*ib32+0, 2*ib32+1
+ const __m256i dot2 = lasx_maddubs_h(q2_2, q8s_2); // blocks 2*ib32+2, 2*ib32+3
+
+ const __m256i p1 = lasx_madd_h(dot1, lasx_shuffle_b(scales16, get_scale_shuffle_k4(ib32+0)));
+ const __m256i p2 = lasx_madd_h(dot2, lasx_shuffle_b(scales16, get_scale_shuffle_k4(ib32+1)));
+ sumi1 = __lasx_xvadd_w(sumi1, p1);
+ sumi2 = __lasx_xvadd_w(sumi2, p2);
+ }
+
+ accumf = __lasx_xvfmadd_s(__lasx_xvreplfr2vr_s(d), __lasx_xvffint_s_w(__lasx_xvadd_w(sumi1, sumi2)), accumf);
+ }
+
+ *s = 0.125f * hsum_float_8(accumf);
+
+#else
+
+// Scalar reference path; it spells out the IQ2_S layout the SIMD branches above rely on.
+ float sumf = 0;
+ for (int i = 0; i < nb; i++) {
+
+ const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d;
+ const int8_t * q8 = y[i].qs;
+ const uint8_t * qs = x[i].qs;
+ const uint8_t * qh = x[i].qh;
+ const uint8_t * signs = qs + QK_K/8;
+
+ int bsum = 0;
+ for (int ib32 = 0; ib32 < QK_K/32; ++ib32) {
+ int ls1 = 1 + 2*(x[i].scales[ib32] & 0xf);
+ int ls2 = 1 + 2*(x[i].scales[ib32] >> 4);
+ int sumi1 = 0, sumi2 = 0;
+ for (int l = 0; l < 2; ++l) {
+ const uint8_t * grid = (const uint8_t *)(iq2s_grid + (qs[l] | (qh[ib32] << (8-2*l) & 0x300)));
+ for (int j = 0; j < 8; ++j) {
+ sumi1 += q8[j] * grid[j] * (signs[l] & kmask_iq2xs[j] ? -1 : 1);
+ }
+ q8 += 8;
+ }
+ for (int l = 2; l < 4; ++l) {
+ const uint8_t * grid = (const uint8_t *)(iq2s_grid + (qs[l] | (qh[ib32] << (8-2*l) & 0x300)));
+ for (int j = 0; j < 8; ++j) {
+ sumi2 += q8[j] * grid[j] * (signs[l] & kmask_iq2xs[j] ? -1 : 1);
+ }
+ q8 += 8;
+ }
+ bsum += ls1 * sumi1 + ls2 * sumi2;
+ qs += 4;
+ signs += 4;
+ }
+
+ sumf += d * bsum;
+ }
+
+ *s = 0.125f * sumf;
+
+#endif
+
+}
+
+void ggml_vec_dot_iq3_xxs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) {
+ assert(n % QK_K == 0);
+ assert(nrc == 1);
+ UNUSED(nrc);
+ UNUSED(bx);
+ UNUSED(by);
+ UNUSED(bs);
+
+ const block_iq3_xxs * restrict x = vx;
+ const block_q8_K * restrict y = vy;
+
+ const int nb = n / QK_K;
+
+#if defined(__ARM_NEON)
+
+ const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs;
+
+ uint32_t aux32[2];
+
+ ggml_int8x16x4_t q3s;
+ ggml_int8x16x4_t q8b;
+
+ float sumf = 0;
+ for (int i = 0; i < nb; ++i) {
+ const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d;
+ const uint8_t * restrict q3 = x[i].qs;
+ const uint8_t * restrict gas = x[i].qs + QK_K/4;
+ const int8_t * restrict q8 = y[i].qs;
+ float sumf1 = 0, sumf2 = 0;
+ for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) {
+ q8b = ggml_vld1q_s8_x4(q8); q8 += 64;
+ memcpy(aux32, gas, 2*sizeof(uint32_t)); gas += 2*sizeof(uint32_t);
+ const uint32x4_t aux32x4_0 = ggml_vld1q_u32(iq3xxs_grid[q3[ 0]], iq3xxs_grid[q3[ 1]], iq3xxs_grid[q3[ 2]], iq3xxs_grid[q3[ 3]]);
+ const uint32x4_t aux32x4_1 = ggml_vld1q_u32(iq3xxs_grid[q3[ 4]], iq3xxs_grid[q3[ 5]], iq3xxs_grid[q3[ 6]], iq3xxs_grid[q3[ 7]]);
+ const uint32x4_t aux32x4_2 = ggml_vld1q_u32(iq3xxs_grid[q3[ 8]], iq3xxs_grid[q3[ 9]], iq3xxs_grid[q3[10]], iq3xxs_grid[q3[11]]);
+ const uint32x4_t aux32x4_3 = ggml_vld1q_u32(iq3xxs_grid[q3[12]], iq3xxs_grid[q3[13]], iq3xxs_grid[q3[14]], iq3xxs_grid[q3[15]]);
+ q3 += 16;
+ q3s.val[0] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[0] >> 0) & 127))), vld1_s8((const void *)(signs64 + ((aux32[0] >> 7) & 127))));
+ q3s.val[1] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[0] >> 14) & 127))), vld1_s8((const void *)(signs64 + ((aux32[0] >> 21) & 127))));
+ q3s.val[2] = vcombine_s8(vld1_s8((const
void *)(signs64 + ((aux32[1] >> 0) & 127))), vld1_s8((const void *)(signs64 + ((aux32[1] >> 7) & 127)))); + q3s.val[3] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[1] >> 14) & 127))), vld1_s8((const void *)(signs64 + ((aux32[1] >> 21) & 127)))); + q3s.val[0] = vmulq_s8(q3s.val[0], vreinterpretq_s8_u32(aux32x4_0)); + q3s.val[1] = vmulq_s8(q3s.val[1], vreinterpretq_s8_u32(aux32x4_1)); + q3s.val[2] = vmulq_s8(q3s.val[2], vreinterpretq_s8_u32(aux32x4_2)); + q3s.val[3] = vmulq_s8(q3s.val[3], vreinterpretq_s8_u32(aux32x4_3)); + const int32x4_t p1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q3s.val[0], q8b.val[0]), q3s.val[1], q8b.val[1]); + const int32x4_t p2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q3s.val[2], q8b.val[2]), q3s.val[3], q8b.val[3]); + sumf1 += vaddvq_s32(p1) * (0.5f + (aux32[0] >> 28)); + sumf2 += vaddvq_s32(p2) * (0.5f + (aux32[1] >> 28)); + } + sumf += d*(sumf1 + sumf2); + } + *s = 0.5f * sumf; + +#elif defined(__AVX2__) + + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + uint32_t aux32[2]; + + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict q3 = x[i].qs; + const uint8_t * restrict gas = x[i].qs + QK_K/4; + const int8_t * restrict q8 = y[i].qs; + __m256i sumi1 = _mm256_setzero_si256(); + __m256i sumi2 = _mm256_setzero_si256(); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m256i q8_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q8_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q2_1 = _mm256_set_epi32(iq3xxs_grid[q3[7]], iq3xxs_grid[q3[6]], iq3xxs_grid[q3[5]], iq3xxs_grid[q3[4]], + iq3xxs_grid[q3[3]], iq3xxs_grid[q3[2]], iq3xxs_grid[q3[1]], iq3xxs_grid[q3[0]]); + q3 += 8; + const __m256i q2_2 = _mm256_set_epi32(iq3xxs_grid[q3[7]], iq3xxs_grid[q3[6]], iq3xxs_grid[q3[5]], iq3xxs_grid[q3[4]], + iq3xxs_grid[q3[3]], iq3xxs_grid[q3[2]], iq3xxs_grid[q3[1]], 
iq3xxs_grid[q3[0]]); + q3 += 8; + memcpy(aux32, gas, 8); gas += 8; + const __m256i s2_1 = _mm256_set_epi64x(signs64[(aux32[0] >> 21) & 127], signs64[(aux32[0] >> 14) & 127], + signs64[(aux32[0] >> 7) & 127], signs64[(aux32[0] >> 0) & 127]); + const __m256i s2_2 = _mm256_set_epi64x(signs64[(aux32[1] >> 21) & 127], signs64[(aux32[1] >> 14) & 127], + signs64[(aux32[1] >> 7) & 127], signs64[(aux32[1] >> 0) & 127]); + const __m256i q8s_1 = _mm256_sign_epi8(q8_1, s2_1); + const __m256i q8s_2 = _mm256_sign_epi8(q8_2, s2_2); + const __m256i dot1 = _mm256_maddubs_epi16(q2_1, q8s_1); + const __m256i dot2 = _mm256_maddubs_epi16(q2_2, q8s_2); + const uint16_t ls1 = aux32[0] >> 28; + const uint16_t ls2 = aux32[1] >> 28; + const __m256i p1 = _mm256_madd_epi16(dot1, _mm256_set1_epi16(2*ls1+1)); + const __m256i p2 = _mm256_madd_epi16(dot2, _mm256_set1_epi16(2*ls2+1)); + sumi1 = _mm256_add_epi32(sumi1, p1); + sumi2 = _mm256_add_epi32(sumi2, p2); + } + + accumf = _mm256_fmadd_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(_mm256_add_epi32(sumi1, sumi2)), accumf); + + } + + *s = 0.25f * hsum_float_8(accumf); + +#elif defined(__AVX__) + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + uint32_t aux32[2]; + + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict q3 = x[i].qs; + const uint8_t * restrict gas = x[i].qs + QK_K/4; + const int8_t * restrict q8 = y[i].qs; + __m128i sumi1_0 = _mm_setzero_si128(); + __m128i sumi1_1 = _mm_setzero_si128(); + __m128i sumi2_0 = _mm_setzero_si128(); + __m128i sumi2_1 = _mm_setzero_si128(); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m128i q8_1_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_1_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_2_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_2_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i 
q2_1_0 = _mm_set_epi32(iq3xxs_grid[q3[3]], iq3xxs_grid[q3[2]], iq3xxs_grid[q3[1]], iq3xxs_grid[q3[0]]); + const __m128i q2_1_1 = _mm_set_epi32(iq3xxs_grid[q3[7]], iq3xxs_grid[q3[6]], iq3xxs_grid[q3[5]], iq3xxs_grid[q3[4]]); + q3 += 8; + const __m128i q2_2_0 = _mm_set_epi32(iq3xxs_grid[q3[3]], iq3xxs_grid[q3[2]], iq3xxs_grid[q3[1]], iq3xxs_grid[q3[0]]); + const __m128i q2_2_1 = _mm_set_epi32(iq3xxs_grid[q3[7]], iq3xxs_grid[q3[6]], iq3xxs_grid[q3[5]], iq3xxs_grid[q3[4]]); + q3 += 8; + memcpy(aux32, gas, 8); gas += 8; + const __m128i s2_1_0 = _mm_set_epi64x(signs64[(aux32[0] >> 7) & 127], signs64[(aux32[0] >> 0) & 127]); + const __m128i s2_1_1 = _mm_set_epi64x(signs64[(aux32[0] >> 21) & 127], signs64[(aux32[0] >> 14) & 127]); + const __m128i s2_2_0 = _mm_set_epi64x(signs64[(aux32[1] >> 7) & 127], signs64[(aux32[1] >> 0) & 127]); + const __m128i s2_2_1 = _mm_set_epi64x(signs64[(aux32[1] >> 21) & 127], signs64[(aux32[1] >> 14) & 127]); + const __m128i q8s_1_0 = _mm_sign_epi8(q8_1_0, s2_1_0); + const __m128i q8s_1_1 = _mm_sign_epi8(q8_1_1, s2_1_1); + const __m128i q8s_2_0 = _mm_sign_epi8(q8_2_0, s2_2_0); + const __m128i q8s_2_1 = _mm_sign_epi8(q8_2_1, s2_2_1); + const __m128i dot1_0 = _mm_maddubs_epi16(q2_1_0, q8s_1_0); + const __m128i dot1_1 = _mm_maddubs_epi16(q2_1_1, q8s_1_1); + const __m128i dot2_0 = _mm_maddubs_epi16(q2_2_0, q8s_2_0); + const __m128i dot2_1 = _mm_maddubs_epi16(q2_2_1, q8s_2_1); + const uint16_t ls1 = aux32[0] >> 28; + const uint16_t ls2 = aux32[1] >> 28; + const __m128i p1_0 = _mm_madd_epi16(dot1_0, _mm_set1_epi16(2*ls1+1)); + const __m128i p1_1 = _mm_madd_epi16(dot1_1, _mm_set1_epi16(2*ls1+1)); + const __m128i p2_0 = _mm_madd_epi16(dot2_0, _mm_set1_epi16(2*ls2+1)); + const __m128i p2_1 = _mm_madd_epi16(dot2_1, _mm_set1_epi16(2*ls2+1)); + sumi1_0 = _mm_add_epi32(sumi1_0, p1_0); + sumi1_1 = _mm_add_epi32(sumi1_1, p1_1); + sumi2_0 = _mm_add_epi32(sumi2_0, p2_0); + sumi2_1 = _mm_add_epi32(sumi2_1, p2_1); + } + + accumf = 
_mm256_add_ps(_mm256_mul_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(MM256_SET_M128I(_mm_add_epi32(sumi1_1, sumi2_1), _mm_add_epi32(sumi1_0, sumi2_0)))), accumf); + + } + + *s = 0.25f * hsum_float_8(accumf); + +#elif defined(__POWER9_VECTOR__) + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + const vector int v0 = vec_splats((int32_t)0); + + vector float vsumf0 = vec_splats(0.0f); + vector float vsumf1 = vec_splats(0.0f); + vector float vsumf2 = vec_splats(0.0f); + vector float vsumf3 = vec_splats(0.0f); + + for (int i = 0; i < nb; ++i) { + vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[i].d)); + vector float vyd = vec_splats(y[i].d); + vector float vd = vec_mul(vxd, vyd); + + vector signed int vsumi0 = v0; + vector signed int vsumi1 = v0; + vector signed int vsumi2 = v0; + vector signed int vsumi3 = v0; + + const uint8_t * restrict q3 = x[i].qs; + const uint32_t * restrict signs = (const uint32_t *)(x[i].qs + QK_K/4); + const int8_t * restrict q8 = y[i].qs; + +#pragma GCC unroll 1 + for (int j = 0; j < QK_K/32; j += 2) { + __builtin_prefetch(q3, 0, 1); + __builtin_prefetch(q8, 0, 1); + + vector unsigned int aux32x4_0 = {iq3xxs_grid[q3[ 0]], iq3xxs_grid[q3[ 1]], iq3xxs_grid[q3[ 2]], iq3xxs_grid[q3[ 3]]}; + vector unsigned int aux32x4_1 = {iq3xxs_grid[q3[ 4]], iq3xxs_grid[q3[ 5]], iq3xxs_grid[q3[ 6]], iq3xxs_grid[q3[ 7]]}; + vector unsigned int aux32x4_2 = {iq3xxs_grid[q3[ 8]], iq3xxs_grid[q3[ 9]], iq3xxs_grid[q3[10]], iq3xxs_grid[q3[11]]}; + vector unsigned int aux32x4_3 = {iq3xxs_grid[q3[12]], iq3xxs_grid[q3[13]], iq3xxs_grid[q3[14]], iq3xxs_grid[q3[15]]}; + q3 += 16; + + vector unsigned long long aux64x2_0 = {(uint64_t)(signs64[(signs[0] >> 0) & 127]), (uint64_t)(signs64[(signs[0] >> 7) & 127])}; + vector unsigned long long aux64x2_1 = {(uint64_t)(signs64[(signs[0] >> 14) & 127]), (uint64_t)(signs64[(signs[0] >> 21) & 127])}; + vector unsigned long long aux64x2_2 = {(uint64_t)(signs64[(signs[1] >> 0) & 127]), (uint64_t)(signs64[(signs[1] >> 7) 
& 127])}; + vector unsigned long long aux64x2_3 = {(uint64_t)(signs64[(signs[1] >> 14) & 127]), (uint64_t)(signs64[(signs[1] >> 21) & 127])}; + + vector signed char q3x0 = vec_mul((vector signed char)aux64x2_0, (vector signed char)aux32x4_0); + vector signed char q3x1 = vec_mul((vector signed char)aux64x2_1, (vector signed char)aux32x4_1); + vector signed char q3x2 = vec_mul((vector signed char)aux64x2_2, (vector signed char)aux32x4_2); + vector signed char q3x3 = vec_mul((vector signed char)aux64x2_3, (vector signed char)aux32x4_3); + + vector signed char q8y0 = vec_xl( 0, q8); + vector signed char q8y1 = vec_xl(16, q8); + vector signed char q8y2 = vec_xl(32, q8); + vector signed char q8y3 = vec_xl(48, q8); + q8 += 64; + + vector signed short qv0 = vec_add(vec_mule(q3x0, q8y0), vec_mulo(q3x0, q8y0)); + vector signed short qv1 = vec_add(vec_mule(q3x1, q8y1), vec_mulo(q3x1, q8y1)); + vector signed short qv2 = vec_add(vec_mule(q3x2, q8y2), vec_mulo(q3x2, q8y2)); + vector signed short qv3 = vec_add(vec_mule(q3x3, q8y3), vec_mulo(q3x3, q8y3)); + + const uint16_t ls0 = (uint16_t)(signs[0] >> 28); + const uint16_t ls1 = (uint16_t)(signs[1] >> 28); + signs += 2; + + vector signed short vscales01 = (vector signed short)vec_splats((uint16_t)(2*ls0+1)); + vector signed short vscales23 = (vector signed short)vec_splats((uint16_t)(2*ls1+1)); + + vsumi0 = vec_msum(qv0, vscales01, vsumi0); + vsumi1 = vec_msum(qv1, vscales01, vsumi1); + vsumi2 = vec_msum(qv2, vscales23, vsumi2); + vsumi3 = vec_msum(qv3, vscales23, vsumi3); + } + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0); + vsumf1 = vec_madd(vec_ctf(vsumi1, 0), vd, vsumf1); + vsumf2 = vec_madd(vec_ctf(vsumi2, 0), vd, vsumf2); + vsumf3 = vec_madd(vec_ctf(vsumi3, 0), vd, vsumf3); + } + + vsumf0 = vec_add(vsumf0, vsumf2); + vsumf1 = vec_add(vsumf1, vsumf3); + + vsumf0 = vec_add(vsumf0, vsumf1); + + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8)); + + *s = 0.25f * 
vec_extract(vsumf0, 0); + +#elif defined(__loongarch_asx) + + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + uint32_t aux32[2]; + + __m256 accumf = (__m256)__lasx_xvldi(0); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict q3 = x[i].qs; + const uint8_t * restrict gas = x[i].qs + QK_K/4; + const int8_t * restrict q8 = y[i].qs; + __m256i sumi1 = __lasx_xvldi(0); + __m256i sumi2 = __lasx_xvldi(0); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m256i q8_1 = __lasx_xvld((const __m256i *)q8, 0); q8 += 32; + const __m256i q8_2 = __lasx_xvld((const __m256i *)q8, 0); q8 += 32; + const __m256i q2_1 = lasx_set_w(iq3xxs_grid[q3[7]], iq3xxs_grid[q3[6]], iq3xxs_grid[q3[5]], iq3xxs_grid[q3[4]], + iq3xxs_grid[q3[3]], iq3xxs_grid[q3[2]], iq3xxs_grid[q3[1]], iq3xxs_grid[q3[0]]); + q3 += 8; + const __m256i q2_2 = lasx_set_w(iq3xxs_grid[q3[7]], iq3xxs_grid[q3[6]], iq3xxs_grid[q3[5]], iq3xxs_grid[q3[4]], + iq3xxs_grid[q3[3]], iq3xxs_grid[q3[2]], iq3xxs_grid[q3[1]], iq3xxs_grid[q3[0]]); + q3 += 8; + memcpy(aux32, gas, 8); gas += 8; + + const __m256i s2_1 = lasx_set_d(signs64[(aux32[0] >> 21) & 127], signs64[(aux32[0] >> 14) & 127], + signs64[(aux32[0] >> 7) & 127], signs64[(aux32[0] >> 0) & 127]); + const __m256i s2_2 = lasx_set_d(signs64[(aux32[1] >> 21) & 127], signs64[(aux32[1] >> 14) & 127], + signs64[(aux32[1] >> 7) & 127], signs64[(aux32[1] >> 0) & 127]); + const __m256i q8s_1 = __lasx_xvsigncov_b(s2_1, q8_1); + const __m256i q8s_2 = __lasx_xvsigncov_b(s2_2, q8_2); + const __m256i dot1 = lasx_maddubs_h(q2_1, q8s_1); + const __m256i dot2 = lasx_maddubs_h(q2_2, q8s_2); + const uint16_t ls1 = aux32[0] >> 28; + const uint16_t ls2 = aux32[1] >> 28; + + const __m256i p1 = lasx_madd_h(dot1, __lasx_xvreplgr2vr_h(2*ls1+1)); + const __m256i p2 = lasx_madd_h(dot2, __lasx_xvreplgr2vr_h(2*ls2+1)); + sumi1 = __lasx_xvadd_w(sumi1, p1); + sumi2 = __lasx_xvadd_w(sumi2, p2); + } + + accumf = 
__lasx_xvfmadd_s(__lasx_xvreplfr2vr_s(d), __lasx_xvffint_s_w(__lasx_xvadd_w(sumi1, sumi2)), accumf); + } + + *s = 0.25f * hsum_float_8(accumf); + +#else + + uint32_t aux32; + + float sumf = 0.f; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict q3 = x[i].qs; + const uint8_t * restrict gas = x[i].qs + QK_K/4; + const int8_t * restrict q8 = y[i].qs; + int32_t bsum = 0; + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + memcpy(&aux32, gas, sizeof(uint32_t)); gas += sizeof(uint32_t); + const uint32_t ls = 2*(aux32 >> 28) + 1; + int32_t sumi = 0; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid1 = (const uint8_t *)(iq3xxs_grid + q3[2*l+0]); + const uint8_t * grid2 = (const uint8_t *)(iq3xxs_grid + q3[2*l+1]); + const uint8_t signs = ksigns_iq2xs[(aux32 >> 7*l) & 127]; + for (int j = 0; j < 4; ++j) { + sumi += grid1[j] * q8[j+0] * (signs & kmask_iq2xs[j+0] ? -1 : 1); + sumi += grid2[j] * q8[j+4] * (signs & kmask_iq2xs[j+4] ? -1 : 1); + } + q8 += 8; + } + q3 += 8; + bsum += sumi * ls; + } + sumf += d * bsum; + } + *s = 0.25f * sumf; +#endif +} + +void ggml_vec_dot_iq3_s_q8_K (int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_iq3_s * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined(__ARM_NEON) + + typedef union { + uint16x8_t vec_index; + uint16_t index[8]; + } vec_index_t; + + static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03 + }; + + static const uint8_t k_mask2[16] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,}; + + static const int16_t 
k_shift[8] = {8, 7, 6, 5, 4, 3, 2, 1}; + + const ggml_uint8x16x2_t mask1 = ggml_vld1q_u8_x2(k_mask1); + const uint8x16_t mask2 = vld1q_u8(k_mask2); + + const int16x8_t hshift = vld1q_s16(k_shift); + const uint16x8_t m256 = vdupq_n_u16(256); + const uint8x16_t m1 = vdupq_n_u8(1); + + uint8x16x2_t vs; + ggml_int8x16x4_t q3s; + ggml_int8x16x4_t q8b; + vec_index_t idx; + + uint32_t scales32[2]; + const uint8_t * scales8 = (const uint8_t *)scales32; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict qs = x[i].qs; + const uint8_t * restrict qh = x[i].qh; + const uint16_t * restrict signs = (const uint16_t *)x[i].signs; + const int8_t * restrict q8 = y[i].qs; + + memcpy(scales32, x[i].scales, 4); + scales32[1] = (((scales32[0] >> 4) & 0x0f0f0f0f) << 1) | 0x01010101; + scales32[0] = ((scales32[0] & 0x0f0f0f0f) << 1) | 0x01010101; + + int sumi1 = 0, sumi2 = 0; + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; + + const uint8x16_t idx_l = vld1q_u8(qs); qs += 16; + idx.vec_index = vorrq_u16(vmovl_u8(vget_low_u8 (idx_l)), vandq_u16(vshlq_u16(vdupq_n_u16(qh[ib32+0]), hshift), m256)); + const uint32x4_t aux32x4_0 = ggml_vld1q_u32(iq3s_grid[idx.index[0]], iq3s_grid[idx.index[1]], + iq3s_grid[idx.index[2]], iq3s_grid[idx.index[3]]); + const uint32x4_t aux32x4_1 = ggml_vld1q_u32(iq3s_grid[idx.index[4]], iq3s_grid[idx.index[5]], + iq3s_grid[idx.index[6]], iq3s_grid[idx.index[7]]); + idx.vec_index = vorrq_u16(vmovl_u8(vget_high_u8(idx_l)), vandq_u16(vshlq_u16(vdupq_n_u16(qh[ib32+1]), hshift), m256)); + const uint32x4_t aux32x4_2 = ggml_vld1q_u32(iq3s_grid[idx.index[0]], iq3s_grid[idx.index[1]], + iq3s_grid[idx.index[2]], iq3s_grid[idx.index[3]]); + const uint32x4_t aux32x4_3 = ggml_vld1q_u32(iq3s_grid[idx.index[4]], iq3s_grid[idx.index[5]], + iq3s_grid[idx.index[6]], iq3s_grid[idx.index[7]]); + + + vs.val[0] = vreinterpretq_u8_u32(vdupq_n_u32(signs[0] | 
((uint32_t) signs[1] << 16))); + vs.val[1] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); + vs.val[0] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); + vs.val[0] = vorrq_u8(vceqq_u8(vs.val[0], mask2), m1); + vs.val[1] = vorrq_u8(vceqq_u8(vs.val[1], mask2), m1); + + q3s.val[0] = vmulq_s8(vreinterpretq_s8_u8(vs.val[0]), vreinterpretq_s8_u32(aux32x4_0)); + q3s.val[1] = vmulq_s8(vreinterpretq_s8_u8(vs.val[1]), vreinterpretq_s8_u32(aux32x4_1)); + + vs.val[0] = vreinterpretq_u8_u32(vdupq_n_u32(signs[2] | ((uint32_t) signs[3] << 16))); + vs.val[1] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); + vs.val[0] = vandq_u8(ggml_vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); + vs.val[0] = vorrq_u8(vceqq_u8(vs.val[0], mask2), m1); + vs.val[1] = vorrq_u8(vceqq_u8(vs.val[1], mask2), m1); + + signs += 4; + + q3s.val[2] = vmulq_s8(vreinterpretq_s8_u8(vs.val[0]), vreinterpretq_s8_u32(aux32x4_2)); + q3s.val[3] = vmulq_s8(vreinterpretq_s8_u8(vs.val[1]), vreinterpretq_s8_u32(aux32x4_3)); + + const int32x4_t p1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q3s.val[0], q8b.val[0]), q3s.val[1], q8b.val[1]); + const int32x4_t p2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q3s.val[2], q8b.val[2]), q3s.val[3], q8b.val[3]); + + sumi1 += vaddvq_s32(p1) * scales8[ib32/2+0]; + sumi2 += vaddvq_s32(p2) * scales8[ib32/2+4]; + } + sumf += d*(sumi1 + sumi2); + } + *s = sumf; + +#elif defined(__AVX2__) + + static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03 + }; + + static const uint8_t k_mask2[32] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + }; + + const __m256i mask1 = _mm256_loadu_si256((const __m256i*)k_mask1); + const __m256i 
mask2 = _mm256_loadu_si256((const __m256i*)k_mask2); + + const __m256i idx_shift = _mm256_set_epi32(1, 2, 3, 4, 5, 6, 7, 8); + const __m256i idx_mask = _mm256_set1_epi32(256); + + typedef union { + __m256i vec[2]; + uint32_t index[16]; + } index_t; + + index_t idx; + + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict qs = x[i].qs; + const uint8_t * restrict qh = x[i].qh; + const uint16_t * restrict signs = (const uint16_t *)x[i].signs; + const int8_t * restrict q8 = y[i].qs; + __m256i sumi1 = _mm256_setzero_si256(); + __m256i sumi2 = _mm256_setzero_si256(); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m256i q8_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q8_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i idx_l = _mm256_cvtepu8_epi16(_mm_loadu_si128((const __m128i *)qs)); qs += 16; + idx.vec[0] = _mm256_set1_epi32(qh[ib32+0]); + idx.vec[1] = _mm256_set1_epi32(qh[ib32+1]); + idx.vec[0] = _mm256_and_si256(_mm256_sllv_epi32(idx.vec[0], idx_shift), idx_mask); + idx.vec[1] = _mm256_and_si256(_mm256_sllv_epi32(idx.vec[1], idx_shift), idx_mask); + idx.vec[0] = _mm256_or_si256(idx.vec[0], _mm256_cvtepi16_epi32(_mm256_castsi256_si128(idx_l))); + idx.vec[1] = _mm256_or_si256(idx.vec[1], _mm256_cvtepi16_epi32(_mm256_extractf128_si256(idx_l, 1))); + + // At leat on my CPU (Ryzen 7950X), using _mm256_i32gather_epi32 is slower than _mm256_set_epi32. Strange. 
+ //const __m256i q2_1 = _mm256_i32gather_epi32((const int *)iq3s_grid, idx.vec[0], 4); + //const __m256i q2_2 = _mm256_i32gather_epi32((const int *)iq3s_grid, idx.vec[1], 4); + const __m256i q2_1 = _mm256_set_epi32( + iq3s_grid[idx.index[7]], iq3s_grid[idx.index[6]], iq3s_grid[idx.index[5]], iq3s_grid[idx.index[4]], + iq3s_grid[idx.index[3]], iq3s_grid[idx.index[2]], iq3s_grid[idx.index[1]], iq3s_grid[idx.index[0]] + ); + const __m256i q2_2 = _mm256_set_epi32( + iq3s_grid[idx.index[15]], iq3s_grid[idx.index[14]], iq3s_grid[idx.index[13]], iq3s_grid[idx.index[12]], + iq3s_grid[idx.index[11]], iq3s_grid[idx.index[10]], iq3s_grid[idx.index[ 9]], iq3s_grid[idx.index[ 8]] + ); + + __m256i aux256 = _mm256_set1_epi32(signs[0] | (signs[1] << 16)); + aux256 = _mm256_and_si256(_mm256_shuffle_epi8(aux256,mask1), mask2); + const __m256i s2_1 = _mm256_cmpeq_epi8(aux256, mask2); + const __m256i q8s_1 = _mm256_sub_epi8(_mm256_xor_si256(s2_1, q8_1), s2_1); + + aux256 = _mm256_set1_epi32(signs[2] | (signs[3] << 16)); + aux256 = _mm256_and_si256(_mm256_shuffle_epi8(aux256,mask1), mask2); + const __m256i s2_2 = _mm256_cmpeq_epi8(aux256, mask2); + const __m256i q8s_2 = _mm256_sub_epi8(_mm256_xor_si256(s2_2, q8_2), s2_2); + + signs += 4; + + const __m256i dot1 = _mm256_maddubs_epi16(q2_1, q8s_1); + const __m256i dot2 = _mm256_maddubs_epi16(q2_2, q8s_2); + const uint16_t ls1 = x[i].scales[ib32/2] & 0xf; + const uint16_t ls2 = x[i].scales[ib32/2] >> 4; + const __m256i p1 = _mm256_madd_epi16(dot1, _mm256_set1_epi16(2*ls1+1)); + const __m256i p2 = _mm256_madd_epi16(dot2, _mm256_set1_epi16(2*ls2+1)); + sumi1 = _mm256_add_epi32(sumi1, p1); + sumi2 = _mm256_add_epi32(sumi2, p2); + } + + accumf = _mm256_fmadd_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(_mm256_add_epi32(sumi1, sumi2)), accumf); + + } + + *s = hsum_float_8(accumf); + +#elif defined(__AVX__) + static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x02, 
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03 + }; + + static const uint8_t k_mask2[32] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + }; + + const __m128i mask1_0 = _mm_loadu_si128((const __m128i*)k_mask1); + const __m128i mask1_1 = _mm_loadu_si128((const __m128i*)k_mask1 + 1); + const __m128i mask2_0 = _mm_loadu_si128((const __m128i*)k_mask2); + const __m128i mask2_1 = _mm_loadu_si128((const __m128i*)k_mask2 + 1); + + const __m128i idx_mul_0 = _mm_set_epi32(32, 64, 128, 256); + const __m128i idx_mul_1 = _mm_set_epi32(2, 4, 8, 16); + const __m128i idx_mask = _mm_set1_epi32(256); + + typedef union { + __m128i vec[4]; + uint32_t index[16]; + } index_t; + + index_t idx; + + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict qs = x[i].qs; + const uint8_t * restrict qh = x[i].qh; + const uint16_t * restrict signs = (const uint16_t *)x[i].signs; + const int8_t * restrict q8 = y[i].qs; + __m128i sumi1_0 = _mm_setzero_si128(); + __m128i sumi1_1 = _mm_setzero_si128(); + __m128i sumi2_0 = _mm_setzero_si128(); + __m128i sumi2_1 = _mm_setzero_si128(); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m128i q8_1_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_1_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_2_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8_2_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i qs_tmp = _mm_loadu_si128((const __m128i *)qs); + const __m128i idx_l_0 = _mm_cvtepu8_epi16(qs_tmp); + const __m128i idx_l_1 = _mm_cvtepu8_epi16(_mm_srli_si128(qs_tmp, 8)); qs += 16; + idx.vec[0] = _mm_set1_epi32(qh[ib32+0]); + idx.vec[1] = idx.vec[0]; + idx.vec[2] = 
_mm_set1_epi32(qh[ib32+1]); + idx.vec[3] = idx.vec[2]; + + idx.vec[0] = _mm_and_si128(_mm_mullo_epi32(idx.vec[0], idx_mul_0), idx_mask); + idx.vec[1] = _mm_and_si128(_mm_mullo_epi32(idx.vec[1], idx_mul_1), idx_mask); + idx.vec[2] = _mm_and_si128(_mm_mullo_epi32(idx.vec[2], idx_mul_0), idx_mask); + idx.vec[3] = _mm_and_si128(_mm_mullo_epi32(idx.vec[3], idx_mul_1), idx_mask); + + idx.vec[0] = _mm_or_si128(idx.vec[0], _mm_cvtepi16_epi32(idx_l_0)); + idx.vec[1] = _mm_or_si128(idx.vec[1], _mm_cvtepi16_epi32(_mm_srli_si128(idx_l_0, 8))); + idx.vec[2] = _mm_or_si128(idx.vec[2], _mm_cvtepi16_epi32(idx_l_1)); + idx.vec[3] = _mm_or_si128(idx.vec[3], _mm_cvtepi16_epi32(_mm_srli_si128(idx_l_1, 8))); + + const __m128i q2_1_0 = _mm_set_epi32(iq3s_grid[idx.index[3]], iq3s_grid[idx.index[2]], iq3s_grid[idx.index[1]], iq3s_grid[idx.index[0]]); + const __m128i q2_1_1 = _mm_set_epi32(iq3s_grid[idx.index[7]], iq3s_grid[idx.index[6]], iq3s_grid[idx.index[5]], iq3s_grid[idx.index[4]]); + const __m128i q2_2_0 = _mm_set_epi32(iq3s_grid[idx.index[11]], iq3s_grid[idx.index[10]], iq3s_grid[idx.index[9]], iq3s_grid[idx.index[8]]); + const __m128i q2_2_1 = _mm_set_epi32(iq3s_grid[idx.index[15]], iq3s_grid[idx.index[14]], iq3s_grid[idx.index[13]], iq3s_grid[idx.index[12]]); + + __m128i aux128_0 = _mm_set1_epi32(signs[0] | (signs[1] << 16)); + __m128i aux128_1 = aux128_0; + aux128_0 = _mm_and_si128(_mm_shuffle_epi8(aux128_0,mask1_0), mask2_0); + aux128_1 = _mm_and_si128(_mm_shuffle_epi8(aux128_1,mask1_1), mask2_1); + const __m128i s2_1_0 = _mm_cmpeq_epi8(aux128_0, mask2_0); + const __m128i s2_1_1 = _mm_cmpeq_epi8(aux128_1, mask2_1); + const __m128i q8s_1_0 = _mm_sub_epi8(_mm_xor_si128(s2_1_0, q8_1_0), s2_1_0); + const __m128i q8s_1_1 = _mm_sub_epi8(_mm_xor_si128(s2_1_1, q8_1_1), s2_1_1); + + aux128_0 = _mm_set1_epi32(signs[2] | (signs[3] << 16)); + aux128_1 = aux128_0; + aux128_0 = _mm_and_si128(_mm_shuffle_epi8(aux128_0,mask1_0), mask2_0); + aux128_1 = 
_mm_and_si128(_mm_shuffle_epi8(aux128_1,mask1_1), mask2_1); + const __m128i s2_2_0 = _mm_cmpeq_epi8(aux128_0, mask2_0); + const __m128i s2_2_1 = _mm_cmpeq_epi8(aux128_1, mask2_1); + const __m128i q8s_2_0 = _mm_sub_epi8(_mm_xor_si128(s2_2_0, q8_2_0), s2_2_0); + const __m128i q8s_2_1 = _mm_sub_epi8(_mm_xor_si128(s2_2_1, q8_2_1), s2_2_1); + + signs += 4; + + const __m128i dot1_0 = _mm_maddubs_epi16(q2_1_0, q8s_1_0); + const __m128i dot1_1 = _mm_maddubs_epi16(q2_1_1, q8s_1_1); + const __m128i dot2_0 = _mm_maddubs_epi16(q2_2_0, q8s_2_0); + const __m128i dot2_1 = _mm_maddubs_epi16(q2_2_1, q8s_2_1); + const uint16_t ls1 = x[i].scales[ib32/2] & 0xf; + const uint16_t ls2 = x[i].scales[ib32/2] >> 4; + const __m128i p1_0 = _mm_madd_epi16(dot1_0, _mm_set1_epi16(2*ls1+1)); + const __m128i p1_1 = _mm_madd_epi16(dot1_1, _mm_set1_epi16(2*ls1+1)); + const __m128i p2_0 = _mm_madd_epi16(dot2_0, _mm_set1_epi16(2*ls2+1)); + const __m128i p2_1 = _mm_madd_epi16(dot2_1, _mm_set1_epi16(2*ls2+1)); + sumi1_0 = _mm_add_epi32(sumi1_0, p1_0); + sumi1_1 = _mm_add_epi32(sumi1_1, p1_1); + sumi2_0 = _mm_add_epi32(sumi2_0, p2_0); + sumi2_1 = _mm_add_epi32(sumi2_1, p2_1); + } + + accumf = _mm256_add_ps(_mm256_mul_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(MM256_SET_M128I(_mm_add_epi32(sumi1_1, sumi2_1), _mm_add_epi32(sumi1_0, sumi2_0)))), accumf); + + } + + *s = hsum_float_8(accumf); + +#elif defined(__POWER9_VECTOR__) + static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03 + }; + + static const uint8_t k_mask2[16] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,}; + + const vector int v0 = vec_splats((int32_t)0); + + vector float vsumf0 = vec_splats(0.0f); + vector float vsumf1 = vec_splats(0.0f); + vector float vsumf2 = vec_splats(0.0f); + vector float vsumf3 = vec_splats(0.0f); 
+ + const vector unsigned char mask0 = vec_xl( 0, k_mask1); + const vector unsigned char mask1 = vec_xl(16, k_mask1); + const vector signed char mask2 = (vector signed char)vec_xl( 0, k_mask2); + + for (int i = 0; i < nb; ++i) { + vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[i].d)); + vector float vyd = vec_splats(y[i].d); + vector float vd = vec_mul(vxd, vyd); + + const uint8_t * restrict q3 = x[i].qs; + const uint8_t * restrict qh = x[i].qh; + const uint16_t * restrict signs = (const uint16_t *)(x[i].signs); + const uint8_t * restrict sc = x[i].scales; + const int8_t * restrict q8 = y[i].qs; + + vector signed int vsumi0 = v0; + vector signed int vsumi1 = v0; + vector signed int vsumi2 = v0; + vector signed int vsumi3 = v0; + + for (int j = 0; j < QK_K/32; j += 2) { + __builtin_prefetch(q3, 0, 1); + __builtin_prefetch(q8, 0, 1); + + vector unsigned int aux32x4_0 = {iq3s_grid[q3[ 0] | ((qh[0] << 8) & 256)], iq3s_grid[q3[ 1] | ((qh[0] << 7) & 256)], + iq3s_grid[q3[ 2] | ((qh[0] << 6) & 256)], iq3s_grid[q3[ 3] | ((qh[0] << 5) & 256)]}; + vector unsigned int aux32x4_1 = {iq3s_grid[q3[ 4] | ((qh[0] << 4) & 256)], iq3s_grid[q3[ 5] | ((qh[0] << 3) & 256)], + iq3s_grid[q3[ 6] | ((qh[0] << 2) & 256)], iq3s_grid[q3[ 7] | ((qh[0] << 1) & 256)]}; + vector unsigned int aux32x4_2 = {iq3s_grid[q3[ 8] | ((qh[1] << 8) & 256)], iq3s_grid[q3[ 9] | ((qh[1] << 7) & 256)], + iq3s_grid[q3[10] | ((qh[1] << 6) & 256)], iq3s_grid[q3[11] | ((qh[1] << 5) & 256)]}; + vector unsigned int aux32x4_3 = {iq3s_grid[q3[12] | ((qh[1] << 4) & 256)], iq3s_grid[q3[13] | ((qh[1] << 3) & 256)], + iq3s_grid[q3[14] | ((qh[1] << 2) & 256)], iq3s_grid[q3[15] | ((qh[1] << 1) & 256)]}; + q3 += 16; + qh += 2; + + vector signed char vsigns01 = (vector signed char)vec_splats(*(const uint32_t *)&signs[0]); + vector signed char vsigns02 = (vector signed char)vec_splats(*(const uint32_t *)&signs[2]); + signs += 4; + + vector signed char vsigns0 = vec_perm(vsigns01, vsigns01, mask0); + vector signed char vsigns1 
= vec_perm(vsigns01, vsigns01, mask1); + vector signed char vsigns2 = vec_perm(vsigns02, vsigns02, mask0); + vector signed char vsigns3 = vec_perm(vsigns02, vsigns02, mask1); + + vsigns0 = (vector signed char)vec_cmpeq(vec_and(vsigns0, mask2), mask2); + vsigns1 = (vector signed char)vec_cmpeq(vec_and(vsigns1, mask2), mask2); + vsigns2 = (vector signed char)vec_cmpeq(vec_and(vsigns2, mask2), mask2); + vsigns3 = (vector signed char)vec_cmpeq(vec_and(vsigns3, mask2), mask2); + + vector signed char q3x0 = vec_sub(vec_xor(vsigns0, (vector signed char)aux32x4_0), vsigns0); + vector signed char q3x1 = vec_sub(vec_xor(vsigns1, (vector signed char)aux32x4_1), vsigns1); + vector signed char q3x2 = vec_sub(vec_xor(vsigns2, (vector signed char)aux32x4_2), vsigns2); + vector signed char q3x3 = vec_sub(vec_xor(vsigns3, (vector signed char)aux32x4_3), vsigns3); + + vector signed char q8y0 = vec_xl( 0, q8); + vector signed char q8y1 = vec_xl(16, q8); + vector signed char q8y2 = vec_xl(32, q8); + vector signed char q8y3 = vec_xl(48, q8); + q8 += 64; + + vector signed short qv0 = vec_add(vec_mule(q3x0, q8y0), vec_mulo(q3x0, q8y0)); + vector signed short qv1 = vec_add(vec_mule(q3x1, q8y1), vec_mulo(q3x1, q8y1)); + vector signed short qv2 = vec_add(vec_mule(q3x2, q8y2), vec_mulo(q3x2, q8y2)); + vector signed short qv3 = vec_add(vec_mule(q3x3, q8y3), vec_mulo(q3x3, q8y3)); + + const uint16_t ls0 = (uint16_t)(sc[0] & 0xf); + const uint16_t ls1 = (uint16_t)(sc[0] >> 4); + sc ++; + + vector signed short vscales01 = (vector signed short)vec_splats((uint16_t)(2*ls0+1)); + vector signed short vscales23 = (vector signed short)vec_splats((uint16_t)(2*ls1+1)); + + vsumi0 = vec_msum(qv0, vscales01, vsumi0); + vsumi1 = vec_msum(qv1, vscales01, vsumi1); + vsumi2 = vec_msum(qv2, vscales23, vsumi2); + vsumi3 = vec_msum(qv3, vscales23, vsumi3); + } + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0); + vsumf1 = vec_madd(vec_ctf(vsumi1, 0), vd, vsumf1); + vsumf2 = vec_madd(vec_ctf(vsumi2, 0), vd, 
vsumf2); + vsumf3 = vec_madd(vec_ctf(vsumi3, 0), vd, vsumf3); + } + + vsumf0 = vec_add(vsumf0, vsumf2); + vsumf1 = vec_add(vsumf1, vsumf3); + + vsumf0 = vec_add(vsumf0, vsumf1); + + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8)); + + *s = vec_extract(vsumf0, 0); + +#elif defined(__loongarch_asx) + + static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03 + }; + + static const uint8_t k_mask2[32] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + }; + + const __m256i mask1 = __lasx_xvld((const __m256i*)k_mask1, 0); + const __m256i mask2 = __lasx_xvld((const __m256i*)k_mask2, 0); + + __m256i idx_shift = lasx_set_w(1, 2, 3, 4, 5, 6, 7, 8); + const __m256i idx_mask = __lasx_xvreplgr2vr_w(256); + + typedef union { + __m256i vec[2]; + uint32_t index[16]; + } index_t; + + index_t idx; + + __m256 accumf = (__m256)__lasx_xvldi(0); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict qs = x[i].qs; + const uint8_t * restrict qh = x[i].qh; + const uint16_t * restrict signs = (const uint16_t *)x[i].signs; + const int8_t * restrict q8 = y[i].qs; + __m256i sumi1 = __lasx_xvldi(0); + __m256i sumi2 = __lasx_xvldi(0); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m256i q8_1 = __lasx_xvld((const __m256i *)q8, 0); q8 += 32; + const __m256i q8_2 = __lasx_xvld((const __m256i *)q8, 0); q8 += 32; + const __m256i idx_l = lasx_extu8_16(__lsx_vld(qs, 0)); qs += 16; + idx.vec[0] = __lasx_xvreplgr2vr_w(qh[ib32+0]); + idx.vec[1] = __lasx_xvreplgr2vr_w(qh[ib32+1]); + idx.vec[0] = __lasx_xvand_v(__lasx_xvsll_w(idx.vec[0], idx_shift), idx_mask); + 
idx.vec[1] = __lasx_xvand_v(__lasx_xvsll_w(idx.vec[1], idx_shift), idx_mask); + idx.vec[0] = __lasx_xvor_v(idx.vec[0], lasx_ext16_32(lasx_extracti128(idx_l, 0))); + idx.vec[1] = __lasx_xvor_v(idx.vec[1], lasx_ext16_32(lasx_extracti128(idx_l, 1))); + + // At least on my CPU (Ryzen 7950X), using _mm256_i32gather_epi32 is slower than _mm256_set_epi32. Strange. + //const __m256i q2_1 = _mm256_i32gather_epi32((const int *)iq3s_grid, idx.vec[0], 4); + //const __m256i q2_2 = _mm256_i32gather_epi32((const int *)iq3s_grid, idx.vec[1], 4); + const __m256i q2_1 = lasx_set_w( + iq3s_grid[idx.index[7]], iq3s_grid[idx.index[6]], iq3s_grid[idx.index[5]], iq3s_grid[idx.index[4]], + iq3s_grid[idx.index[3]], iq3s_grid[idx.index[2]], iq3s_grid[idx.index[1]], iq3s_grid[idx.index[0]] + ); + const __m256i q2_2 = lasx_set_w( + iq3s_grid[idx.index[15]], iq3s_grid[idx.index[14]], iq3s_grid[idx.index[13]], iq3s_grid[idx.index[12]], + iq3s_grid[idx.index[11]], iq3s_grid[idx.index[10]], iq3s_grid[idx.index[ 9]], iq3s_grid[idx.index[ 8]] + ); + + __m256i aux256 = __lasx_xvreplgr2vr_w(signs[0] | (signs[1] << 16)); + aux256 = __lasx_xvand_v(lasx_shuffle_b(aux256,mask1), mask2); + const __m256i s2_1 = __lasx_xvseq_b(aux256, mask2); + const __m256i q8s_1 = __lasx_xvsub_b(__lasx_xvxor_v(s2_1, q8_1), s2_1); + + aux256 = __lasx_xvreplgr2vr_w(signs[2] | (signs[3] << 16)); + aux256 = __lasx_xvand_v(lasx_shuffle_b(aux256,mask1), mask2); + const __m256i s2_2 = __lasx_xvseq_b(aux256, mask2); + const __m256i q8s_2 = __lasx_xvsub_b(__lasx_xvxor_v(s2_2, q8_2), s2_2); + + signs += 4; + + const __m256i dot1 = lasx_maddubs_h(q2_1, q8s_1); + const __m256i dot2 = lasx_maddubs_h(q2_2, q8s_2); + const uint16_t ls1 = x[i].scales[ib32/2] & 0xf; + const uint16_t ls2 = x[i].scales[ib32/2] >> 4; + const __m256i p1 = lasx_madd_h(dot1, __lasx_xvreplgr2vr_h(2*ls1+1)); + const __m256i p2 = lasx_madd_h(dot2, __lasx_xvreplgr2vr_h(2*ls2+1)); + sumi1 = __lasx_xvadd_w(sumi1, p1); + sumi2 = __lasx_xvadd_w(sumi2, p2); + } + +
accumf = __lasx_xvfmadd_s(__lasx_xvreplfr2vr_s(d), __lasx_xvffint_s_w(__lasx_xvadd_w(sumi1, sumi2)), accumf); + } + + *s = hsum_float_8(accumf); + +#else + + float sumf = 0.f; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict qs = x[i].qs; + const uint8_t * restrict qh = x[i].qh; + const uint8_t * restrict signs = x[i].signs; + const int8_t * restrict q8 = y[i].qs; + int32_t bsum = 0; + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const uint32_t ls1 = 2*(x[i].scales[ib32/2] & 0xf) + 1; + const uint32_t ls2 = 2*(x[i].scales[ib32/2] >> 4) + 1; + int32_t sumi = 0; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid1 = (const uint8_t *)(iq3s_grid + (qs[2*l+0] | ((qh[ib32+0] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3s_grid + (qs[2*l+1] | ((qh[ib32+0] << (7-2*l)) & 256))); + for (int j = 0; j < 4; ++j) { + sumi += grid1[j] * q8[j+0] * (signs[l] & kmask_iq2xs[j+0] ? -1 : 1); + sumi += grid2[j] * q8[j+4] * (signs[l] & kmask_iq2xs[j+4] ? -1 : 1); + } + q8 += 8; + } + qs += 8; + signs += 4; + bsum += sumi * ls1; + sumi = 0; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid1 = (const uint8_t *)(iq3s_grid + (qs[2*l+0] | ((qh[ib32+1] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3s_grid + (qs[2*l+1] | ((qh[ib32+1] << (7-2*l)) & 256))); + for (int j = 0; j < 4; ++j) { + sumi += grid1[j] * q8[j+0] * (signs[l] & kmask_iq2xs[j+0] ? -1 : 1); + sumi += grid2[j] * q8[j+4] * (signs[l] & kmask_iq2xs[j+4] ? 
-1 : 1); + } + q8 += 8; + } + qs += 8; + signs += 4; + bsum += sumi * ls2; + } + sumf += d * bsum; + } + *s = sumf; +#endif +} + +#if defined(__AVX2__) +static inline __m256i mul_add_epi8(const __m256i x, const __m256i y) { + const __m256i ax = _mm256_sign_epi8(x, x); + const __m256i sy = _mm256_sign_epi8(y, x); + return _mm256_maddubs_epi16(ax, sy); +} +#elif defined(__loongarch_asx) +static inline __m256i mul_add_epi8(const __m256i x, const __m256i y) { + const __m256i ax = __lasx_xvsigncov_b(x, x); + const __m256i sy = __lasx_xvsigncov_b(x, y); + __m256i tmp1, tmp2, tmp3; + tmp1 = __lasx_xvmulwev_h_bu_b(ax, sy); + tmp2 = __lasx_xvmulwod_h_bu_b(ax, sy); + tmp3 = __lasx_xvadd_h(tmp1, tmp2); + return __lasx_xvsat_h(tmp3, 15); +} +#endif + +void ggml_vec_dot_iq1_s_q8_K (int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_iq1_s * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined __ARM_NEON + + ggml_int8x16x4_t q1b; + ggml_int8x16x4_t q8b; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint16_t * qh = x[i].qh; + + int sumi1 = 0, sumi2 = 0, sumi3 = 0; + + for (int ib = 0; ib < QK_K/32; ib += 2) { + + q1b.val[0] = vcombine_s8(vld1_s8((const int8_t *)(iq1s_grid + (qs[0] | ((qh[ib+0] << 8) & 0x700)))), + vld1_s8((const int8_t *)(iq1s_grid + (qs[1] | ((qh[ib+0] << 5) & 0x700))))); + q1b.val[1] = vcombine_s8(vld1_s8((const int8_t *)(iq1s_grid + (qs[2] | ((qh[ib+0] << 2) & 0x700)))), + vld1_s8((const int8_t *)(iq1s_grid + (qs[3] | ((qh[ib+0] >> 1) & 0x700))))); + q1b.val[2] = vcombine_s8(vld1_s8((const int8_t *)(iq1s_grid + (qs[4] | ((qh[ib+1] << 8) & 0x700)))), + vld1_s8((const int8_t *)(iq1s_grid + (qs[5] | ((qh[ib+1] << 5) & 0x700))))); + q1b.val[3] = 
vcombine_s8(vld1_s8((const int8_t *)(iq1s_grid + (qs[6] | ((qh[ib+1] << 2) & 0x700)))), + vld1_s8((const int8_t *)(iq1s_grid + (qs[7] | ((qh[ib+1] >> 1) & 0x700))))); + qs += 8; + + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; + + const int32x4_t p1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q1b.val[0], q8b.val[0]), q1b.val[1], q8b.val[1]); + const int32x4_t p2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q1b.val[2], q8b.val[2]), q1b.val[3], q8b.val[3]); + + const int ls1 = 2*((qh[ib+0] >> 12) & 7) + 1; + const int ls2 = 2*((qh[ib+1] >> 12) & 7) + 1; + sumi1 += vaddvq_s32(p1) * ls1; + sumi2 += vaddvq_s32(p2) * ls2; + sumi3 += (y[i].bsums[2*ib+0] + y[i].bsums[2*ib+1]) * ls1 * (qh[ib+0] & 0x8000 ? -1 : 1) + + (y[i].bsums[2*ib+2] + y[i].bsums[2*ib+3]) * ls2 * (qh[ib+1] & 0x8000 ? -1 : 1); + + } + + sumf += y[i].d * GGML_FP16_TO_FP32(x[i].d) * (sumi1 + sumi2 + IQ1S_DELTA * sumi3); + } + + *s = sumf; + +#elif defined __AVX2__ + + __m256 accum = _mm256_setzero_ps(); + float accum1 = 0; + for (int i = 0; i < nb; ++i) { + + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint16_t * qh = x[i].qh; + + __m256i sumi = _mm256_setzero_si256(); + int sumi1 = 0; + for (int ib = 0; ib < QK_K/32; ib += 2) { + const __m256i q1b_1 = _mm256_set_epi64x(iq1s_grid[qs[3] | ((qh[ib+0] >> 1) & 0x700)], iq1s_grid[qs[2] | ((qh[ib+0] << 2) & 0x700)], + iq1s_grid[qs[1] | ((qh[ib+0] << 5) & 0x700)], iq1s_grid[qs[0] | ((qh[ib+0] << 8) & 0x700)]); + const __m256i q1b_2 = _mm256_set_epi64x(iq1s_grid[qs[7] | ((qh[ib+1] >> 1) & 0x700)], iq1s_grid[qs[6] | ((qh[ib+1] << 2) & 0x700)], + iq1s_grid[qs[5] | ((qh[ib+1] << 5) & 0x700)], iq1s_grid[qs[4] | ((qh[ib+1] << 8) & 0x700)]); + qs += 8; + const __m256i q8b_1 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + const __m256i q8b_2 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + + const __m256i dot1 = mul_add_epi8(q1b_1, q8b_1); + const __m256i dot2 = mul_add_epi8(q1b_2, q8b_2); + const int16_t ls1 = 2*((qh[ib+0] >> 12) & 7) 
+ 1; + const int16_t ls2 = 2*((qh[ib+1] >> 12) & 7) + 1; + const __m256i p1 = _mm256_madd_epi16(dot1, _mm256_set1_epi16(ls1)); + const __m256i p2 = _mm256_madd_epi16(dot2, _mm256_set1_epi16(ls2)); + + sumi = _mm256_add_epi32(sumi, _mm256_add_epi32(p1, p2)); + sumi1 += (y[i].bsums[2*ib+0] + y[i].bsums[2*ib+1]) * (qh[ib+0] & 0x8000 ? -1 : 1) * ls1 + + (y[i].bsums[2*ib+2] + y[i].bsums[2*ib+3]) * (qh[ib+1] & 0x8000 ? -1 : 1) * ls2; + } + + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + accum = _mm256_fmadd_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(sumi), accum); + accum1 += d * sumi1; + + } + + *s = hsum_float_8(accum) + IQ1S_DELTA * accum1; + +#elif defined __AVX__ + __m256 accum = _mm256_setzero_ps(); + float accum1 = 0; + for (int i = 0; i < nb; ++i) { + + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint16_t * qh = x[i].qh; + + __m128i sumi1_0 = _mm_setzero_si128(); + __m128i sumi1_1 = _mm_setzero_si128(); + int sumi1 = 0; + for (int ib = 0; ib < QK_K/32; ib += 2) { + const __m128i q1b_1_0 = _mm_set_epi64x(iq1s_grid[qs[1] | ((qh[ib+0] << 5) & 0x700)], iq1s_grid[qs[0] | ((qh[ib+0] << 8) & 0x700)]); + const __m128i q1b_1_1 = _mm_set_epi64x(iq1s_grid[qs[3] | ((qh[ib+0] >> 1) & 0x700)], iq1s_grid[qs[2] | ((qh[ib+0] << 2) & 0x700)]); + const __m128i q1b_2_0 = _mm_set_epi64x(iq1s_grid[qs[5] | ((qh[ib+1] << 5) & 0x700)], iq1s_grid[qs[4] | ((qh[ib+1] << 8) & 0x700)]); + const __m128i q1b_2_1 = _mm_set_epi64x(iq1s_grid[qs[7] | ((qh[ib+1] >> 1) & 0x700)], iq1s_grid[qs[6] | ((qh[ib+1] << 2) & 0x700)]); + qs += 8; + const __m128i q8b_1_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8b_1_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8b_2_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8b_2_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + + const __m128i dot1_0 = mul_add_epi8_sse(q1b_1_0, q8b_1_0); + const __m128i dot1_1 = mul_add_epi8_sse(q1b_1_1, q8b_1_1); + const __m128i 
dot2_0 = mul_add_epi8_sse(q1b_2_0, q8b_2_0); + const __m128i dot2_1 = mul_add_epi8_sse(q1b_2_1, q8b_2_1); + const int16_t ls1 = 2*((qh[ib+0] >> 12) & 7) + 1; + const int16_t ls2 = 2*((qh[ib+1] >> 12) & 7) + 1; + const __m128i p1_0 = _mm_madd_epi16(dot1_0, _mm_set1_epi16(ls1)); + const __m128i p1_1 = _mm_madd_epi16(dot1_1, _mm_set1_epi16(ls1)); + const __m128i p2_0 = _mm_madd_epi16(dot2_0, _mm_set1_epi16(ls2)); + const __m128i p2_1 = _mm_madd_epi16(dot2_1, _mm_set1_epi16(ls2)); + + sumi1_0 = _mm_add_epi32(sumi1_0, _mm_add_epi32(p1_0, p2_0)); + sumi1_1 = _mm_add_epi32(sumi1_1, _mm_add_epi32(p1_1, p2_1)); + sumi1 += (y[i].bsums[2*ib+0] + y[i].bsums[2*ib+1]) * (qh[ib+0] & 0x8000 ? -1 : 1) * ls1 + + (y[i].bsums[2*ib+2] + y[i].bsums[2*ib+3]) * (qh[ib+1] & 0x8000 ? -1 : 1) * ls2; + } + + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + accum = _mm256_add_ps(_mm256_mul_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(MM256_SET_M128I(sumi1_1, sumi1_0))), accum); + accum1 += d * sumi1; + + } + + *s = hsum_float_8(accum) + IQ1S_DELTA * accum1; + +#elif defined(__POWER9_VECTOR__) + const vector unsigned char v0 = vec_splats((unsigned char)0x0); + const vector unsigned short vsign = vec_splats((unsigned short)0x8000); + + vector float vsumf0 = vec_splats(0.0f); + vector float vsumf1 = vec_splats(0.0f); + vector float vsumf2 = vec_splats(0.0f); + vector float vsumf3 = vec_splats(0.0f); + + for (int i = 0; i < nb; ++i) { + vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[i].d)); + vector float vyd = vec_splats(y[i].d); + vector float vd = vec_mul(vxd, vyd); + + vector signed int vsumi0 = vec_splats((int32_t)0); + vector signed int vsumi1 = vec_splats((int32_t)0); + vector signed int vsumi2 = vec_splats((int32_t)0); + vector signed int vsumi3 = vec_splats((int32_t)0); + vector signed int vsumi8 = vec_splats((int32_t)0); + + const uint8_t * restrict q1 = x[i].qs; + const uint16_t * restrict qh = x[i].qh; + const int8_t * restrict q8 = y[i].qs; + const int16_t * restrict qs = 
y[i].bsums; + + for (int j = 0; j < QK_K/32; j += 2) { + __builtin_prefetch(q1, 0, 1); + __builtin_prefetch(qh, 0, 1); + __builtin_prefetch(q8, 0, 1); + + vector signed long long aux64x2_0 = {*(const int64_t *)(iq1s_grid + (q1[0] | ((qh[0] << 8) & 0x700))), *(const int64_t *)(iq1s_grid + (q1[1] | ((qh[0] << 5) & 0x700)))}; + vector signed long long aux64x2_1 = {*(const int64_t *)(iq1s_grid + (q1[2] | ((qh[0] << 2) & 0x700))), *(const int64_t *)(iq1s_grid + (q1[3] | ((qh[0] >> 1) & 0x700)))}; + vector signed long long aux64x2_2 = {*(const int64_t *)(iq1s_grid + (q1[4] | ((qh[1] << 8) & 0x700))), *(const int64_t *)(iq1s_grid + (q1[5] | ((qh[1] << 5) & 0x700)))}; + vector signed long long aux64x2_3 = {*(const int64_t *)(iq1s_grid + (q1[6] | ((qh[1] << 2) & 0x700))), *(const int64_t *)(iq1s_grid + (q1[7] | ((qh[1] >> 1) & 0x700)))}; + q1 += 8; + + vector signed char q1x0 = (vector signed char)aux64x2_0; + vector signed char q1x1 = (vector signed char)aux64x2_1; + vector signed char q1x2 = (vector signed char)aux64x2_2; + vector signed char q1x3 = (vector signed char)aux64x2_3; + + vector signed char q8y0 = vec_xl( 0, q8); + vector signed char q8y1 = vec_xl(16, q8); + vector signed char q8y2 = vec_xl(32, q8); + vector signed char q8y3 = vec_xl(48, q8); + q8 += 64; + + vector signed short qv0 = vec_add(vec_mule(q1x0, q8y0), vec_mulo(q1x0, q8y0)); + vector signed short qv1 = vec_add(vec_mule(q1x1, q8y1), vec_mulo(q1x1, q8y1)); + vector signed short qv2 = vec_add(vec_mule(q1x2, q8y2), vec_mulo(q1x2, q8y2)); + vector signed short qv3 = vec_add(vec_mule(q1x3, q8y3), vec_mulo(q1x3, q8y3)); + + const uint16_t ls0 = (uint16_t)((qh[0] >> 12) & 7); + const uint16_t ls1 = (uint16_t)((qh[1] >> 12) & 7); + + vector signed short vscales01 = (vector signed short)vec_splats((uint16_t)(2*ls0+1)); + vector signed short vscales23 = (vector signed short)vec_splats((uint16_t)(2*ls1+1)); + vector signed short vscales = vec_sld(vscales23, vscales01, 8); + + vsumi0 = vec_msum(qv0, vscales01, 
vsumi0); + vsumi1 = vec_msum(qv1, vscales01, vsumi1); + vsumi2 = vec_msum(qv2, vscales23, vsumi2); + vsumi3 = vec_msum(qv3, vscales23, vsumi3); + + vector signed short q8ysums = vec_xl_len(qs, 8); + qs += 4; + q8ysums = vec_mergeh(q8ysums, (vector signed short)v0); + + vector signed short qxh = (vector signed short)vec_sld(vec_splats(qh[1]), vec_splats(qh[0]), 8); + qh += 2; + vector __bool short vsel = vec_cmpge(qxh, (vector signed short)v0); + + vector signed short q8ysum = vec_sel((vector signed short)vec_xor((vector unsigned short)q8ysums, vsign), q8ysums, vsel); + + vsumi8 = vec_add(vec_mule(q8ysum, vscales), vsumi8); + } + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0); + vsumf1 = vec_madd(vec_ctf(vsumi1, 0), vd, vsumf1); + vsumf2 = vec_madd(vec_ctf(vsumi2, 0), vd, vsumf2); + vsumf3 = vec_madd(vec_ctf(vsumi3, 0), vd, vsumf3); + + vsumf0 = vec_madd(vec_ctf(vsumi8, 0), vec_mul(vd, vec_splats(IQ1S_DELTA)), vsumf0); + } + + vsumf0 = vec_add(vsumf0, vsumf2); + vsumf1 = vec_add(vsumf1, vsumf3); + + vsumf0 = vec_add(vsumf0, vsumf1); + + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8)); + + *s = vec_extract(vsumf0, 0); + +#elif defined(__loongarch_asx) + + __m256 accum = (__m256)__lasx_xvldi(0); + float accum1 = 0; + for (int i = 0; i < nb; ++i) { + + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint16_t * qh = x[i].qh; + + __m256i sumi = __lasx_xvldi(0); + int sumi1 = 0; + for (int ib = 0; ib < QK_K/32; ib += 2) { + __m256i q1b_1 = __lasx_xvinsgr2vr_d(q1b_1, iq1s_grid[qs[0] | ((qh[ib+0] << 8) & 0x700)], 0); + q1b_1 = __lasx_xvinsgr2vr_d(q1b_1, iq1s_grid[qs[1] | ((qh[ib+0] << 5) & 0x700)], 1); + q1b_1 = __lasx_xvinsgr2vr_d(q1b_1, iq1s_grid[qs[2] | ((qh[ib+0] << 2) & 0x700)], 2); + q1b_1 = __lasx_xvinsgr2vr_d(q1b_1, iq1s_grid[qs[3] | ((qh[ib+0] >> 1) & 0x700)], 3); + + __m256i q1b_2 = __lasx_xvinsgr2vr_d(q1b_2, iq1s_grid[qs[4] | ((qh[ib+1] << 8) & 0x700)], 0); + q1b_2 = 
__lasx_xvinsgr2vr_d(q1b_2, iq1s_grid[qs[5] | ((qh[ib+1] << 5) & 0x700)], 1); + q1b_2 = __lasx_xvinsgr2vr_d(q1b_2, iq1s_grid[qs[6] | ((qh[ib+1] << 2) & 0x700)], 2); + q1b_2 = __lasx_xvinsgr2vr_d(q1b_2, iq1s_grid[qs[7] | ((qh[ib+1] >> 1) & 0x700)], 3); + + qs += 8; + const __m256i q8b_1 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32; + const __m256i q8b_2 = __lasx_xvld((const __m256i*)q8, 0); q8 += 32; + + const __m256i dot1 = mul_add_epi8(q1b_1, q8b_1); + const __m256i dot2 = mul_add_epi8(q1b_2, q8b_2); + const int16_t ls1 = 2*((qh[ib+0] >> 12) & 7) + 1; + const int16_t ls2 = 2*((qh[ib+1] >> 12) & 7) + 1; + + __m256i tmp1, tmp5, tmp6; + tmp1 = __lasx_xvreplgr2vr_h(ls1); + tmp5 = __lasx_xvmulwev_w_h(dot1, tmp1); + tmp6 = __lasx_xvmulwod_w_h(dot1, tmp1); + const __m256i p1 = __lasx_xvadd_w(tmp5, tmp6); + + tmp1 = __lasx_xvreplgr2vr_h(ls2); + tmp5 = __lasx_xvmulwev_w_h(dot2, tmp1); + tmp6 = __lasx_xvmulwod_w_h(dot2, tmp1); + const __m256i p2 = __lasx_xvadd_w(tmp5, tmp6); + + sumi = __lasx_xvadd_w(sumi, __lasx_xvadd_w(p1, p2)); + sumi1 += (y[i].bsums[2*ib+0] + y[i].bsums[2*ib+1]) * (qh[ib+0] & 0x8000 ? -1 : 1) * ls1 + + (y[i].bsums[2*ib+2] + y[i].bsums[2*ib+3]) * (qh[ib+1] & 0x8000 ? -1 : 1) * ls2; + } + + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + accum = __lasx_xvfmadd_s(__lasx_xvreplfr2vr_s(d), __lasx_xvffint_s_w(sumi), accum); + accum1 += d * sumi1; + } + + *s = hsum_float_8(accum) + IQ1S_DELTA * accum1; + +#else + + float sumf = 0; + for (int i = 0; i < nb; i++) { + + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint16_t * qh = x[i].qh; + + int sumi = 0, sumi1 = 0; + for (int ib = 0; ib < QK_K/32; ++ib) { + const int ls = 2*((qh[ib] >> 12) & 7) + 1; + const int delta = qh[ib] & 0x8000 ? 
-1 : 1; + int lsum = 0; + for (int l = 0; l < 4; ++l) { + const int8_t * grid = (const int8_t *)(iq1s_grid + (qs[l] | (((qh[ib] >> 3*l) & 7) << 8))); + for (int j = 0; j < 8; ++j) { + lsum += q8[j] * grid[j]; + } + q8 += 8; + } + sumi += ls * lsum; + sumi1 += ls * delta * (y[i].bsums[2*ib+0] + y[i].bsums[2*ib+1]); + qs += 4; + } + + sumf += GGML_FP16_TO_FP32(x[i].d) * y[i].d * (sumi + IQ1S_DELTA * sumi1); + } + + *s = sumf; + +#endif +} + +void ggml_vec_dot_iq1_m_q8_K (int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_iq1_m * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + + iq1m_scale_t scale; + +#if defined __ARM_NEON + const int32x4_t mask = vdupq_n_s32(0x7); + const int32x4_t mone = vdupq_n_s32(1); + const int32x4_t mzero = vdupq_n_s32(0); + + ggml_int8x16x4_t deltas; + deltas.val[0] = vcombine_s8(vdup_n_s8(+1), vdup_n_s8(+1)); + deltas.val[1] = vcombine_s8(vdup_n_s8(-1), vdup_n_s8(+1)); + deltas.val[2] = vcombine_s8(vdup_n_s8(+1), vdup_n_s8(-1)); + deltas.val[3] = vcombine_s8(vdup_n_s8(-1), vdup_n_s8(-1)); + + ggml_int8x16x4_t q1b; + ggml_int8x16x4_t q8b; + + uint32_t aux32; + const uint8_t * aux8 = (const uint8_t *)&aux32; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint8_t * qh = x[i].qh; + const uint16_t * sc = (const uint16_t *)x[i].scales; + + scale.u16 = (sc[0] >> 12) | ((sc[1] >> 8) & 0x00f0) | ((sc[2] >> 4) & 0x0f00) | (sc[3] & 0xf000); + + int32x4_t sumi1 = mzero; + int32x4_t sumi2 = mzero; + + for (int ib = 0; ib < QK_K/32; ib += 2) { + + q1b.val[0] = vcombine_s8(vld1_s8((const int8_t *)(iq1s_grid + (qs[0] | ((qh[0] << 8) & 0x700)))), + vld1_s8((const int8_t *)(iq1s_grid + (qs[1] | ((qh[0] << 4) & 0x700))))); + q1b.val[1] = 
vcombine_s8(vld1_s8((const int8_t *)(iq1s_grid + (qs[2] | ((qh[1] << 8) & 0x700)))), + vld1_s8((const int8_t *)(iq1s_grid + (qs[3] | ((qh[1] << 4) & 0x700))))); + q1b.val[2] = vcombine_s8(vld1_s8((const int8_t *)(iq1s_grid + (qs[4] | ((qh[2] << 8) & 0x700)))), + vld1_s8((const int8_t *)(iq1s_grid + (qs[5] | ((qh[2] << 4) & 0x700))))); + q1b.val[3] = vcombine_s8(vld1_s8((const int8_t *)(iq1s_grid + (qs[6] | ((qh[3] << 8) & 0x700)))), + vld1_s8((const int8_t *)(iq1s_grid + (qs[7] | ((qh[3] << 4) & 0x700))))); + + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; + + const int32x4_t p1 = vpaddq_s32(ggml_vdotq_s32(mzero, q1b.val[0], q8b.val[0]), ggml_vdotq_s32(mzero, q1b.val[1], q8b.val[1])); + const int32x4_t p2 = vpaddq_s32(ggml_vdotq_s32(mzero, q1b.val[2], q8b.val[2]), ggml_vdotq_s32(mzero, q1b.val[3], q8b.val[3])); + const int32x4_t p12 = vpaddq_s32(p1, p2); + + const uint32_t * qh32 = (const uint32_t *)qh; // we are 4-byte aligned, so we can do that + aux32 = ((qh32[0] >> 3) & 0x01010101) | ((qh32[0] >> 6) & 0x02020202); + + const int32x4_t p3 = vpaddq_s32(ggml_vdotq_s32(mzero, deltas.val[aux8[0]], q8b.val[0]), ggml_vdotq_s32(mzero, deltas.val[aux8[1]], q8b.val[1])); + const int32x4_t p4 = vpaddq_s32(ggml_vdotq_s32(mzero, deltas.val[aux8[2]], q8b.val[2]), ggml_vdotq_s32(mzero, deltas.val[aux8[3]], q8b.val[3])); + const int32x4_t p34 = vpaddq_s32(p3, p4); + + int32x4_t scales_4 = ggml_vld1q_u32(sc[ib/2] >> 0, sc[ib/2] >> 3, sc[ib/2] >> 6, sc[ib/2] >> 9); + + scales_4 = vaddq_s32(vshlq_n_s32(vandq_s32(scales_4, mask), 1), mone); + + sumi1 = vmlaq_s32(sumi1, scales_4, p12); + sumi2 = vmlaq_s32(sumi2, scales_4, p34); + + qs += 8; qh += 4; + + } + + sumf += y[i].d * GGML_FP16_TO_FP32(scale.f16) * (vaddvq_s32(sumi1) + IQ1M_DELTA * vaddvq_s32(sumi2)); + } + + *s = sumf; + +#elif defined __AVX2__ + + const __m256i mask = _mm256_set1_epi16(0x7); + const __m256i mone = _mm256_set1_epi16(1); + + __m256 accum1 = _mm256_setzero_ps(); + __m256 accum2 = _mm256_setzero_ps(); + for (int i = 
0; i < nb; ++i) { + + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint8_t * qh = x[i].qh; + const uint16_t * sc = (const uint16_t *)x[i].scales; + + scale.u16 = (sc[0] >> 12) | ((sc[1] >> 8) & 0x00f0) | ((sc[2] >> 4) & 0x0f00) | (sc[3] & 0xf000); + + __m256i sumi1 = _mm256_setzero_si256(); + __m256i sumi2 = _mm256_setzero_si256(); + for (int ib = 0; ib < QK_K/32; ib += 2) { + const __m256i q1b_1 = _mm256_set_epi64x( + iq1s_grid[qs[3] | (((uint16_t)qh[1] << 4) & 0x700)], iq1s_grid[qs[2] | (((uint16_t)qh[1] << 8) & 0x700)], + iq1s_grid[qs[1] | (((uint16_t)qh[0] << 4) & 0x700)], iq1s_grid[qs[0] | (((uint16_t)qh[0] << 8) & 0x700)] + ); + const __m256i q1b_2 = _mm256_set_epi64x( + iq1s_grid[qs[7] | (((uint16_t)qh[3] << 4) & 0x700)], iq1s_grid[qs[6] | (((uint16_t)qh[3] << 8) & 0x700)], + iq1s_grid[qs[5] | (((uint16_t)qh[2] << 4) & 0x700)], iq1s_grid[qs[4] | (((uint16_t)qh[2] << 8) & 0x700)] + ); + const __m256i q8b_1 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + const __m256i q8b_2 = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + + const __m256i dot1 = mul_add_epi8(q1b_1, q8b_1); + const __m256i dot2 = mul_add_epi8(q1b_2, q8b_2); + + const __m256i delta1 = _mm256_set_epi64x(qh[1] & 0x80 ? 0xffffffffffffffff : 0x0101010101010101, + qh[1] & 0x08 ? 0xffffffffffffffff : 0x0101010101010101, + qh[0] & 0x80 ? 0xffffffffffffffff : 0x0101010101010101, + qh[0] & 0x08 ? 0xffffffffffffffff : 0x0101010101010101); + const __m256i delta2 = _mm256_set_epi64x(qh[3] & 0x80 ? 0xffffffffffffffff : 0x0101010101010101, + qh[3] & 0x08 ? 0xffffffffffffffff : 0x0101010101010101, + qh[2] & 0x80 ? 0xffffffffffffffff : 0x0101010101010101, + qh[2] & 0x08 ? 
0xffffffffffffffff : 0x0101010101010101); + + const __m256i dot3 = mul_add_epi8(delta1, q8b_1); + const __m256i dot4 = mul_add_epi8(delta2, q8b_2); + + __m256i scale1 = MM256_SET_M128I(_mm_set1_epi16(sc[ib/2] >> 3), _mm_set1_epi16(sc[ib/2] >> 0)); + __m256i scale2 = MM256_SET_M128I(_mm_set1_epi16(sc[ib/2] >> 9), _mm_set1_epi16(sc[ib/2] >> 6)); + + scale1 = _mm256_add_epi16(_mm256_slli_epi16(_mm256_and_si256(scale1, mask), 1), mone); + scale2 = _mm256_add_epi16(_mm256_slli_epi16(_mm256_and_si256(scale2, mask), 1), mone); + const __m256i p1 = _mm256_madd_epi16(dot1, scale1); + const __m256i p2 = _mm256_madd_epi16(dot2, scale2); + const __m256i p3 = _mm256_madd_epi16(dot3, scale1); + const __m256i p4 = _mm256_madd_epi16(dot4, scale2); + + sumi1 = _mm256_add_epi32(sumi1, _mm256_add_epi32(p1, p2)); + sumi2 = _mm256_add_epi32(sumi2, _mm256_add_epi32(p3, p4)); + + qs += 8; qh += 4; + } + + const __m256 d = _mm256_set1_ps(y[i].d * GGML_FP16_TO_FP32(scale.f16)); + + accum1 = _mm256_fmadd_ps(d, _mm256_cvtepi32_ps(sumi1), accum1); + accum2 = _mm256_fmadd_ps(d, _mm256_cvtepi32_ps(sumi2), accum2); + } + + *s = hsum_float_8(accum1) + IQ1M_DELTA * hsum_float_8(accum2); + +#elif defined __AVX__ + const __m128i mask = _mm_set1_epi16(0x7); + const __m128i mone = _mm_set1_epi16(1); + + __m256 accum1 = _mm256_setzero_ps(); + __m256 accum2 = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint8_t * qh = x[i].qh; + const uint16_t * sc = (const uint16_t *)x[i].scales; + + scale.u16 = (sc[0] >> 12) | ((sc[1] >> 8) & 0x00f0) | ((sc[2] >> 4) & 0x0f00) | (sc[3] & 0xf000); + + __m128i sumi1_0 = _mm_setzero_si128(); + __m128i sumi1_1 = _mm_setzero_si128(); + __m128i sumi2_0 = _mm_setzero_si128(); + __m128i sumi2_1 = _mm_setzero_si128(); + for (int ib = 0; ib < QK_K/32; ib += 2) { + const __m128i q1b_1_0 = _mm_set_epi64x( + iq1s_grid[qs[1] | (((uint16_t)qh[0] << 4) & 0x700)], iq1s_grid[qs[0] | (((uint16_t)qh[0] << 8) 
& 0x700)]); + const __m128i q1b_1_1 = _mm_set_epi64x( + iq1s_grid[qs[3] | (((uint16_t)qh[1] << 4) & 0x700)], iq1s_grid[qs[2] | (((uint16_t)qh[1] << 8) & 0x700)]); + const __m128i q1b_2_0 = _mm_set_epi64x( + iq1s_grid[qs[5] | (((uint16_t)qh[2] << 4) & 0x700)], iq1s_grid[qs[4] | (((uint16_t)qh[2] << 8) & 0x700)]); + const __m128i q1b_2_1 = _mm_set_epi64x( + iq1s_grid[qs[7] | (((uint16_t)qh[3] << 4) & 0x700)], iq1s_grid[qs[6] | (((uint16_t)qh[3] << 8) & 0x700)]); + const __m128i q8b_1_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8b_1_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8b_2_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8b_2_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + + const __m128i dot1_0 = mul_add_epi8_sse(q1b_1_0, q8b_1_0); + const __m128i dot1_1 = mul_add_epi8_sse(q1b_1_1, q8b_1_1); + const __m128i dot2_0 = mul_add_epi8_sse(q1b_2_0, q8b_2_0); + const __m128i dot2_1 = mul_add_epi8_sse(q1b_2_1, q8b_2_1); + + const __m128i delta1_0 = _mm_set_epi64x(qh[0] & 0x80 ? 0xffffffffffffffff : 0x0101010101010101, + qh[0] & 0x08 ? 0xffffffffffffffff : 0x0101010101010101); + const __m128i delta1_1 = _mm_set_epi64x(qh[1] & 0x80 ? 0xffffffffffffffff : 0x0101010101010101, + qh[1] & 0x08 ? 0xffffffffffffffff : 0x0101010101010101); + const __m128i delta2_0 = _mm_set_epi64x(qh[2] & 0x80 ? 0xffffffffffffffff : 0x0101010101010101, + qh[2] & 0x08 ? 0xffffffffffffffff : 0x0101010101010101); + const __m128i delta2_1 = _mm_set_epi64x(qh[3] & 0x80 ? 0xffffffffffffffff : 0x0101010101010101, + qh[3] & 0x08 ? 
0xffffffffffffffff : 0x0101010101010101); + + const __m128i dot3_0 = mul_add_epi8_sse(delta1_0, q8b_1_0); + const __m128i dot3_1 = mul_add_epi8_sse(delta1_1, q8b_1_1); + const __m128i dot4_0 = mul_add_epi8_sse(delta2_0, q8b_2_0); + const __m128i dot4_1 = mul_add_epi8_sse(delta2_1, q8b_2_1); + + __m128i scale1_0 = _mm_set1_epi16(sc[ib/2] >> 0); + __m128i scale1_1 = _mm_set1_epi16(sc[ib/2] >> 3); + __m128i scale2_0 = _mm_set1_epi16(sc[ib/2] >> 6); + __m128i scale2_1 = _mm_set1_epi16(sc[ib/2] >> 9); + + scale1_0 = _mm_add_epi16(_mm_slli_epi16(_mm_and_si128(scale1_0, mask), 1), mone); + scale1_1 = _mm_add_epi16(_mm_slli_epi16(_mm_and_si128(scale1_1, mask), 1), mone); + scale2_0 = _mm_add_epi16(_mm_slli_epi16(_mm_and_si128(scale2_0, mask), 1), mone); + scale2_1 = _mm_add_epi16(_mm_slli_epi16(_mm_and_si128(scale2_1, mask), 1), mone); + const __m128i p1_0 = _mm_madd_epi16(dot1_0, scale1_0); + const __m128i p1_1 = _mm_madd_epi16(dot1_1, scale1_1); + const __m128i p2_0 = _mm_madd_epi16(dot2_0, scale2_0); + const __m128i p2_1 = _mm_madd_epi16(dot2_1, scale2_1); + const __m128i p3_0 = _mm_madd_epi16(dot3_0, scale1_0); + const __m128i p3_1 = _mm_madd_epi16(dot3_1, scale1_1); + const __m128i p4_0 = _mm_madd_epi16(dot4_0, scale2_0); + const __m128i p4_1 = _mm_madd_epi16(dot4_1, scale2_1); + + sumi1_0 = _mm_add_epi32(sumi1_0, _mm_add_epi32(p1_0, p2_0)); + sumi1_1 = _mm_add_epi32(sumi1_1, _mm_add_epi32(p1_1, p2_1)); + sumi2_0 = _mm_add_epi32(sumi2_0, _mm_add_epi32(p3_0, p4_0)); + sumi2_1 = _mm_add_epi32(sumi2_1, _mm_add_epi32(p3_1, p4_1)); + + qs += 8; qh += 4; + } + + const __m256 d = _mm256_set1_ps(y[i].d * GGML_FP16_TO_FP32(scale.f16)); + + accum1 = _mm256_add_ps(_mm256_mul_ps(d, _mm256_cvtepi32_ps(MM256_SET_M128I(sumi1_1, sumi1_0))), accum1); + accum2 = _mm256_add_ps(_mm256_mul_ps(d, _mm256_cvtepi32_ps(MM256_SET_M128I(sumi2_1, sumi2_0))), accum2); + } + + *s = hsum_float_8(accum1) + IQ1M_DELTA * hsum_float_8(accum2); + +#else + + int sum1[2], sum2[2], delta[4]; + + float sumf 
= 0; + for (int i = 0; i < nb; i++) { + + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint8_t * qh = x[i].qh; + const uint16_t * sc = (const uint16_t *)x[i].scales; + + scale.u16 = (sc[0] >> 12) | ((sc[1] >> 8) & 0x00f0) | ((sc[2] >> 4) & 0x0f00) | (sc[3] & 0xf000); + + int sumi1 = 0, sumi2 = 0; + for (int ib = 0; ib < QK_K/32; ++ib) { + delta[0] = qh[0] & 0x08 ? -1 : 1; + delta[1] = qh[0] & 0x80 ? -1 : 1; + delta[2] = qh[1] & 0x08 ? -1 : 1; + delta[3] = qh[1] & 0x80 ? -1 : 1; + sum1[0] = sum1[1] = sum2[0] = sum2[1] = 0; + for (int l = 0; l < 4; ++l) { + const int8_t * grid = (const int8_t *)(iq1s_grid + (qs[l] | (((uint16_t)qh[l/2] << (8 - 4*(l%2))) & 0x700))); + int lsum1 = 0, lsum2 = 0; + for (int j = 0; j < 8; ++j) { + lsum1 += q8[j] * grid[j]; + lsum2 += q8[j]; + } + q8 += 8; + sum1[l/2] += lsum1; + sum2[l/2] += lsum2*delta[l]; + } + + const int ls1 = 2*((sc[ib/2] >> (6*(ib%2)+0)) & 0x7) + 1; + const int ls2 = 2*((sc[ib/2] >> (6*(ib%2)+3)) & 0x7) + 1; + + sumi1 += sum1[0] * ls1 + sum1[1] * ls2; + sumi2 += sum2[0] * ls1 + sum2[1] * ls2; + qs += 4; + qh += 2; + } + + sumf += GGML_FP16_TO_FP32(scale.f16) * y[i].d * (sumi1 + IQ1M_DELTA * sumi2); + } + + *s = sumf; + +#endif +} + +void ggml_vec_dot_iq4_nl_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + assert(n % QK4_NL == 0); + static_assert(QK4_NL == QK8_0, "QK4_NL and QK8_0 must be the same"); + + const block_iq4_nl * restrict x = vx; + const block_q8_0 * restrict y = vy; + + const int nb = n / QK4_NL; + + int ib = 0; + float sumf = 0; + +#if defined __ARM_NEON + const int8x16_t values = vld1q_s8(kvalues_iq4nl); + const uint8x16_t m4b = vdupq_n_u8(0x0f); + uint8x16x2_t q4bits; + int8x16x4_t q4b; + int8x16x4_t q8b; + int32x4_t prod_1, prod_2; + + for (; ib + 1 < nb; ib += 2) { + + q4bits.val[0] = vld1q_u8(x[ib + 0].qs); + 
q4bits.val[1] = vld1q_u8(x[ib + 1].qs); + q8b.val[0] = vld1q_s8(y[ib + 0].qs); + q8b.val[1] = vld1q_s8(y[ib + 0].qs + 16); + q8b.val[2] = vld1q_s8(y[ib + 1].qs); + q8b.val[3] = vld1q_s8(y[ib + 1].qs + 16); + + q4b.val[0] = ggml_vqtbl1q_s8(values, vandq_u8 (q4bits.val[0], m4b)); + q4b.val[1] = ggml_vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[0], 4)); + q4b.val[2] = ggml_vqtbl1q_s8(values, vandq_u8 (q4bits.val[1], m4b)); + q4b.val[3] = ggml_vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[1], 4)); + + prod_1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[0], q8b.val[0]), q4b.val[1], q8b.val[1]); + prod_2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[2], q8b.val[2]), q4b.val[3], q8b.val[3]); + + sumf += + GGML_FP16_TO_FP32(x[ib+0].d) * GGML_FP16_TO_FP32(y[ib + 0].d) * vaddvq_s32(prod_1) + + GGML_FP16_TO_FP32(x[ib+1].d) * GGML_FP16_TO_FP32(y[ib + 1].d) * vaddvq_s32(prod_2); + } + +#elif defined __AVX2__ + + const __m128i values128 = _mm_loadu_si128((const __m128i*)kvalues_iq4nl); + const __m128i m4b = _mm_set1_epi8(0x0f); + const __m256i mone = _mm256_set1_epi16(1); + + __m256 accum1 = _mm256_setzero_ps(); + __m256 accum2 = _mm256_setzero_ps(); + for (; ib + 1 < nb; ib += 2) { + const __m128i q4bits_1 = _mm_loadu_si128((const __m128i*)x[ib + 0].qs); + const __m128i q4bits_2 = _mm_loadu_si128((const __m128i*)x[ib + 1].qs); + const __m256i q8b_1 = _mm256_loadu_si256((const __m256i *)y[ib + 0].qs); + const __m256i q8b_2 = _mm256_loadu_si256((const __m256i *)y[ib + 1].qs); + const __m256i q4b_1 = MM256_SET_M128I(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_1, 4), m4b)), + _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_1, m4b))); + const __m256i q4b_2 = MM256_SET_M128I(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_2, 4), m4b)), + _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_2, m4b))); + const __m256i p16_1 = mul_add_epi8(q4b_1, q8b_1); + const __m256i p16_2 = mul_add_epi8(q4b_2, q8b_2); + const __m256i p_1 = 
_mm256_madd_epi16(p16_1, mone); + const __m256i p_2 = _mm256_madd_epi16(p16_2, mone); + accum1 = _mm256_fmadd_ps(_mm256_set1_ps(GGML_FP16_TO_FP32(y[ib + 0].d)*GGML_FP16_TO_FP32(x[ib + 0].d)), + _mm256_cvtepi32_ps(p_1), accum1); + accum2 = _mm256_fmadd_ps(_mm256_set1_ps(GGML_FP16_TO_FP32(y[ib + 1].d)*GGML_FP16_TO_FP32(x[ib + 1].d)), + _mm256_cvtepi32_ps(p_2), accum2); + } + + sumf = hsum_float_8(_mm256_add_ps(accum1, accum2)); + +#elif defined __AVX__ + const __m128i values128 = _mm_loadu_si128((const __m128i*)kvalues_iq4nl); + const __m128i m4b = _mm_set1_epi8(0x0f); + const __m128i mone = _mm_set1_epi16(1); + + __m256 accum1 = _mm256_setzero_ps(); + __m256 accum2 = _mm256_setzero_ps(); + for (; ib + 1 < nb; ib += 2) { + const __m128i q4bits_1 = _mm_loadu_si128((const __m128i *)x[ib + 0].qs); + const __m128i q4bits_2 = _mm_loadu_si128((const __m128i *)x[ib + 1].qs); + const __m128i q8b_1_0 = _mm_loadu_si128((const __m128i *)y[ib + 0].qs); + const __m128i q8b_1_1 = _mm_loadu_si128((const __m128i *)y[ib + 0].qs + 1); + const __m128i q8b_2_0 = _mm_loadu_si128((const __m128i *)y[ib + 1].qs); + const __m128i q8b_2_1 = _mm_loadu_si128((const __m128i *)y[ib + 1].qs + 1); + + const __m128i q4b_1_0 = _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_1, m4b)); + const __m128i q4b_1_1 = _mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_1, 4), m4b)); + const __m128i q4b_2_0 = _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_2, m4b)); + const __m128i q4b_2_1 = _mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_2, 4), m4b)); + const __m128i p16_1_0 = mul_add_epi8_sse(q4b_1_0, q8b_1_0); + const __m128i p16_1_1 = mul_add_epi8_sse(q4b_1_1, q8b_1_1); + const __m128i p16_2_0 = mul_add_epi8_sse(q4b_2_0, q8b_2_0); + const __m128i p16_2_1 = mul_add_epi8_sse(q4b_2_1, q8b_2_1); + const __m128i p_1_0 = _mm_madd_epi16(p16_1_0, mone); + const __m128i p_1_1 = _mm_madd_epi16(p16_1_1, mone); + const __m128i p_2_0 = _mm_madd_epi16(p16_2_0, mone); + const __m128i p_2_1 = 
_mm_madd_epi16(p16_2_1, mone); + accum1 = _mm256_add_ps(_mm256_mul_ps(_mm256_set1_ps(GGML_FP16_TO_FP32(y[ib + 0].d)*GGML_FP16_TO_FP32(x[ib + 0].d)), + _mm256_cvtepi32_ps(MM256_SET_M128I(p_1_1, p_1_0))), accum1); + accum2 = _mm256_add_ps(_mm256_mul_ps(_mm256_set1_ps(GGML_FP16_TO_FP32(y[ib + 1].d)*GGML_FP16_TO_FP32(x[ib + 1].d)), + _mm256_cvtepi32_ps(MM256_SET_M128I(p_2_1, p_2_0))), accum2); + } + + sumf = hsum_float_8(_mm256_add_ps(accum1, accum2)); + +#elif defined(__POWER9_VECTOR__) + const vector signed char lowMask = vec_splats((signed char)0xF); + const vector signed int v0 = vec_splats((int32_t)0); + const vector unsigned char v4 = vec_splats((unsigned char)0x4); + + vector float vsumf0 = vec_splats(0.0f); + vector float vsumf1 = vec_splats(0.0f); + + const vector signed char values = vec_xl( 0, kvalues_iq4nl); + +#pragma GCC unroll 4 + for (; ib < nb; ++ib) { + __builtin_prefetch(x[ib].qs, 0, 1); + __builtin_prefetch(y[ib].qs, 0, 1); + + + vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[ib].d)); + vector float vyd = vec_splats(GGML_FP16_TO_FP32(y[ib].d)); + vector float vd = vec_mul(vxd, vyd); + + vector signed char qxs = (vector signed char)vec_xl( 0, x[ib].qs); + vector signed char q4x0 = vec_and(qxs, lowMask); + vector signed char q4x1 = vec_sr(qxs, v4); + + q4x0 = vec_perm(values, values, (vector unsigned char)q4x0); + q4x1 = vec_perm(values, values, (vector unsigned char)q4x1); + + vector signed char q8y0 = vec_xl( 0, y[ib].qs); + vector signed char q8y1 = vec_xl(16, y[ib].qs); + + vector signed short qv0 = vec_add(vec_mule(q4x0, q8y0), vec_mulo(q4x0, q8y0)); + vector signed short qv1 = vec_add(vec_mule(q4x1, q8y1), vec_mulo(q4x1, q8y1)); + + vector signed int vsumi0 = v0; + vector signed int vsumi1 = v0; + + vsumi0 = vec_sum4s(qv0, vsumi0); + vsumi1 = vec_sum4s(qv1, vsumi1); + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0); + vsumf1 = vec_madd(vec_ctf(vsumi1, 0), vd, vsumf1); + } + + vsumf0 = vec_add(vsumf0, vsumf1); + + vsumf0 = vec_add(vsumf0, 
vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8)); + + sumf = vec_extract(vsumf0, 0); + +#elif defined (__loongarch_asx) + + const __m128i values128 = __lsx_vld((const __m128i*)kvalues_iq4nl, 0); + const __m128i m4b = __lsx_vreplgr2vr_b(0x0f); + const __m256i mone = __lasx_xvreplgr2vr_h(1); + + __m256 accum1 = (__m256)__lasx_xvldi(0); + __m256 accum2 = (__m256)__lasx_xvldi(0); + for (; ib + 1 < nb; ib += 2) { + const __m128i q4bits_1 = __lsx_vld((const __m128i*)x[ib + 0].qs, 0); + const __m128i q4bits_2 = __lsx_vld((const __m128i*)x[ib + 1].qs, 0); + const __m256i q8b_1 = __lasx_xvld((const __m256i *)y[ib + 0].qs, 0); + const __m256i q8b_2 = __lasx_xvld((const __m256i *)y[ib + 1].qs, 0); + const __m256i q4b_1 = lasx_insertf128(lsx_shuffle_b(values128, __lsx_vand_v(__lsx_vsrli_h(q4bits_1, 4), m4b)), + lsx_shuffle_b(values128, __lsx_vand_v(q4bits_1, m4b))); + const __m256i q4b_2 = lasx_insertf128(lsx_shuffle_b(values128, __lsx_vand_v(__lsx_vsrli_h(q4bits_2, 4), m4b)), + lsx_shuffle_b(values128, __lsx_vand_v(q4bits_2, m4b))); + const __m256i p16_1 = mul_add_epi8(q4b_1, q8b_1); + const __m256i p16_2 = mul_add_epi8(q4b_2, q8b_2); + const __m256i p_1 = lasx_madd_h(p16_1, mone); + const __m256i p_2 = lasx_madd_h(p16_2, mone); + accum1 = __lasx_xvfmadd_s(__lasx_xvreplfr2vr_s(GGML_FP16_TO_FP32(y[ib + 0].d)*GGML_FP16_TO_FP32(x[ib + 0].d)), + __lasx_xvffint_s_w(p_1), accum1); + accum2 = __lasx_xvfmadd_s(__lasx_xvreplfr2vr_s(GGML_FP16_TO_FP32(y[ib + 1].d)*GGML_FP16_TO_FP32(x[ib + 1].d)), + __lasx_xvffint_s_w(p_2), accum2); + } + + sumf = hsum_float_8(__lasx_xvfadd_s(accum1, accum2)); + +#endif + for (; ib < nb; ++ib) { + const float d = GGML_FP16_TO_FP32(y[ib].d)*GGML_FP16_TO_FP32(x[ib].d); + int sumi1 = 0, sumi2 = 0; + for (int j = 0; j < QK4_NL/2; ++j) { + sumi1 += y[ib].qs[j+ 0] * kvalues_iq4nl[x[ib].qs[j] & 0xf]; + sumi2 += y[ib].qs[j+QK4_NL/2] * kvalues_iq4nl[x[ib].qs[j] >> 4]; + } + sumf += d * (sumi1 + sumi2); + } + *s = sumf; +} + 
+void ggml_vec_dot_iq4_xs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + assert(n % QK_K == 0); + + const block_iq4_xs * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined __ARM_NEON + const int8x16_t values = vld1q_s8(kvalues_iq4nl); + const uint8x16_t m4b = vdupq_n_u8(0x0f); + ggml_uint8x16x2_t q4bits; + ggml_int8x16x4_t q4b; + ggml_int8x16x4_t q8b; + int32x4_t prod_1, prod_2; + + float sumf = 0; + + for (int ibl = 0; ibl < nb; ++ibl) { + + const int8_t * q8 = y[ibl].qs; + const uint8_t * q4 = x[ibl].qs; + uint16_t h = x[ibl].scales_h; + + int sumi1 = 0, sumi2 = 0; + for (int ib = 0; ib < QK_K/64; ++ib) { + + q4bits = ggml_vld1q_u8_x2(q4); q4 += 32; + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; + + q4b.val[0] = ggml_vqtbl1q_s8(values, vandq_u8 (q4bits.val[0], m4b)); + q4b.val[1] = ggml_vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[0], 4)); + q4b.val[2] = ggml_vqtbl1q_s8(values, vandq_u8 (q4bits.val[1], m4b)); + q4b.val[3] = ggml_vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[1], 4)); + + prod_1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[0], q8b.val[0]), q4b.val[1], q8b.val[1]); + prod_2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[2], q8b.val[2]), q4b.val[3], q8b.val[3]); + + int ls1 = ((x[ibl].scales_l[ib] & 0xf) | ((h << 4) & 0x30)) - 32; + int ls2 = ((x[ibl].scales_l[ib] >> 4) | ((h << 2) & 0x30)) - 32; + h >>= 4; + sumi1 += vaddvq_s32(prod_1) * ls1; + sumi2 += vaddvq_s32(prod_2) * ls2; + + } + + sumf += GGML_FP16_TO_FP32(x[ibl].d) * y[ibl].d * (sumi1 + sumi2); + } + + *s = sumf; + +#elif defined __AVX2__ + + const __m128i values128 = _mm_loadu_si128((const __m128i*)kvalues_iq4nl); + const __m128i m4b = _mm_set1_epi8(0x0f); + + __m256 accum = _mm256_setzero_ps(); + for (int ibl = 0; ibl < nb; ++ibl) { + const uint8_t * qs = x[ibl].qs; + const int8_t 
* q8 = y[ibl].qs; + uint16_t sh = x[ibl].scales_h; + __m256i sumi1 = _mm256_setzero_si256(); + __m256i sumi2 = _mm256_setzero_si256(); + for (int ib = 0; ib < QK_K/32; ib += 2) { + const __m128i q4bits_1 = _mm_loadu_si128((const __m128i*)qs); qs += 16; + const __m128i q4bits_2 = _mm_loadu_si128((const __m128i*)qs); qs += 16; + const __m256i q8b_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q8b_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q4b_1 = MM256_SET_M128I(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_1, 4), m4b)), + _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_1, m4b))); + const __m256i q4b_2 = MM256_SET_M128I(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_2, 4), m4b)), + _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_2, m4b))); + const __m256i p16_1 = mul_add_epi8(q4b_1, q8b_1); + const __m256i p16_2 = mul_add_epi8(q4b_2, q8b_2); + const int16_t ls1 = ((x[ibl].scales_l[ib/2] & 0xf) | ((sh << 4) & 0x30)) - 32; + const int16_t ls2 = ((x[ibl].scales_l[ib/2] >> 4) | ((sh << 2) & 0x30)) - 32; + sh >>= 4; + const __m256i p_1 = _mm256_madd_epi16(p16_1, _mm256_set1_epi16(ls1)); + const __m256i p_2 = _mm256_madd_epi16(p16_2, _mm256_set1_epi16(ls2)); + sumi1 = _mm256_add_epi32(p_1, sumi1); + sumi2 = _mm256_add_epi32(p_2, sumi2); + } + accum = _mm256_fmadd_ps(_mm256_set1_ps(GGML_FP16_TO_FP32(x[ibl].d)*y[ibl].d), + _mm256_cvtepi32_ps(_mm256_add_epi32(sumi1, sumi2)), accum); + } + + *s = hsum_float_8(accum); + +#elif defined __AVX__ + const __m128i values128 = _mm_loadu_si128((const __m128i*)kvalues_iq4nl); + const __m128i m4b = _mm_set1_epi8(0x0f); + + __m256 accum = _mm256_setzero_ps(); + for (int ibl = 0; ibl < nb; ++ibl) { + const uint8_t * qs = x[ibl].qs; + const int8_t * q8 = y[ibl].qs; + uint16_t sh = x[ibl].scales_h; + __m128i sumi1_0 = _mm_setzero_si128(); + __m128i sumi1_1 = _mm_setzero_si128(); + __m128i sumi2_0 = _mm_setzero_si128(); + __m128i sumi2_1 = 
_mm_setzero_si128(); + for (int ib = 0; ib < QK_K/32; ib += 2) { + const __m128i q4bits_1 = _mm_loadu_si128((const __m128i *)qs); qs += 16; + const __m128i q4bits_2 = _mm_loadu_si128((const __m128i *)qs); qs += 16; + const __m128i q8b_1_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8b_1_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8b_2_0 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q8b_2_1 = _mm_loadu_si128((const __m128i *)q8); q8 += 16; + const __m128i q4b_1_0 = _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_1, m4b)); + const __m128i q4b_1_1 = _mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_1, 4), m4b)); + const __m128i q4b_2_0 = _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_2, m4b)); + const __m128i q4b_2_1 = _mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_2, 4), m4b)); + const __m128i p16_1_0 = mul_add_epi8_sse(q4b_1_0, q8b_1_0); + const __m128i p16_1_1 = mul_add_epi8_sse(q4b_1_1, q8b_1_1); + const __m128i p16_2_0 = mul_add_epi8_sse(q4b_2_0, q8b_2_0); + const __m128i p16_2_1 = mul_add_epi8_sse(q4b_2_1, q8b_2_1); + const int16_t ls1 = ((x[ibl].scales_l[ib/2] & 0xf) | ((sh << 4) & 0x30)) - 32; + const int16_t ls2 = ((x[ibl].scales_l[ib/2] >> 4) | ((sh << 2) & 0x30)) - 32; + sh >>= 4; + const __m128i p_1_0 = _mm_madd_epi16(p16_1_0, _mm_set1_epi16(ls1)); + const __m128i p_1_1 = _mm_madd_epi16(p16_1_1, _mm_set1_epi16(ls1)); + const __m128i p_2_0 = _mm_madd_epi16(p16_2_0, _mm_set1_epi16(ls2)); + const __m128i p_2_1 = _mm_madd_epi16(p16_2_1, _mm_set1_epi16(ls2)); + sumi1_0 = _mm_add_epi32(p_1_0, sumi1_0); + sumi1_1 = _mm_add_epi32(p_1_1, sumi1_1); + sumi2_0 = _mm_add_epi32(p_2_0, sumi2_0); + sumi2_1 = _mm_add_epi32(p_2_1, sumi2_1); + } + __m128i sumi12_0 = _mm_add_epi32(sumi1_0, sumi2_0); + __m128i sumi12_1 = _mm_add_epi32(sumi1_1, sumi2_1); + accum = _mm256_add_ps(_mm256_mul_ps(_mm256_set1_ps(GGML_FP16_TO_FP32(x[ibl].d)*y[ibl].d), + 
_mm256_cvtepi32_ps(MM256_SET_M128I(sumi12_1, sumi12_0))), accum); + } + + *s = hsum_float_8(accum); + +#elif defined(__POWER9_VECTOR__) + const vector signed char lowMask = vec_splats((signed char)0xF); + const vector int v0 = vec_splats((int32_t)0); + const vector unsigned char v4 = vec_splats((unsigned char)0x4); + + vector float vsumf0 = vec_splats(0.0f); + vector float vsumf1 = vec_splats(0.0f); + vector float vsumf2 = vec_splats(0.0f); + vector float vsumf3 = vec_splats(0.0f); + + const vector signed char values = vec_xl( 0, kvalues_iq4nl); + + for (int ibl = 0; ibl < nb; ++ibl) { + + vector float vxd = vec_splats(GGML_FP16_TO_FP32(x[ibl].d)); + vector float vyd = vec_splats(y[ibl].d); + vector float vd = vec_mul(vxd, vyd); + + vector signed int vsumi0 = v0; + vector signed int vsumi1 = v0; + vector signed int vsumi2 = v0; + vector signed int vsumi3 = v0; + + uint16_t h = x[ibl].scales_h; + + const uint8_t * restrict q4 = x[ibl].qs; + const uint8_t * restrict sc = x[ibl].scales_l; + const int8_t * restrict q8 = y[ibl].qs; + + for (int ib = 0; ib < QK_K/64; ib ++ ) { + __builtin_prefetch(q4, 0, 1); + __builtin_prefetch(q8, 0, 1); + + vector signed char qxs0 = (vector signed char)vec_xl( 0, q4); + vector signed char qxs1 = (vector signed char)vec_xl(16, q4); + q4 += 32; + + vector signed char q4x00 = (vector signed char)vec_and(qxs0, lowMask); + vector signed char q4x01 = (vector signed char)vec_sr(qxs0, v4); + vector signed char q4x10 = (vector signed char)vec_and(qxs1, lowMask); + vector signed char q4x11 = (vector signed char)vec_sr(qxs1, v4); + + q4x00 = vec_perm(values, values, (vector unsigned char)q4x00); + q4x01 = vec_perm(values, values, (vector unsigned char)q4x01); + q4x10 = vec_perm(values, values, (vector unsigned char)q4x10); + q4x11 = vec_perm(values, values, (vector unsigned char)q4x11); + + vector signed char q8y0 = vec_xl( 0, q8); + vector signed char q8y1 = vec_xl(16, q8); + vector signed char q8y2 = vec_xl(32, q8); + vector signed char q8y3 = 
vec_xl(48, q8); + q8 += 64; + + vector signed short qv0 = vec_add(vec_mule(q4x00, q8y0), vec_mulo(q4x00, q8y0)); + vector signed short qv1 = vec_add(vec_mule(q4x01, q8y1), vec_mulo(q4x01, q8y1)); + vector signed short qv2 = vec_add(vec_mule(q4x10, q8y2), vec_mulo(q4x10, q8y2)); + vector signed short qv3 = vec_add(vec_mule(q4x11, q8y3), vec_mulo(q4x11, q8y3)); + + const uint16_t ls0 = (uint16_t)(((sc[0] & 0xf) | ((h << 4) & 0x30)) - 32); + const uint16_t ls1 = (uint16_t)(((sc[0] >> 4) | ((h << 2) & 0x30)) - 32); + h >>= 4; + sc ++; + + vector signed short vscales01 = vec_splats((int16_t)ls0); + vector signed short vscales23 = vec_splats((int16_t)ls1); + + vsumi0 = vec_msum(qv0, vscales01, vsumi0); + vsumi1 = vec_msum(qv1, vscales01, vsumi1); + vsumi2 = vec_msum(qv2, vscales23, vsumi2); + vsumi3 = vec_msum(qv3, vscales23, vsumi3); + } + + vsumf0 = vec_madd(vec_ctf(vsumi0, 0), vd, vsumf0); + vsumf1 = vec_madd(vec_ctf(vsumi1, 0), vd, vsumf1); + vsumf2 = vec_madd(vec_ctf(vsumi2, 0), vd, vsumf2); + vsumf3 = vec_madd(vec_ctf(vsumi3, 0), vd, vsumf3); + } + + vsumf0 = vec_add(vsumf0, vsumf2); + vsumf1 = vec_add(vsumf1, vsumf3); + + vsumf0 = vec_add(vsumf0, vsumf1); + + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 4)); + vsumf0 = vec_add(vsumf0, vec_sld(vsumf0, vsumf0, 8)); + + *s = vec_extract(vsumf0, 0); + +#elif defined(__loongarch_asx) + + const __m128i values128 = __lsx_vld((const __m128i*)kvalues_iq4nl, 0); + const __m128i m4b = __lsx_vreplgr2vr_b(0x0f); + + __m256 accum = (__m256)__lasx_xvldi(0); + __m256i tmp1; + __m128i tmp0, tmp2, tmp3, tmp4, mask_8f, mask; + + mask_8f = __lsx_vreplgr2vr_b(0x8f); + for (int ibl = 0; ibl < nb; ++ibl) { + const uint8_t * qs = x[ibl].qs; + const int8_t * q8 = y[ibl].qs; + uint16_t sh = x[ibl].scales_h; + __m256i sumi1 = __lasx_xvldi(0); + __m256i sumi2 = __lasx_xvldi(0); + __m128i zero = __lsx_vldi(0); + for (int ib = 0; ib < QK_K/32; ib += 2) { + const __m128i q4bits_1 = __lsx_vld((const __m128i*)qs, 0); qs += 16; + const __m128i 
q4bits_2 = __lsx_vld((const __m128i*)qs, 0); qs += 16; + const __m256i q8b_1 = __lasx_xvld((const __m256i *)q8, 0); q8 += 32; + const __m256i q8b_2 = __lasx_xvld((const __m256i *)q8, 0); q8 += 32; + tmp2 = __lsx_vand_v(__lsx_vand_v(__lsx_vsrli_h(q4bits_1, 4), m4b), mask_8f); + tmp0 = __lsx_vori_b(tmp2, 0x10); + mask = __lsx_vsle_b(zero, tmp2); + tmp3 = __lsx_vand_v(tmp0, mask); + tmp3 = __lsx_vshuf_b(values128, zero, tmp3); + + tmp2 = __lsx_vand_v(__lsx_vand_v(q4bits_1, m4b), mask_8f); + tmp0 = __lsx_vori_b(tmp2, 0x10); + mask = __lsx_vsle_b(zero, tmp2); + tmp4 = __lsx_vand_v(tmp0, mask); + tmp4 = __lsx_vshuf_b(values128, zero, tmp4); + + const __m256i q4b_1 = lasx_insertf128(tmp3, tmp4); + + tmp2 = __lsx_vand_v(__lsx_vand_v(__lsx_vsrli_h(q4bits_2, 4), m4b), mask_8f); + tmp0 = __lsx_vori_b(tmp2, 0x10); + mask = __lsx_vsle_b(zero, tmp2); + tmp3 = __lsx_vand_v(tmp0, mask); + tmp3 = __lsx_vshuf_b(values128, zero, tmp3); + + tmp2 = __lsx_vand_v(__lsx_vand_v(q4bits_2, m4b), mask_8f); + tmp0 = __lsx_vori_b(tmp2, 0x10); + mask = __lsx_vsle_b(zero, tmp2); + tmp4 = __lsx_vand_v(tmp0, mask); + tmp4 = __lsx_vshuf_b(values128, zero, tmp4); + + const __m256i q4b_2 = lasx_insertf128(tmp3, tmp4); + + const __m256i p16_1 = mul_add_epi8(q4b_1, q8b_1); + const __m256i p16_2 = mul_add_epi8(q4b_2, q8b_2); + const int16_t ls1 = ((x[ibl].scales_l[ib/2] & 0xf) | ((sh << 4) & 0x30)) - 32; + const int16_t ls2 = ((x[ibl].scales_l[ib/2] >> 4) | ((sh << 2) & 0x30)) - 32; + sh >>= 4; + __m256i tmp5, tmp6; + tmp1 = __lasx_xvreplgr2vr_h(ls1); + tmp5 = __lasx_xvmulwev_w_h(p16_1, tmp1); + tmp6 = __lasx_xvmulwod_w_h(p16_1, tmp1); + const __m256i p_1 = __lasx_xvadd_w(tmp5, tmp6); + tmp1 = __lasx_xvreplgr2vr_h(ls2); + tmp5 = __lasx_xvmulwev_w_h(p16_2, tmp1); + tmp6 = __lasx_xvmulwod_w_h(p16_2, tmp1); + const __m256i p_2 = __lasx_xvadd_w(tmp5, tmp6); + sumi1 = __lasx_xvadd_w(p_1, sumi1); + sumi2 = __lasx_xvadd_w(p_2, sumi2); + } + accum = 
__lasx_xvfmadd_s(__lasx_xvreplfr2vr_s(GGML_FP16_TO_FP32(x[ibl].d)*y[ibl].d), + __lasx_xvffint_s_w(__lasx_xvadd_w(sumi1, sumi2)), accum); + } + + *s = hsum_float_8(accum); + +#else + float sumf = 0; + for (int ibl = 0; ibl < nb; ++ibl) { + const float d4d8 = GGML_FP16_TO_FP32(x[ibl].d) * y[ibl].d; + uint16_t h = x[ibl].scales_h; + const uint8_t * qs = x[ibl].qs; + const int8_t * q8 = y[ibl].qs; + for (int ib = 0; ib < QK_K/32; ib += 2) { + const uint8_t ls1 = (x[ibl].scales_l[ib/2] & 0xf) | ((h << 4) & 0x30); + const uint8_t ls2 = (x[ibl].scales_l[ib/2] >> 4) | ((h << 2) & 0x30); + h >>= 4; + const float d1 = d4d8*(ls1 - 32); + const float d2 = d4d8*(ls2 - 32); + int sumi1 = 0, sumi2 = 0; + for (int j = 0; j < 16; ++j) { + sumi1 += q8[j+ 0] * kvalues_iq4nl[qs[j] & 0xf]; + sumi2 += q8[j+16] * kvalues_iq4nl[qs[j] >> 4]; + } + sumf += d1 * (sumi1 + sumi2); + qs += 16; + q8 += 32; + sumi1 = sumi2 = 0; + for (int j = 0; j < 16; ++j) { + sumi1 += q8[j+ 0] * kvalues_iq4nl[qs[j] & 0xf]; + sumi2 += q8[j+16] * kvalues_iq4nl[qs[j] >> 4]; + } + sumf += d2 * (sumi1 + sumi2); + qs += 16; + q8 += 32; + } + } + *s = sumf; +#endif +} + +// ================================ IQ2 quantization ============================================= + +typedef struct { + uint64_t * grid; + int * map; + uint16_t * neighbours; +} iq2_entry_t; + +static iq2_entry_t iq2_data[4] = { + {NULL, NULL, NULL}, + {NULL, NULL, NULL}, + {NULL, NULL, NULL}, + {NULL, NULL, NULL}, +}; + +static inline int iq2_data_index(enum ggml_type type) { + GGML_ASSERT(type == GGML_TYPE_IQ2_XXS || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ1_S || type == GGML_TYPE_IQ1_M || type == GGML_TYPE_IQ2_S); + return type == GGML_TYPE_IQ2_XXS ? 0 : + type == GGML_TYPE_IQ2_XS ? 1 : + type == GGML_TYPE_IQ1_S || type == GGML_TYPE_IQ1_M ? 
2 : 3; +} + +static inline int iq2_grid_size(enum ggml_type type) { + GGML_ASSERT(type == GGML_TYPE_IQ2_XXS || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ1_S || type == GGML_TYPE_IQ1_M || type == GGML_TYPE_IQ2_S); + return type == GGML_TYPE_IQ2_XXS ? 256 : + type == GGML_TYPE_IQ2_XS ? 512 : + type == GGML_TYPE_IQ1_S || type == GGML_TYPE_IQ1_M ? NGRID_IQ1S : 1024; +} + +static int iq2_compare_func(const void * left, const void * right) { + const int * l = (const int *)left; + const int * r = (const int *)right; + return l[0] < r[0] ? -1 : l[0] > r[0] ? 1 : l[1] < r[1] ? -1 : l[1] > r[1] ? 1 : 0; +} + +void iq2xs_init_impl(enum ggml_type type) { + const int gindex = iq2_data_index(type); + const int grid_size = iq2_grid_size(type); + if (iq2_data[gindex].grid) { + return; + } + static const uint16_t kgrid_2bit_256[256] = { + 0, 2, 5, 8, 10, 17, 20, 32, 34, 40, 42, 65, 68, 80, 88, 97, + 100, 128, 130, 138, 162, 257, 260, 272, 277, 320, 388, 408, 512, 514, 546, 642, + 1025, 1028, 1040, 1057, 1060, 1088, 1090, 1096, 1120, 1153, 1156, 1168, 1188, 1280, 1282, 1288, + 1312, 1350, 1385, 1408, 1425, 1545, 1552, 1600, 1668, 1700, 2048, 2053, 2056, 2068, 2088, 2113, + 2116, 2128, 2130, 2184, 2308, 2368, 2562, 2580, 4097, 4100, 4112, 4129, 4160, 4192, 4228, 4240, + 4245, 4352, 4360, 4384, 4432, 4442, 4480, 4644, 4677, 5120, 5128, 5152, 5157, 5193, 5248, 5400, + 5474, 5632, 5654, 6145, 6148, 6160, 6208, 6273, 6400, 6405, 6560, 6737, 8192, 8194, 8202, 8260, + 8289, 8320, 8322, 8489, 8520, 8704, 8706, 9217, 9220, 9232, 9280, 9302, 9472, 9537, 9572, 9872, + 10248, 10272, 10388, 10820, 16385, 16388, 16400, 16408, 16417, 16420, 16448, 16456, 16470, 16480, 16513, 16516, + 16528, 16640, 16672, 16737, 16768, 16773, 16897, 16912, 16968, 16982, 17000, 17408, 17416, 17440, 17536, 17561, + 17682, 17700, 17920, 18433, 18436, 18448, 18496, 18501, 18688, 18776, 18785, 18818, 19013, 19088, 20480, 20488, + 20497, 20505, 20512, 20608, 20616, 20740, 20802, 20900, 21137, 21648, 21650, 21770, 
22017, 22100, 22528, 22545, + 22553, 22628, 22848, 23048, 24580, 24592, 24640, 24680, 24832, 24917, 25112, 25184, 25600, 25605, 25872, 25874, + 25988, 26690, 32768, 32770, 32778, 32833, 32898, 33028, 33048, 33088, 33297, 33793, 33796, 33808, 33813, 33856, + 33888, 34048, 34118, 34196, 34313, 34368, 34400, 34818, 35076, 35345, 36868, 36880, 36900, 36928, 37025, 37142, + 37248, 37445, 37888, 37922, 37956, 38225, 39041, 39200, 40962, 41040, 41093, 41225, 41472, 42008, 43088, 43268, + }; + static const uint16_t kgrid_2bit_512[512] = { + 0, 2, 5, 8, 10, 17, 20, 22, 25, 32, 34, 37, 40, 65, 68, 70, + 73, 80, 82, 85, 88, 97, 100, 128, 130, 133, 136, 145, 148, 153, 160, 257, + 260, 262, 265, 272, 274, 277, 280, 282, 289, 292, 320, 322, 325, 328, 337, 340, + 352, 360, 385, 388, 400, 512, 514, 517, 520, 529, 532, 544, 577, 580, 592, 597, + 640, 650, 1025, 1028, 1030, 1033, 1040, 1042, 1045, 1048, 1057, 1060, 1088, 1090, 1093, 1096, + 1105, 1108, 1110, 1120, 1153, 1156, 1168, 1280, 1282, 1285, 1288, 1297, 1300, 1312, 1345, 1348, + 1360, 1377, 1408, 1537, 1540, 1552, 1574, 1600, 1602, 1668, 2048, 2050, 2053, 2056, 2058, 2065, + 2068, 2080, 2085, 2113, 2116, 2128, 2136, 2176, 2208, 2218, 2305, 2308, 2320, 2368, 2433, 2441, + 2560, 2592, 2600, 2710, 2720, 4097, 4100, 4102, 4105, 4112, 4114, 4117, 4120, 4129, 4132, 4160, + 4162, 4165, 4168, 4177, 4180, 4192, 4202, 4225, 4228, 4240, 4352, 4354, 4357, 4360, 4369, 4372, + 4384, 4417, 4420, 4432, 4480, 4500, 4502, 4609, 4612, 4614, 4624, 4672, 4704, 5120, 5122, 5125, + 5128, 5137, 5140, 5152, 5185, 5188, 5193, 5200, 5220, 5248, 5377, 5380, 5392, 5440, 5632, 5652, + 5705, 6145, 6148, 6160, 6162, 6208, 6228, 6278, 6400, 6405, 6502, 6737, 6825, 8192, 8194, 8197, + 8200, 8202, 8209, 8212, 8224, 8257, 8260, 8272, 8320, 8352, 8449, 8452, 8464, 8512, 8520, 8549, + 8704, 8738, 8832, 8872, 9217, 9220, 9232, 9257, 9280, 9472, 9537, 9554, 9625, 9729, 9754, 9894, + 10240, 10248, 10250, 10272, 10325, 10376, 10402, 10600, 10640, 10760, 10784, 
10882, 10888, 10890, 16385, 16388, + 16390, 16393, 16400, 16402, 16405, 16408, 16417, 16420, 16448, 16450, 16453, 16456, 16458, 16465, 16468, 16480, + 16485, 16513, 16516, 16528, 16640, 16642, 16645, 16648, 16657, 16660, 16672, 16705, 16708, 16720, 16768, 16773, + 16802, 16897, 16900, 16912, 16914, 16937, 16960, 17408, 17410, 17413, 17416, 17425, 17428, 17433, 17440, 17473, + 17476, 17488, 17536, 17556, 17665, 17668, 17680, 17700, 17728, 17818, 17920, 17930, 17988, 18000, 18433, 18436, + 18448, 18496, 18501, 18516, 18530, 18688, 18705, 18756, 18768, 18793, 18948, 20480, 20482, 20485, 20488, 20497, + 20500, 20512, 20520, 20545, 20548, 20560, 20608, 20737, 20740, 20752, 20757, 20800, 20802, 20992, 21060, 21162, + 21505, 21508, 21520, 21537, 21568, 21600, 21633, 21665, 21760, 21768, 21888, 21896, 22049, 22120, 22177, 22528, + 22548, 22593, 22608, 22681, 22810, 22848, 22850, 23173, 24577, 24580, 24592, 24640, 24660, 24674, 24710, 24745, + 24832, 25124, 25162, 25234, 25600, 25622, 25872, 25920, 25925, 26020, 26625, 26730, 26917, 27142, 27220, 27234, + 32768, 32770, 32773, 32776, 32785, 32788, 32800, 32810, 32833, 32836, 32848, 32896, 32898, 32936, 32938, 33025, + 33028, 33030, 33040, 33088, 33105, 33113, 33280, 33312, 33408, 33410, 33440, 33448, 33793, 33796, 33808, 33810, + 33813, 33856, 33888, 33929, 34048, 34116, 34213, 34328, 34410, 34816, 34824, 34853, 34906, 34944, 34946, 34984, + 35078, 35362, 35456, 35464, 35478, 35496, 36865, 36868, 36880, 36928, 36950, 36996, 37120, 37154, 37220, 37462, + 37513, 37888, 37893, 37956, 37968, 37976, 38185, 38288, 38290, 38465, 38993, 39078, 39241, 39445, 39520, 40960, + 40962, 40968, 40970, 40992, 41002, 41120, 41297, 41305, 41382, 41472, 41474, 41480, 41514, 41600, 41632, 42048, + 42133, 42597, 42648, 43018, 43040, 43042, 43048, 43168, 43176, 43268, 43396, 43398, 43560, 43562, 43665, 43690, + }; + static const uint16_t kgrid_1bit_2048[NGRID_IQ1S] = { + 0, 2, 5, 8, 10, 17, 21, 32, 34, 40, 42, 69, 81, 84, 86, 101, + 128, 130, 136, 
138, 149, 160, 162, 168, 170, 260, 261, 273, 276, 278, 281, 282, + 293, 321, 326, 329, 338, 341, 346, 353, 356, 358, 360, 389, 401, 404, 406, 421, + 512, 514, 520, 522, 533, 544, 546, 552, 554, 581, 593, 601, 612, 617, 640, 642, + 648, 650, 657, 661, 665, 672, 674, 680, 682, 1041, 1044, 1046, 1061, 1089, 1097, 1109, + 1114, 1124, 1125, 1169, 1177, 1189, 1281, 1284, 1285, 1286, 1301, 1304, 1306, 1321, 1344, 1349, + 1354, 1360, 1361, 1364, 1365, 1366, 1369, 1376, 1378, 1381, 1384, 1386, 1409, 1425, 1429, 1432, + 1434, 1441, 1444, 1445, 1446, 1449, 1556, 1561, 1601, 1604, 1616, 1618, 1621, 1624, 1632, 1633, + 1638, 1641, 1669, 1681, 1684, 1689, 2048, 2050, 2056, 2058, 2069, 2080, 2082, 2088, 2090, 2117, + 2129, 2134, 2149, 2176, 2178, 2184, 2186, 2197, 2208, 2210, 2216, 2218, 2309, 2321, 2324, 2329, + 2340, 2341, 2369, 2384, 2385, 2389, 2401, 2404, 2409, 2449, 2452, 2454, 2457, 2469, 2560, 2562, + 2568, 2570, 2581, 2592, 2594, 2600, 2602, 2629, 2641, 2649, 2657, 2661, 2688, 2690, 2693, 2696, + 2698, 2709, 2720, 2722, 2728, 2730, 4112, 4113, 4116, 4121, 4132, 4133, 4161, 4164, 4176, 4181, + 4184, 4193, 4196, 4197, 4201, 4241, 4244, 4246, 4257, 4261, 4353, 4356, 4358, 4361, 4368, 4370, + 4373, 4376, 4385, 4388, 4393, 4421, 4426, 4432, 4433, 4434, 4436, 4437, 4438, 4441, 4448, 4453, + 4484, 4498, 4501, 4513, 4516, 4625, 4628, 4630, 4645, 4672, 4678, 4681, 4690, 4693, 4696, 4698, + 4708, 4710, 4741, 4753, 4756, 4758, 4773, 5121, 5126, 5129, 5140, 5141, 5144, 5145, 5153, 5158, + 5185, 5189, 5190, 5192, 5194, 5201, 5204, 5205, 5206, 5209, 5218, 5221, 5224, 5252, 5257, 5264, + 5268, 5269, 5272, 5273, 5274, 5281, 5284, 5285, 5289, 5378, 5381, 5386, 5393, 5396, 5397, 5398, + 5401, 5408, 5410, 5413, 5416, 5418, 5441, 5444, 5445, 5446, 5457, 5458, 5460, 5461, 5462, 5465, + 5466, 5473, 5476, 5477, 5478, 5481, 5504, 5506, 5508, 5509, 5512, 5514, 5520, 5521, 5524, 5525, + 5526, 5529, 5530, 5536, 5538, 5541, 5633, 5636, 5637, 5638, 5653, 5654, 5656, 5658, 5665, 5670, + 5696, 5698, 
5700, 5701, 5704, 5706, 5713, 5717, 5718, 5720, 5721, 5729, 5732, 5733, 5736, 5737, + 5738, 5766, 5770, 5778, 5781, 5796, 5801, 6161, 6166, 6181, 6209, 6212, 6214, 6217, 6224, 6229, + 6232, 6234, 6240, 6241, 6244, 6246, 6249, 6277, 6289, 6292, 6309, 6416, 6418, 6421, 6426, 6433, + 6437, 6466, 6468, 6469, 6472, 6481, 6484, 6485, 6486, 6489, 6490, 6496, 6501, 6506, 6537, 6545, + 6546, 6549, 6552, 6561, 6566, 6569, 6665, 6678, 6692, 6694, 6724, 6726, 6729, 6736, 6738, 6741, + 6744, 6753, 6758, 6761, 6789, 6801, 6806, 6810, 8192, 8194, 8200, 8202, 8213, 8224, 8226, 8229, + 8232, 8234, 8261, 8273, 8281, 8289, 8293, 8320, 8322, 8328, 8330, 8341, 8352, 8354, 8357, 8360, + 8362, 8453, 8465, 8468, 8473, 8485, 8514, 8516, 8521, 8533, 8536, 8538, 8545, 8548, 8549, 8550, + 8581, 8592, 8598, 8601, 8613, 8705, 8712, 8714, 8721, 8725, 8736, 8738, 8744, 8746, 8773, 8785, + 8790, 8793, 8805, 8833, 8840, 8842, 8849, 8853, 8864, 8866, 8872, 8874, 9221, 9236, 9238, 9241, + 9253, 9284, 9285, 9286, 9289, 9298, 9301, 9304, 9306, 9318, 9349, 9361, 9364, 9369, 9377, 9381, + 9481, 9493, 9505, 9513, 9536, 9541, 9544, 9553, 9556, 9557, 9561, 9570, 9573, 9576, 9609, 9616, + 9620, 9621, 9624, 9626, 9633, 9636, 9638, 9641, 9733, 9744, 9746, 9753, 9765, 9793, 9801, 9813, + 9824, 9825, 9833, 9860, 9862, 9872, 9882, 10240, 10242, 10248, 10250, 10261, 10272, 10274, 10280, 10282, + 10309, 10321, 10324, 10341, 10368, 10370, 10376, 10378, 10400, 10402, 10408, 10410, 10505, 10513, 10516, 10521, + 10533, 10566, 10569, 10578, 10581, 10593, 10596, 10598, 10601, 10629, 10640, 10646, 10649, 10660, 10661, 10752, + 10754, 10760, 10762, 10784, 10786, 10792, 10794, 10821, 10833, 10838, 10841, 10853, 10880, 10882, 10888, 10890, + 10901, 10912, 10914, 10920, 10922, 16389, 16401, 16406, 16421, 16457, 16466, 16469, 16472, 16474, 16481, 16484, + 16486, 16532, 16537, 16545, 16550, 16640, 16641, 16644, 16646, 16649, 16658, 16661, 16662, 16664, 16666, 16673, + 16678, 16681, 16709, 16712, 16714, 16721, 16724, 16725, 
16726, 16729, 16730, 16741, 16744, 16746, 16769, 16772, + 16774, 16784, 16786, 16789, 16800, 16801, 16802, 16901, 16913, 16916, 16918, 16933, 16961, 16978, 16981, 16986, + 16996, 17001, 17033, 17044, 17061, 17409, 17429, 17433, 17449, 17477, 17480, 17482, 17489, 17492, 17493, 17494, + 17505, 17506, 17509, 17512, 17514, 17537, 17542, 17545, 17552, 17554, 17557, 17568, 17569, 17577, 17665, 17666, + 17669, 17674, 17681, 17684, 17685, 17686, 17689, 17696, 17701, 17706, 17729, 17732, 17733, 17734, 17737, 17744, + 17745, 17748, 17749, 17750, 17752, 17753, 17761, 17764, 17765, 17766, 17769, 17794, 17796, 17797, 17800, 17809, + 17812, 17813, 17814, 17817, 17818, 17829, 17832, 17834, 17921, 17925, 17929, 17940, 17941, 17944, 17946, 17953, + 17956, 17961, 17984, 17986, 17989, 17992, 18000, 18001, 18002, 18005, 18006, 18009, 18018, 18021, 18024, 18049, + 18053, 18058, 18068, 18069, 18081, 18084, 18086, 18437, 18449, 18453, 18458, 18469, 18498, 18505, 18512, 18517, + 18520, 18529, 18532, 18534, 18537, 18565, 18577, 18580, 18582, 18585, 18597, 18689, 18693, 18694, 18698, 18704, + 18708, 18709, 18712, 18721, 18724, 18726, 18752, 18757, 18762, 18769, 18770, 18772, 18773, 18774, 18777, 18784, + 18786, 18789, 18790, 18794, 18822, 18825, 18834, 18837, 18838, 18840, 18849, 18852, 18854, 18857, 18966, 19012, + 19014, 19017, 19029, 19032, 19034, 19044, 19049, 19092, 19109, 20481, 20484, 20485, 20486, 20489, 20498, 20501, + 20506, 20513, 20516, 20521, 20544, 20549, 20552, 20561, 20564, 20565, 20566, 20569, 20581, 20584, 20614, 20617, + 20629, 20632, 20640, 20641, 20646, 20649, 20741, 20744, 20745, 20746, 20753, 20756, 20757, 20758, 20760, 20761, + 20768, 20773, 20774, 20776, 20778, 20801, 20804, 20805, 20806, 20809, 20816, 20817, 20818, 20820, 20821, 20822, + 20824, 20825, 20826, 20833, 20836, 20837, 20838, 20841, 20866, 20869, 20881, 20884, 20885, 20886, 20889, 20896, + 20901, 20906, 20993, 20998, 21010, 21013, 21018, 21025, 21028, 21058, 21061, 21066, 21073, 21076, 21077, 21078, + 
21081, 21090, 21093, 21125, 21136, 21138, 21141, 21145, 21146, 21156, 21508, 21509, 21521, 21524, 21525, 21526, + 21528, 21529, 21537, 21541, 21544, 21546, 21569, 21572, 21573, 21574, 21577, 21578, 21584, 21585, 21588, 21589, + 21590, 21592, 21593, 21594, 21601, 21602, 21604, 21605, 21606, 21609, 21632, 21640, 21642, 21649, 21652, 21653, + 21654, 21657, 21665, 21668, 21669, 21674, 21761, 21762, 21764, 21765, 21766, 21769, 21776, 21777, 21778, 21780, + 21781, 21782, 21785, 21786, 21793, 21796, 21797, 21798, 21801, 21824, 21825, 21826, 21828, 21829, 21830, 21832, + 21833, 21840, 21841, 21842, 21844, 21845, 21846, 21848, 21849, 21850, 21856, 21857, 21860, 21861, 21862, 21864, + 21865, 21866, 21889, 21892, 21893, 21897, 21898, 21904, 21905, 21908, 21909, 21910, 21912, 21913, 21921, 21924, + 21925, 21926, 21929, 22016, 22017, 22018, 22020, 22022, 22024, 22025, 22033, 22036, 22037, 22040, 22041, 22048, + 22049, 22050, 22052, 22053, 22054, 22056, 22057, 22081, 22085, 22086, 22088, 22089, 22090, 22096, 22097, 22098, + 22100, 22101, 22102, 22104, 22105, 22106, 22113, 22116, 22117, 22121, 22146, 22149, 22150, 22152, 22153, 22154, + 22161, 22165, 22170, 22178, 22181, 22182, 22184, 22185, 22532, 22533, 22534, 22537, 22544, 22549, 22552, 22561, + 22570, 22597, 22600, 22602, 22609, 22612, 22613, 22614, 22616, 22617, 22624, 22626, 22628, 22629, 22658, 22665, + 22672, 22674, 22677, 22680, 22689, 22697, 22785, 22786, 22789, 22794, 22801, 22804, 22805, 22806, 22809, 22821, + 22849, 22852, 22853, 22854, 22857, 22864, 22865, 22866, 22868, 22869, 22870, 22872, 22873, 22874, 22881, 22884, + 22885, 22886, 22889, 22913, 22917, 22921, 22929, 22932, 22933, 22934, 22936, 22937, 22949, 23044, 23048, 23061, + 23066, 23072, 23077, 23078, 23081, 23109, 23112, 23113, 23121, 23125, 23126, 23128, 23129, 23138, 23141, 23144, + 23146, 23169, 23178, 23186, 23189, 23190, 23192, 23194, 23201, 24581, 24596, 24598, 24601, 24613, 24644, 24656, + 24661, 24662, 24664, 24666, 24673, 24676, 24678, 24681, 
24705, 24726, 24741, 24833, 24836, 24838, 24841, 24850, + 24853, 24865, 24866, 24870, 24873, 24901, 24905, 24913, 24917, 24918, 24921, 24933, 24934, 24938, 24964, 24970, + 24978, 24981, 24993, 24998, 25001, 25105, 25110, 25113, 25152, 25153, 25158, 25173, 25174, 25176, 25184, 25221, + 25233, 25238, 25253, 25617, 25618, 25621, 25622, 25626, 25633, 25638, 25641, 25664, 25666, 25669, 25672, 25674, + 25681, 25684, 25685, 25686, 25689, 25690, 25696, 25698, 25701, 25732, 25733, 25737, 25744, 25746, 25748, 25749, + 25750, 25752, 25754, 25761, 25764, 25769, 25861, 25864, 25866, 25873, 25877, 25878, 25881, 25924, 25925, 25926, + 25929, 25936, 25937, 25940, 25941, 25942, 25945, 25953, 25956, 25957, 25958, 25961, 25990, 25993, 25994, 26001, + 26005, 26006, 26009, 26010, 26018, 26021, 26022, 26024, 26114, 26121, 26133, 26144, 26150, 26152, 26153, 26176, + 26181, 26184, 26186, 26193, 26196, 26197, 26198, 26200, 26202, 26208, 26213, 26216, 26240, 26242, 26245, 26250, + 26260, 26262, 26264, 26265, 26272, 26276, 26278, 26282, 26646, 26649, 26661, 26689, 26706, 26709, 26714, 26721, + 26729, 26757, 26769, 26776, 26790, 26881, 26884, 26896, 26901, 26913, 26916, 26918, 26921, 26944, 26945, 26949, + 26950, 26952, 26961, 26964, 26965, 26966, 26969, 26976, 26981, 26986, 27010, 27012, 27018, 27029, 27041, 27044, + 27045, 27049, 27153, 27158, 27160, 27201, 27204, 27209, 27216, 27221, 27224, 27226, 27236, 27237, 27241, 27270, + 27284, 27288, 27290, 27302, 32768, 32770, 32776, 32778, 32800, 32802, 32808, 32810, 32837, 32848, 32849, 32852, + 32854, 32857, 32869, 32896, 32898, 32904, 32906, 32917, 32928, 32930, 32936, 32938, 33029, 33041, 33044, 33046, + 33049, 33061, 33089, 33092, 33097, 33104, 33106, 33109, 33110, 33112, 33113, 33124, 33126, 33129, 33157, 33161, + 33172, 33174, 33177, 33189, 33280, 33282, 33288, 33290, 33301, 33312, 33314, 33320, 33322, 33361, 33364, 33369, + 33381, 33408, 33410, 33416, 33418, 33429, 33440, 33442, 33448, 33450, 33812, 33817, 33857, 33860, 33873, 33877, + 
33882, 33889, 33892, 33897, 33940, 33945, 34049, 34057, 34066, 34069, 34074, 34086, 34089, 34112, 34113, 34117, + 34120, 34129, 34132, 34133, 34134, 34137, 34138, 34149, 34150, 34152, 34154, 34177, 34180, 34182, 34185, 34192, + 34194, 34197, 34200, 34214, 34321, 34326, 34329, 34341, 34369, 34372, 34377, 34378, 34384, 34389, 34393, 34394, + 34401, 34406, 34410, 34437, 34449, 34458, 34468, 34816, 34818, 34824, 34826, 34837, 34848, 34850, 34856, 34858, + 34881, 34885, 34897, 34900, 34905, 34917, 34921, 34944, 34946, 34952, 34954, 34965, 34976, 34978, 34984, 34986, + 35077, 35078, 35089, 35092, 35094, 35109, 35137, 35140, 35142, 35145, 35152, 35154, 35157, 35162, 35169, 35172, + 35205, 35222, 35225, 35237, 35328, 35330, 35336, 35338, 35349, 35360, 35362, 35368, 35370, 35397, 35409, 35412, + 35414, 35456, 35458, 35464, 35466, 35477, 35488, 35490, 35496, 35498, 36869, 36881, 36886, 36888, 36889, 36901, + 36929, 36934, 36937, 36949, 36952, 36954, 36969, 36970, 36997, 37009, 37012, 37014, 37017, 37029, 37121, 37124, + 37126, 37129, 37136, 37141, 37144, 37146, 37153, 37156, 37158, 37161, 37184, 37189, 37200, 37201, 37204, 37205, + 37206, 37209, 37218, 37221, 37252, 37254, 37266, 37269, 37272, 37281, 37284, 37286, 37289, 37381, 37393, 37396, + 37401, 37413, 37444, 37446, 37449, 37456, 37458, 37461, 37464, 37478, 37481, 37509, 37524, 37526, 37545, 37889, + 37892, 37894, 37904, 37909, 37912, 37926, 37952, 37962, 37969, 37972, 37973, 37974, 37976, 37977, 37984, 37985, + 37986, 37989, 38020, 38022, 38034, 38036, 38037, 38040, 38049, 38057, 38144, 38149, 38152, 38154, 38160, 38161, + 38164, 38165, 38166, 38169, 38177, 38181, 38185, 38186, 38209, 38212, 38213, 38214, 38217, 38224, 38225, 38226, + 38228, 38229, 38230, 38232, 38233, 38234, 38241, 38244, 38245, 38246, 38249, 38273, 38277, 38280, 38289, 38290, + 38292, 38293, 38294, 38297, 38298, 38304, 38306, 38309, 38312, 38314, 38401, 38404, 38416, 38421, 38425, 38432, + 38438, 38441, 38469, 38472, 38473, 38481, 38482, 38485, 
38486, 38489, 38501, 38504, 38530, 38532, 38537, 38538, + 38546, 38548, 38549, 38564, 38566, 38569, 38917, 38934, 38937, 38949, 38977, 38982, 38992, 38994, 38997, 38998, + 39002, 39012, 39013, 39045, 39057, 39062, 39065, 39077, 39172, 39174, 39177, 39184, 39186, 39189, 39192, 39194, + 39200, 39201, 39204, 39206, 39232, 39234, 39237, 39240, 39242, 39249, 39252, 39253, 39254, 39257, 39266, 39269, + 39270, 39274, 39297, 39300, 39312, 39314, 39317, 39322, 39329, 39334, 39429, 39445, 39461, 39492, 39494, 39497, + 39504, 39509, 39512, 39521, 39557, 39569, 39572, 39573, 39574, 40960, 40962, 40968, 40970, 40981, 40992, 40994, + 41000, 41002, 41029, 41041, 41044, 41046, 41049, 41088, 41090, 41096, 41098, 41109, 41120, 41122, 41128, 41130, + 41221, 41225, 41233, 41236, 41238, 41241, 41242, 41286, 41289, 41297, 41301, 41304, 41306, 41313, 41316, 41349, + 41360, 41362, 41366, 41369, 41474, 41480, 41482, 41488, 41497, 41506, 41512, 41514, 41541, 41553, 41558, 41561, + 41573, 41600, 41602, 41608, 41610, 41621, 41632, 41634, 41640, 41642, 42009, 42021, 42049, 42052, 42064, 42068, + 42069, 42072, 42074, 42081, 42085, 42086, 42088, 42089, 42117, 42246, 42249, 42256, 42258, 42261, 42264, 42278, + 42281, 42306, 42309, 42321, 42324, 42325, 42326, 42329, 42341, 42346, 42369, 42372, 42373, 42374, 42377, 42386, + 42389, 42392, 42501, 42513, 42518, 42522, 42529, 42533, 42564, 42566, 42570, 42578, 42581, 42582, 42584, 42592, + 42594, 42630, 42640, 42645, 42646, 42649, 42657, 42660, 42662, 43008, 43010, 43016, 43018, 43040, 43042, 43048, + 43050, 43089, 43092, 43094, 43097, 43136, 43138, 43144, 43146, 43157, 43168, 43170, 43176, 43178, 43269, 43284, + 43289, 43297, 43301, 43329, 43344, 43349, 43354, 43361, 43366, 43369, 43408, 43414, 43520, 43522, 43528, 43530, + 43552, 43554, 43560, 43562, 43601, 43604, 43606, 43648, 43650, 43656, 43658, 43669, 43680, 43682, 43688, 43690, + }; + static const uint16_t kgrid_2bit_1024[1024] = { + 0, 2, 5, 8, 10, 17, 20, 22, 25, 32, 34, 37, 40, 65, 68, 70, + 
73, 80, 82, 85, 88, 97, 100, 102, 105, 128, 130, 133, 136, 145, 148, 160, + 165, 170, 257, 260, 262, 265, 272, 274, 277, 280, 289, 292, 320, 322, 325, 328, + 337, 340, 342, 345, 352, 357, 360, 385, 388, 400, 402, 405, 417, 420, 512, 514, + 517, 520, 529, 532, 544, 554, 577, 580, 582, 585, 592, 597, 640, 645, 650, 660, + 674, 1025, 1028, 1030, 1033, 1040, 1042, 1045, 1048, 1057, 1060, 1062, 1065, 1088, 1090, 1093, + 1096, 1098, 1105, 1108, 1110, 1113, 1120, 1122, 1125, 1153, 1156, 1158, 1161, 1168, 1173, 1176, + 1185, 1188, 1280, 1282, 1285, 1288, 1290, 1297, 1300, 1302, 1305, 1312, 1317, 1320, 1345, 1348, + 1350, 1353, 1360, 1362, 1365, 1368, 1377, 1380, 1408, 1410, 1413, 1416, 1425, 1428, 1440, 1537, + 1540, 1542, 1545, 1552, 1557, 1600, 1605, 1608, 1617, 1620, 1632, 1665, 1668, 1680, 2048, 2050, + 2053, 2056, 2065, 2068, 2070, 2073, 2080, 2085, 2090, 2113, 2116, 2118, 2121, 2128, 2130, 2133, + 2136, 2145, 2148, 2176, 2181, 2196, 2218, 2305, 2308, 2320, 2322, 2325, 2328, 2337, 2368, 2373, + 2376, 2385, 2388, 2400, 2433, 2448, 2560, 2577, 2580, 2594, 2600, 2602, 2640, 2713, 4097, 4100, + 4102, 4105, 4112, 4114, 4117, 4120, 4129, 4132, 4134, 4160, 4162, 4165, 4168, 4177, 4180, 4182, + 4185, 4192, 4194, 4197, 4200, 4225, 4228, 4230, 4240, 4245, 4248, 4257, 4260, 4352, 4354, 4357, + 4360, 4362, 4369, 4372, 4374, 4377, 4384, 4386, 4389, 4392, 4417, 4420, 4422, 4425, 4432, 4434, + 4437, 4440, 4449, 4452, 4480, 4482, 4485, 4488, 4497, 4500, 4609, 4612, 4617, 4624, 4629, 4641, + 4644, 4672, 4677, 4689, 4692, 4737, 4740, 4752, 5120, 5122, 5125, 5128, 5137, 5140, 5142, 5145, + 5152, 5157, 5160, 5185, 5188, 5190, 5193, 5200, 5202, 5205, 5208, 5217, 5220, 5248, 5250, 5253, + 5256, 5265, 5268, 5280, 5377, 5380, 5382, 5385, 5392, 5394, 5397, 5400, 5409, 5412, 5440, 5442, + 5445, 5448, 5457, 5460, 5472, 5505, 5508, 5520, 5632, 5637, 5640, 5649, 5652, 5664, 5697, 5700, + 5712, 5760, 5802, 6145, 6148, 6150, 6153, 6160, 6165, 6168, 6177, 6208, 6210, 6213, 6216, 6225, + 6228, 6240, 
6273, 6276, 6400, 6402, 6405, 6408, 6417, 6420, 6432, 6465, 6468, 6480, 6505, 6562, + 6660, 6672, 6720, 6742, 8192, 8194, 8197, 8200, 8209, 8212, 8214, 8217, 8224, 8229, 8234, 8257, + 8260, 8272, 8274, 8277, 8292, 8320, 8330, 8340, 8362, 8449, 8452, 8464, 8466, 8469, 8481, 8512, + 8514, 8517, 8529, 8532, 8544, 8577, 8580, 8592, 8704, 8714, 8738, 8744, 8746, 8772, 8784, 8840, + 8842, 8872, 9217, 9220, 9222, 9225, 9232, 9237, 9240, 9249, 9252, 9280, 9282, 9285, 9288, 9297, + 9300, 9312, 9345, 9348, 9360, 9472, 9477, 9480, 9489, 9492, 9504, 9537, 9540, 9552, 9574, 9600, + 9729, 9732, 9744, 9792, 9817, 10240, 10245, 10257, 10260, 10305, 10308, 10320, 10378, 10410, 10497, 10500, + 10512, 10645, 10762, 10786, 10852, 10888, 10890, 16385, 16388, 16390, 16393, 16400, 16402, 16405, 16408, 16410, + 16417, 16420, 16422, 16448, 16450, 16453, 16456, 16458, 16465, 16468, 16470, 16473, 16480, 16482, 16485, 16513, + 16516, 16528, 16533, 16536, 16545, 16548, 16640, 16642, 16645, 16648, 16657, 16660, 16662, 16665, 16672, 16674, + 16677, 16705, 16708, 16710, 16713, 16720, 16722, 16725, 16728, 16737, 16740, 16768, 16770, 16773, 16776, 16785, + 16788, 16800, 16897, 16900, 16912, 16914, 16917, 16920, 16932, 16960, 16965, 16968, 16977, 16980, 16992, 17025, + 17028, 17408, 17410, 17413, 17416, 17418, 17425, 17428, 17430, 17433, 17440, 17442, 17445, 17448, 17473, 17476, + 17478, 17481, 17488, 17490, 17493, 17496, 17505, 17508, 17536, 17538, 17541, 17544, 17553, 17556, 17568, 17665, + 17668, 17670, 17673, 17680, 17682, 17685, 17688, 17697, 17700, 17728, 17730, 17733, 17736, 17745, 17748, 17760, + 17770, 17793, 17796, 17808, 17920, 17922, 17925, 17928, 17937, 17940, 17952, 17985, 17988, 18000, 18048, 18085, + 18433, 18436, 18441, 18448, 18450, 18453, 18456, 18465, 18468, 18496, 18498, 18501, 18504, 18513, 18516, 18528, + 18564, 18576, 18688, 18690, 18693, 18696, 18705, 18708, 18720, 18753, 18756, 18768, 18816, 18838, 18945, 18948, + 18960, 19008, 20480, 20482, 20485, 20488, 20497, 20500, 
20502, 20505, 20512, 20514, 20517, 20520, 20545, 20548, + 20550, 20553, 20560, 20562, 20565, 20568, 20577, 20580, 20608, 20610, 20613, 20616, 20625, 20628, 20737, 20740, + 20742, 20745, 20752, 20754, 20757, 20760, 20769, 20772, 20800, 20802, 20805, 20808, 20817, 20820, 20832, 20865, + 20868, 20880, 20992, 20997, 21000, 21009, 21012, 21024, 21057, 21060, 21072, 21097, 21120, 21505, 21508, 21510, + 21513, 21520, 21522, 21525, 21528, 21537, 21540, 21568, 21570, 21573, 21576, 21585, 21588, 21600, 21633, 21636, + 21648, 21760, 21762, 21765, 21768, 21777, 21780, 21792, 21825, 21828, 21840, 21888, 22017, 22020, 22032, 22054, + 22080, 22528, 22530, 22533, 22536, 22545, 22548, 22560, 22593, 22596, 22608, 22618, 22656, 22785, 22788, 22800, + 22848, 23040, 23065, 23173, 23208, 24577, 24580, 24582, 24592, 24594, 24597, 24600, 24609, 24612, 24640, 24645, + 24648, 24657, 24660, 24672, 24708, 24720, 24832, 24834, 24837, 24840, 24849, 24852, 24864, 24897, 24900, 24912, + 24960, 24985, 25092, 25104, 25152, 25174, 25249, 25600, 25605, 25608, 25617, 25620, 25632, 25665, 25668, 25680, + 25728, 25857, 25860, 25872, 25920, 25930, 25960, 26002, 26112, 26260, 26625, 26628, 26640, 26725, 26776, 26880, + 26922, 27202, 27297, 32768, 32770, 32773, 32776, 32785, 32788, 32793, 32800, 32805, 32833, 32836, 32848, 32850, + 32853, 32856, 32865, 32896, 32901, 32913, 32916, 33025, 33028, 33033, 33040, 33042, 33045, 33048, 33057, 33060, + 33088, 33090, 33093, 33096, 33105, 33108, 33153, 33156, 33168, 33193, 33280, 33285, 33290, 33297, 33300, 33345, + 33348, 33360, 33793, 33796, 33798, 33801, 33808, 33810, 33813, 33816, 33825, 33856, 33858, 33861, 33864, 33873, + 33876, 33888, 33921, 33924, 33936, 34048, 34050, 34053, 34056, 34065, 34068, 34080, 34113, 34116, 34128, 34176, + 34186, 34305, 34308, 34320, 34345, 34368, 34816, 34821, 34833, 34836, 34881, 34884, 34896, 34978, 35073, 35076, + 35136, 35173, 35362, 35416, 35418, 35458, 35490, 36865, 36868, 36873, 36880, 36882, 36885, 36888, 36900, 36928, + 
36930, 36933, 36936, 36945, 36948, 36960, 36993, 36996, 37008, 37120, 37125, 37137, 37140, 37185, 37188, 37200, + 37210, 37377, 37380, 37392, 37440, 37542, 37888, 37890, 37893, 37896, 37905, 37908, 37920, 37953, 37956, 37968, + 38016, 38038, 38145, 38148, 38160, 38208, 38296, 38305, 38400, 38470, 38500, 38913, 38916, 38928, 38950, 38976, + 39081, 39168, 39241, 39250, 39568, 40960, 40965, 40970, 40980, 40994, 41002, 41025, 41028, 41040, 41122, 41130, + 41280, 41317, 41474, 41482, 41506, 41512, 41514, 41602, 41608, 41610, 41640, 41985, 41988, 42000, 42048, 42121, + 42148, 42240, 42265, 42577, 43018, 43048, 43170, 43348, 43398, 43528, 43530, 43552, 43554, 43560, 43656, 43690, + }; + + const int kmap_size = 43692; + //const int nwant = type == GGML_TYPE_IQ1_S ? 3 : 2; + const int nwant = type == GGML_TYPE_IQ1_S || type == GGML_TYPE_IQ1_M ? 3 : type == GGML_TYPE_IQ2_S ? 1 : 2; + const uint16_t * kgrid = type == GGML_TYPE_IQ2_XXS ? kgrid_2bit_256 : + type == GGML_TYPE_IQ2_XS ? kgrid_2bit_512 : + type == GGML_TYPE_IQ1_S || type == GGML_TYPE_IQ1_M ? 
kgrid_1bit_2048 : kgrid_2bit_1024; + uint64_t * kgrid_q2xs; + int * kmap_q2xs; + uint16_t * kneighbors_q2xs; + + //printf("================================================================= %s(grid_size = %d)\n", __func__, grid_size); + uint64_t * the_grid = (uint64_t *)malloc(grid_size*sizeof(uint64_t)); + for (int k = 0; k < grid_size; ++k) { + int8_t * pos = (int8_t *)(the_grid + k); + for (int i = 0; i < 8; ++i) { + int l = (kgrid[k] >> 2*i) & 0x3; + pos[i] = 2*l + 1; + } + } + kgrid_q2xs = the_grid; + iq2_data[gindex].grid = the_grid; + kmap_q2xs = (int *)malloc(kmap_size*sizeof(int)); + iq2_data[gindex].map = kmap_q2xs; + for (int i = 0; i < kmap_size; ++i) kmap_q2xs[i] = -1; + uint64_t aux64; + uint8_t * aux8 = (uint8_t *)&aux64; + for (int i = 0; i < grid_size; ++i) { + aux64 = kgrid_q2xs[i]; + uint16_t index = 0; + for (int k=0; k<8; ++k) { + uint16_t q = (aux8[k] - 1)/2; + index |= (q << 2*k); + } + kmap_q2xs[index] = i; + } + int8_t pos[8]; + int * dist2 = (int *)malloc(2*grid_size*sizeof(int)); + int num_neighbors = 0, num_not_in_map = 0; + for (int i = 0; i < kmap_size; ++i) { + if (kmap_q2xs[i] >= 0) continue; + ++num_not_in_map; + for (int k = 0; k < 8; ++k) { + int l = (i >> 2*k) & 0x3; + pos[k] = 2*l + 1; + } + for (int j = 0; j < grid_size; ++j) { + const int8_t * pg = (const int8_t *)(kgrid_q2xs + j); + int d2 = 0; + for (int k = 0; k < 8; ++k) d2 += (pg[k] - pos[k])*(pg[k] - pos[k]); + dist2[2*j+0] = d2; + dist2[2*j+1] = j; + } + qsort(dist2, grid_size, 2*sizeof(int), iq2_compare_func); + int n = 0; int d2 = dist2[0]; + int nhave = 1; + for (int j = 0; j < grid_size; ++j) { + if (dist2[2*j] > d2) { + if (nhave == nwant) break; + d2 = dist2[2*j]; + ++nhave; + } + ++n; + } + num_neighbors += n; + } + //printf("%s: %d neighbours in total\n", __func__, num_neighbors); + kneighbors_q2xs = (uint16_t *)malloc((num_neighbors + num_not_in_map)*sizeof(uint16_t)); + iq2_data[gindex].neighbours = kneighbors_q2xs; + int counter = 0; + for (int i = 0; i < 
kmap_size; ++i) { + if (kmap_q2xs[i] >= 0) continue; + for (int k = 0; k < 8; ++k) { + int l = (i >> 2*k) & 0x3; + pos[k] = 2*l + 1; + } + for (int j = 0; j < grid_size; ++j) { + const int8_t * pg = (const int8_t *)(kgrid_q2xs + j); + int d2 = 0; + for (int k = 0; k < 8; ++k) d2 += (pg[k] - pos[k])*(pg[k] - pos[k]); + dist2[2*j+0] = d2; + dist2[2*j+1] = j; + } + qsort(dist2, grid_size, 2*sizeof(int), iq2_compare_func); + kmap_q2xs[i] = -(counter + 1); + int d2 = dist2[0]; + uint16_t * start = &kneighbors_q2xs[counter++]; + int n = 0, nhave = 1; + for (int j = 0; j < grid_size; ++j) { + if (dist2[2*j] > d2) { + if (nhave == nwant) break; + d2 = dist2[2*j]; + ++nhave; + } + kneighbors_q2xs[counter++] = dist2[2*j+1]; + ++n; + } + *start = n; + } + free(dist2); +} + +void iq2xs_free_impl(enum ggml_type type) { + GGML_ASSERT(type == GGML_TYPE_IQ2_XXS || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ1_S || type == GGML_TYPE_IQ1_M || type == GGML_TYPE_IQ2_S); + const int gindex = iq2_data_index(type); + if (iq2_data[gindex].grid) { + free(iq2_data[gindex].grid); iq2_data[gindex].grid = NULL; + free(iq2_data[gindex].map); iq2_data[gindex].map = NULL; + free(iq2_data[gindex].neighbours); iq2_data[gindex].neighbours = NULL; + } +} + +static int iq2_find_best_neighbour(const uint16_t * restrict neighbours, const uint64_t * restrict grid, + const float * restrict xval, const float * restrict weight, float scale, int8_t * restrict L) { + int num_neighbors = neighbours[0]; + GGML_ASSERT(num_neighbors > 0); + float best_d2 = FLT_MAX; + int grid_index = -1; + for (int j = 1; j <= num_neighbors; ++j) { + const int8_t * pg = (const int8_t *)(grid + neighbours[j]); + float d2 = 0; + for (int i = 0; i < 8; ++i) { + float q = pg[i]; + float diff = scale*q - xval[i]; + d2 += weight[i]*diff*diff; + } + if (d2 < best_d2) { + best_d2 = d2; grid_index = neighbours[j]; + } + } + GGML_ASSERT(grid_index >= 0); + const int8_t * pg = (const int8_t *)(grid + grid_index); + for (int i = 0; i < 
8; ++i) L[i] = (pg[i] - 1)/2; + return grid_index; +} + +static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict vy, int64_t n, const float * restrict quant_weights) { + + const int gindex = iq2_data_index(GGML_TYPE_IQ2_XXS); + + const uint64_t * kgrid_q2xs = iq2_data[gindex].grid; + const int * kmap_q2xs = iq2_data[gindex].map; + const uint16_t * kneighbors_q2xs = iq2_data[gindex].neighbours; + + GGML_ASSERT(quant_weights && "missing quantization weights"); + GGML_ASSERT(kgrid_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kmap_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kneighbors_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(n%QK_K == 0); + + const int kMaxQ = 3; + + const int64_t nbl = n/QK_K; + + block_iq2_xxs * y = vy; + + float scales[QK_K/32]; + float weight[32]; + float xval[32]; + int8_t L[32]; + int8_t Laux[32]; + float waux[32]; + uint8_t block_signs[4]; + uint32_t q2[2*(QK_K/32)]; + + for (int ibl = 0; ibl < nbl; ++ibl) { + + y[ibl].d = GGML_FP32_TO_FP16(0.f); + memset(q2, 0, QK_K/4); + + float max_scale = 0; + + const float * xbl = x + QK_K*ibl; + float sumx2 = 0; + for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; + float sigma2 = sumx2/QK_K; + + for (int ib = 0; ib < QK_K/32; ++ib) { + const float * xb = xbl + 32*ib; + const float * qw = quant_weights + QK_K*ibl + 32*ib; + for (int i = 0; i < 32; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + for (int i = 0; i < 32; ++i) waux[i] = sqrtf(weight[i]); + for (int k = 0; k < 4; ++k) { + int nflip = 0; + uint8_t s = 0; + for (int i = 0; i < 8; ++i) { + if (xb[8*k + i] >= 0) xval[8*k + i] = xb[8*k + i]; + else { + xval[8*k + i] = -xb[8*k + i]; ++nflip; s |= (1 << i); + } + } + if (nflip%2) { + int imin = 0; float min = weight[8*k+imin]*xb[8*k+imin]*xb[8*k+imin]; + for (int i = 1; i < 8; ++i) { + float ax = weight[8*k+i]*xb[8*k+i]*xb[8*k+i]; + if (ax < min) { + min = ax; imin = i; + } + } + xval[8*k+imin] = 
-xval[8*k+imin]; + s ^= (1 << imin); + } + block_signs[k] = s & 127; + } + float max = xval[0]; + for (int i = 1; i < 32; ++i) max = MAX(max, xval[i]); + if (max < GROUP_MAX_EPS) { + scales[ib] = 0; + memset(L, 0, 32); + continue; + } + float scale = make_qp_quants(32, kMaxQ+1, xval, (uint8_t*)L, weight); + float eff_max = scale*kMaxQ; + float best = 0; + for (int is = -6; is <= 6; ++is) { + float id = (2*kMaxQ-1+is*0.1f)/eff_max; + float this_scale = 1/id; + for (int k = 0; k < 4; ++k) { + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + Laux[8*k+i] = MAX(0, MIN(kMaxQ-1, l)); + } + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= (Laux[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, this_scale, Laux + 8*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 32; ++i) { + float w = weight[i]; + float q = 2*Laux[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { + scale = sumqx/sumq2; best = scale*sumqx; + memcpy(L, Laux, 32); + } + } + if (scale > 0) { + float id = 1/scale; + for (int k = 0; k < 4; ++k) { + uint16_t u = 0; + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + l = MAX(0, MIN(kMaxQ-1, l)); + u |= (l << 2*i); + } + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, scale, L + 8*k); + } + const int8_t * pg = (const int8_t *)(kgrid_q2xs + grid_index); + for (int i = 0; i < 8; ++i) L[8*k+i] = (pg[i] - 1)/2; + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 32; ++i) { + float w = weight[i]; + float q = 2*L[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0) scale = sumqx/sumq2; + } 
+ if (scale < 0) { + // This should never happen, but just in case, flip scale so that it is positive (we use uint's to encode the scale) + // and correspondingly flip quant signs. + scale = -scale; + for (int k = 0; k < 4; ++k) block_signs[k] = (~block_signs[k]) & 127; + } + for (int k = 0; k < 4; ++k) { + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= (L[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + printf("Oops: found point %u not on grid:", u); + for (int i = 0; i < 8; ++i) printf(" %d", L[8*k+i]); + printf("\n"); + GGML_ABORT("fatal error"); + } + q2[2*ib+0] |= ((uint32_t) grid_index << 8*k); + q2[2*ib+1] |= (block_signs[k] << 7*k); + } + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + memset(y[ibl].qs, 0, QK_K/4); + continue; + } + + float d = max_scale/31; + y[ibl].d = GGML_FP32_TO_FP16(d); + float id = 1/d; + for (int ib = 0; ib < QK_K/32; ++ib) { + int l = nearest_int(0.5f*(id*scales[ib]-1)); + l = MAX(0, MIN(15, l)); + q2[2*ib+1] |= ((uint32_t)l << 28); + } + memcpy(y[ibl].qs, q2, QK_K/4); + } +} + +static void quantize_row_iq2_xs_impl(const float * restrict x, void * restrict vy, int64_t n, const float * restrict quant_weights) { + + const int gindex = iq2_data_index(GGML_TYPE_IQ2_XS); + + const uint64_t * kgrid_q2xs = iq2_data[gindex].grid; + const int * kmap_q2xs = iq2_data[gindex].map; + const uint16_t * kneighbors_q2xs = iq2_data[gindex].neighbours; + + GGML_ASSERT(quant_weights && "missing quantization weights"); + GGML_ASSERT(kmap_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kgrid_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kneighbors_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(n%QK_K == 0); + + const int kMaxQ = 3; + + const int64_t nbl = n/QK_K; + + block_iq2_xs * y = vy; + + float scales[QK_K/16]; + float weight[16]; + float xval[16]; + int8_t L[16]; + int8_t Laux[16]; + float waux[16]; + bool 
is_on_grid[2]; + bool is_on_grid_aux[2]; + uint8_t block_signs[2]; + uint16_t q2[2*(QK_K/16)]; + + for (int ibl = 0; ibl < nbl; ++ibl) { + + y[ibl].d = GGML_FP32_TO_FP16(0.f); + memset(q2, 0, QK_K/4); + memset(y[ibl].scales, 0, QK_K/32); + + float max_scale = 0; + + const float * xbl = x + QK_K*ibl; + float sumx2 = 0; + for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; + float sigma2 = sumx2/QK_K; + + for (int ib = 0; ib < QK_K/16; ++ib) { + const float * xb = xbl + 16*ib; + const float * qw = quant_weights + QK_K*ibl + 16*ib; + for (int i = 0; i < 16; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + for (int i = 0; i < 16; ++i) waux[i] = sqrtf(weight[i]); + for (int k = 0; k < 2; ++k) { + int nflip = 0; + uint8_t s = 0; + for (int i = 0; i < 8; ++i) { + if (xb[8*k + i] >= 0) xval[8*k + i] = xb[8*k + i]; + else { + xval[8*k + i] = -xb[8*k + i]; ++nflip; s |= (1 << i); + } + } + if (nflip%2) { + int imin = 0; float min = weight[8*k+imin]*xb[8*k+imin]*xb[8*k+imin]; + for (int i = 1; i < 8; ++i) { + float ax = weight[8*k+i]*xb[8*k+i]*xb[8*k+i]; + if (ax < min) { + min = ax; imin = i; + } + } + xval[8*k+imin] = -xval[8*k+imin]; + s ^= (1 << imin); + } + block_signs[k] = s & 127; + } + float max = xval[0]; + for (int i = 1; i < 16; ++i) max = MAX(max, xval[i]); + if (max < GROUP_MAX_EPS) { + scales[ib] = 0; + memset(L, 0, 16); + continue; + } + float best = 0; + float scale = max/(2*kMaxQ-1); + is_on_grid[0] = is_on_grid[1] = true; + for (int is = -9; is <= 9; ++is) { + float id = (2*kMaxQ-1+is*0.1f)/max; + float this_scale = 1/id; + for (int k = 0; k < 2; ++k) { + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + Laux[8*k+i] = MAX(0, MIN(kMaxQ-1, l)); + } + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= (Laux[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + is_on_grid_aux[k] = true; + if (grid_index < 0) { + is_on_grid_aux[k] = false; + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = 
iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, this_scale, Laux + 8*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 16; ++i) { + float w = weight[i]; + float q = 2*Laux[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { + scale = sumqx/sumq2; best = scale*sumqx; + for (int i = 0; i < 16; ++i) L[i] = Laux[i]; + for (int k = 0; k < 2; ++k) is_on_grid[k] = is_on_grid_aux[k]; + } + } + int n_not_ongrid = 0; + for (int k = 0; k < 2; ++k) if (!is_on_grid[k]) ++n_not_ongrid; + if (n_not_ongrid > 0 && scale > 0) { + float id = 1/scale; + for (int k = 0; k < 2; ++k) { + if (is_on_grid[k]) continue; + uint16_t u = 0; + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + l = MAX(0, MIN(kMaxQ-1, l)); + u |= (l << 2*i); + L[8*k + i] = l; + } + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, scale, L + 8*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 16; ++i) { + float w = weight[i]; + float q = 2*L[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0) scale = sumqx/sumq2; + } + if (scale < 0) { + scale = -scale; + for (int k = 0; k < 2; ++k) block_signs[k] = (~block_signs[k]) & 127; + } + for (int k = 0; k < 2; ++k) { + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= (L[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + printf("Oops: found point %u not on grid:", u); + for (int i = 0; i < 8; ++i) printf(" %d", L[8*k+i]); + printf("\n"); + GGML_ABORT("fatal error"); + } + q2[2*ib+k] = grid_index | (block_signs[k] << 9); + } + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + memset(y[ibl].qs, 0, QK_K/4); + continue; + } + + float d = max_scale/31; + y[ibl].d = GGML_FP32_TO_FP16(d); 
+ float id = 1/d; + for (int ib = 0; ib < QK_K/16; ++ib) { + int l = nearest_int(0.5f*(id*scales[ib]-1)); + l = MAX(0, MIN(15, l)); + if (ib%2 == 0) y[ibl].scales[ib/2] = l; + else y[ibl].scales[ib/2] |= (l << 4); + } + memcpy(y[ibl].qs, q2, QK_K/4); + + } +} + +size_t quantize_iq2_xxs(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + GGML_ASSERT(n_per_row%QK_K == 0); + int64_t nblock = n_per_row/QK_K; + char * qrow = (char *)dst; + for (int64_t row = 0; row < nrow; ++row) { + quantize_row_iq2_xxs_impl(src, qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += nblock*sizeof(block_iq2_xxs); + } + return nrow * nblock * sizeof(block_iq2_xxs); +} + +size_t quantize_iq2_xs(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + GGML_ASSERT(n_per_row%QK_K == 0); + int64_t nblock = n_per_row/QK_K; + char * qrow = (char *)dst; + for (int64_t row = 0; row < nrow; ++row) { + quantize_row_iq2_xs_impl(src, qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += nblock*sizeof(block_iq2_xs); + } + return nrow * nblock * sizeof(block_iq2_xs); +} + +// +// ============================================= 3-bit using D4 lattice +// + +typedef struct { + uint32_t * grid; + int * map; + uint16_t * neighbours; +} iq3_entry_t; + +static iq3_entry_t iq3_data[2] = { + {NULL, NULL, NULL}, + {NULL, NULL, NULL}, +}; + +static inline int iq3_data_index(int grid_size) { + (void)grid_size; + GGML_ASSERT(grid_size == 256 || grid_size == 512); + return grid_size == 256 ? 0 : 1; +} + +static int iq3_compare_func(const void * left, const void * right) { + const int * l = (const int *)left; + const int * r = (const int *)right; + return l[0] < r[0] ? -1 : l[0] > r[0] ? 1 : l[1] < r[1] ? -1 : l[1] > r[1] ? 
1 : 0; +} + +void iq3xs_init_impl(int grid_size) { + const int gindex = iq3_data_index(grid_size); + if (iq3_data[gindex].grid) { + return; + } + static const uint16_t kgrid_256[256] = { + 0, 2, 4, 9, 11, 15, 16, 18, 25, 34, 59, 61, 65, 67, 72, 74, + 81, 85, 88, 90, 97, 108, 120, 128, 130, 132, 137, 144, 146, 153, 155, 159, + 169, 175, 189, 193, 199, 200, 202, 213, 248, 267, 287, 292, 303, 315, 317, 321, + 327, 346, 362, 413, 436, 456, 460, 462, 483, 497, 513, 515, 520, 522, 529, 531, + 536, 538, 540, 551, 552, 576, 578, 585, 592, 594, 641, 643, 648, 650, 657, 664, + 698, 704, 706, 720, 729, 742, 758, 769, 773, 808, 848, 852, 870, 889, 901, 978, + 992, 1024, 1026, 1033, 1035, 1040, 1042, 1046, 1049, 1058, 1089, 1091, 1093, 1096, 1098, 1105, + 1112, 1139, 1143, 1144, 1152, 1154, 1161, 1167, 1168, 1170, 1183, 1184, 1197, 1217, 1224, 1228, + 1272, 1276, 1309, 1323, 1347, 1367, 1377, 1404, 1473, 1475, 1486, 1509, 1537, 1544, 1546, 1553, + 1555, 1576, 1589, 1594, 1600, 1602, 1616, 1625, 1636, 1638, 1665, 1667, 1672, 1685, 1706, 1722, + 1737, 1755, 1816, 1831, 1850, 1856, 1862, 1874, 1901, 1932, 1950, 1971, 2011, 2032, 2052, 2063, + 2077, 2079, 2091, 2095, 2172, 2192, 2207, 2208, 2224, 2230, 2247, 2277, 2308, 2345, 2356, 2389, + 2403, 2424, 2501, 2504, 2506, 2520, 2570, 2593, 2616, 2624, 2630, 2646, 2669, 2700, 2714, 2746, + 2754, 2795, 2824, 2835, 2839, 2874, 2882, 2905, 2984, 3028, 3042, 3092, 3108, 3110, 3124, 3153, + 3185, 3215, 3252, 3288, 3294, 3364, 3397, 3434, 3483, 3523, 3537, 3587, 3589, 3591, 3592, 3610, + 3626, 3670, 3680, 3722, 3749, 3754, 3776, 3789, 3803, 3824, 3857, 3873, 3904, 3906, 3924, 3992, + }; + static const uint16_t kgrid_512[512] = { + 0, 1, 2, 5, 7, 8, 9, 10, 12, 14, 16, 17, 21, 27, 32, 34, + 37, 39, 41, 43, 48, 50, 57, 60, 63, 64, 65, 66, 68, 72, 73, 77, + 80, 83, 87, 89, 93, 100, 113, 117, 122, 128, 129, 133, 135, 136, 139, 142, + 145, 149, 152, 156, 162, 165, 167, 169, 171, 184, 187, 195, 201, 205, 208, 210, + 217, 219, 222, 228, 232, 234, 
247, 249, 253, 256, 267, 271, 273, 276, 282, 288, + 291, 297, 312, 322, 324, 336, 338, 342, 347, 353, 357, 359, 374, 379, 390, 393, + 395, 409, 426, 441, 448, 450, 452, 464, 466, 470, 475, 488, 492, 512, 513, 514, + 516, 520, 521, 523, 525, 527, 528, 530, 537, 540, 542, 556, 558, 561, 570, 576, + 577, 579, 582, 584, 588, 593, 600, 603, 609, 616, 618, 632, 638, 640, 650, 653, + 655, 656, 660, 666, 672, 675, 685, 688, 698, 705, 708, 711, 712, 715, 721, 727, + 728, 732, 737, 754, 760, 771, 773, 778, 780, 793, 795, 802, 806, 808, 812, 833, + 840, 843, 849, 856, 858, 873, 912, 916, 919, 932, 934, 961, 963, 968, 970, 977, + 989, 993, 1010, 1016, 1024, 1025, 1027, 1029, 1031, 1032, 1034, 1036, 1038, 1041, 1043, 1047, + 1048, 1050, 1057, 1059, 1061, 1064, 1066, 1079, 1080, 1083, 1085, 1088, 1090, 1096, 1099, 1103, + 1106, 1109, 1113, 1116, 1122, 1129, 1153, 1156, 1159, 1169, 1171, 1176, 1183, 1185, 1195, 1199, + 1209, 1212, 1216, 1218, 1221, 1225, 1234, 1236, 1241, 1243, 1250, 1256, 1270, 1281, 1287, 1296, + 1299, 1306, 1309, 1313, 1338, 1341, 1348, 1353, 1362, 1375, 1376, 1387, 1400, 1408, 1410, 1415, + 1425, 1453, 1457, 1477, 1481, 1494, 1496, 1507, 1512, 1538, 1545, 1547, 1549, 1551, 1554, 1561, + 1563, 1565, 1570, 1572, 1575, 1577, 1587, 1593, 1601, 1603, 1605, 1612, 1617, 1619, 1632, 1648, + 1658, 1662, 1664, 1674, 1680, 1690, 1692, 1704, 1729, 1736, 1740, 1745, 1747, 1751, 1752, 1761, + 1763, 1767, 1773, 1787, 1795, 1801, 1806, 1810, 1817, 1834, 1840, 1844, 1857, 1864, 1866, 1877, + 1882, 1892, 1902, 1915, 1934, 1953, 1985, 1987, 2000, 2002, 2013, 2048, 2052, 2058, 2064, 2068, + 2071, 2074, 2081, 2088, 2104, 2114, 2119, 2121, 2123, 2130, 2136, 2141, 2147, 2153, 2157, 2177, + 2179, 2184, 2189, 2193, 2203, 2208, 2223, 2226, 2232, 2244, 2249, 2251, 2256, 2258, 2265, 2269, + 2304, 2306, 2324, 2335, 2336, 2361, 2373, 2375, 2385, 2418, 2443, 2460, 2480, 2504, 2509, 2520, + 2531, 2537, 2562, 2568, 2572, 2578, 2592, 2596, 2599, 2602, 2614, 2620, 2625, 2627, 2629, 2634, + 
2641, 2650, 2682, 2688, 2697, 2707, 2712, 2718, 2731, 2754, 2759, 2760, 2775, 2788, 2793, 2805, + 2811, 2817, 2820, 2832, 2842, 2854, 2890, 2902, 2921, 2923, 2978, 3010, 3012, 3026, 3081, 3083, + 3085, 3097, 3099, 3120, 3136, 3152, 3159, 3188, 3210, 3228, 3234, 3245, 3250, 3256, 3264, 3276, + 3281, 3296, 3349, 3363, 3378, 3392, 3395, 3420, 3440, 3461, 3488, 3529, 3531, 3584, 3588, 3591, + 3600, 3602, 3614, 3616, 3628, 3634, 3650, 3657, 3668, 3683, 3685, 3713, 3716, 3720, 3726, 3729, + 3736, 3753, 3778, 3802, 3805, 3819, 3841, 3845, 3851, 3856, 3880, 3922, 3938, 3970, 3993, 4032, + }; + + const int kmap_size = 4096; + const int nwant = grid_size == 256 ? 2 : 3; + const uint16_t * kgrid = grid_size == 256 ? kgrid_256 : kgrid_512; + uint32_t * kgrid_q3xs; + int * kmap_q3xs; + uint16_t * kneighbors_q3xs; + + //printf("================================================================= %s(grid_size = %d)\n", __func__, grid_size); + uint32_t * the_grid = (uint32_t *)malloc(grid_size*sizeof(uint32_t)); + for (int k = 0; k < grid_size; ++k) { + int8_t * pos = (int8_t *)(the_grid + k); + for (int i = 0; i < 4; ++i) { + int l = (kgrid[k] >> 3*i) & 0x7; + pos[i] = 2*l + 1; + } + } + kgrid_q3xs = the_grid; + iq3_data[gindex].grid = the_grid; + kmap_q3xs = (int *)malloc(kmap_size*sizeof(int)); + iq3_data[gindex].map = kmap_q3xs; + for (int i = 0; i < kmap_size; ++i) kmap_q3xs[i] = -1; + uint32_t aux32; + uint8_t * aux8 = (uint8_t *)&aux32; + for (int i = 0; i < grid_size; ++i) { + aux32 = kgrid_q3xs[i]; + uint16_t index = 0; + for (int k=0; k<4; ++k) { + uint16_t q = (aux8[k] - 1)/2; + index |= (q << 3*k); + } + kmap_q3xs[index] = i; + } + int8_t pos[4]; + int * dist2 = (int *)malloc(2*grid_size*sizeof(int)); + int num_neighbors = 0, num_not_in_map = 0; + for (int i = 0; i < kmap_size; ++i) { + if (kmap_q3xs[i] >= 0) continue; + ++num_not_in_map; + for (int k = 0; k < 4; ++k) { + int l = (i >> 3*k) & 0x7; + pos[k] = 2*l + 1; + } + for (int j = 0; j < grid_size; ++j) { + const 
int8_t * pg = (const int8_t *)(kgrid_q3xs + j); + int d2 = 0; + for (int k = 0; k < 4; ++k) d2 += (pg[k] - pos[k])*(pg[k] - pos[k]); + dist2[2*j+0] = d2; + dist2[2*j+1] = j; + } + qsort(dist2, grid_size, 2*sizeof(int), iq3_compare_func); + int n = 0; int d2 = dist2[0]; + int nhave = 1; + for (int j = 0; j < grid_size; ++j) { + if (dist2[2*j] > d2) { + if (nhave == nwant) break; + d2 = dist2[2*j]; + ++nhave; + } + ++n; + } + num_neighbors += n; + } + //printf("%s: %d neighbours in total\n", __func__, num_neighbors); + kneighbors_q3xs = (uint16_t *)malloc((num_neighbors + num_not_in_map)*sizeof(uint16_t)); + iq3_data[gindex].neighbours = kneighbors_q3xs; + int counter = 0; + for (int i = 0; i < kmap_size; ++i) { + if (kmap_q3xs[i] >= 0) continue; + for (int k = 0; k < 4; ++k) { + int l = (i >> 3*k) & 0x7; + pos[k] = 2*l + 1; + } + for (int j = 0; j < grid_size; ++j) { + const int8_t * pg = (const int8_t *)(kgrid_q3xs + j); + int d2 = 0; + for (int k = 0; k < 4; ++k) d2 += (pg[k] - pos[k])*(pg[k] - pos[k]); + dist2[2*j+0] = d2; + dist2[2*j+1] = j; + } + qsort(dist2, grid_size, 2*sizeof(int), iq3_compare_func); + kmap_q3xs[i] = -(counter + 1); + int d2 = dist2[0]; + uint16_t * start = &kneighbors_q3xs[counter++]; + int n = 0, nhave = 1; + for (int j = 0; j < grid_size; ++j) { + if (dist2[2*j] > d2) { + if (nhave == nwant) break; + d2 = dist2[2*j]; + ++nhave; + } + kneighbors_q3xs[counter++] = dist2[2*j+1]; + ++n; + } + *start = n; + } + free(dist2); +} + +void iq3xs_free_impl(int grid_size) { + GGML_ASSERT(grid_size == 256 || grid_size == 512); + const int gindex = iq3_data_index(grid_size); + if (iq3_data[gindex].grid) { + free(iq3_data[gindex].grid); iq3_data[gindex].grid = NULL; + free(iq3_data[gindex].map); iq3_data[gindex].map = NULL; + free(iq3_data[gindex].neighbours); iq3_data[gindex].neighbours = NULL; + } +} + +static int iq3_find_best_neighbour(const uint16_t * restrict neighbours, const uint32_t * restrict grid, + const float * restrict xval, const float * 
restrict weight, float scale, int8_t * restrict L) { + int num_neighbors = neighbours[0]; + GGML_ASSERT(num_neighbors > 0); + float best_d2 = FLT_MAX; + int grid_index = -1; + for (int j = 1; j <= num_neighbors; ++j) { + const int8_t * pg = (const int8_t *)(grid + neighbours[j]); + float d2 = 0; + for (int i = 0; i < 4; ++i) { + float q = pg[i]; + float diff = scale*q - xval[i]; + d2 += weight[i]*diff*diff; + } + if (d2 < best_d2) { + best_d2 = d2; grid_index = neighbours[j]; + } + } + GGML_ASSERT(grid_index >= 0); + const int8_t * pg = (const int8_t *)(grid + grid_index); + for (int i = 0; i < 4; ++i) L[i] = (pg[i] - 1)/2; + return grid_index; +} + +static void quantize_row_iq3_xxs_impl(int grid_size, const float * restrict x, void * restrict vy, int64_t n, + const float * restrict quant_weights) { + + const int gindex = iq3_data_index(grid_size); + + const uint32_t * kgrid_q3xs = iq3_data[gindex].grid; + const int * kmap_q3xs = iq3_data[gindex].map; + const uint16_t * kneighbors_q3xs = iq3_data[gindex].neighbours; + + //GGML_ASSERT(quant_weights && "missing quantization weights"); + GGML_ASSERT(kgrid_q3xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kmap_q3xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kneighbors_q3xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(n%QK_K == 0); + + const int kMaxQ = 8; + + const int64_t nbl = n/QK_K; + + ggml_fp16_t * dh; + uint8_t * qs; + int block_size; + if (grid_size == 256) { + block_iq3_xxs * y = vy; + dh = &y->d; + qs = y->qs; + block_size = sizeof(block_iq3_xxs); + } else { + block_iq3_s * y = vy; + dh = &y->d; + qs = y->qs; + block_size = sizeof(block_iq3_s); + } + int quant_size = block_size - sizeof(ggml_fp16_t); + + float scales[QK_K/32]; + float weight[32]; + float xval[32]; + int8_t L[32]; + int8_t Laux[32]; + float waux[32]; + bool is_on_grid[8]; + bool is_on_grid_aux[8]; + uint8_t block_signs[8]; + uint8_t q3[3*(QK_K/8)+QK_K/32]; + uint32_t * scales_and_signs = (uint32_t *)(q3 
+ QK_K/4); + uint8_t * qh = q3 + 3*(QK_K/8); + + for (int ibl = 0; ibl < nbl; ++ibl) { + + dh[0] = GGML_FP32_TO_FP16(0.f); + memset(q3, 0, 3*QK_K/8+QK_K/32); + + float max_scale = 0; + + const float * xbl = x + QK_K*ibl; + float sumx2 = 0; + for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; + float sigma2 = 2*sumx2/QK_K; + + for (int ib = 0; ib < QK_K/32; ++ib) { + const float * xb = xbl + 32*ib; + if (quant_weights) { + const float * qw = quant_weights + QK_K*ibl + 32*ib; + for (int i = 0; i < 32; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + } else { + for (int i = 0; i < 32; ++i) weight[i] = xb[i]*xb[i]; + } + for (int i = 0; i < 32; ++i) waux[i] = sqrtf(weight[i]); + for (int k = 0; k < 4; ++k) { + int nflip = 0; + uint8_t s = 0; + for (int i = 0; i < 8; ++i) { + if (xb[8*k + i] >= 0) xval[8*k + i] = xb[8*k + i]; + else { + xval[8*k + i] = -xb[8*k + i]; ++nflip; s |= (1 << i); + } + } + if (nflip%2) { + int imin = 0; float min = weight[8*k+imin]*xb[8*k+imin]*xb[8*k+imin]; + for (int i = 1; i < 8; ++i) { + float ax = weight[8*k+i]*xb[8*k+i]*xb[8*k+i]; + if (ax < min) { + min = ax; imin = i; + } + } + xval[8*k+imin] = -xval[8*k+imin]; + s ^= (1 << imin); + } + block_signs[k] = s & 127; + } + float max = xval[0]; + for (int i = 1; i < 32; ++i) max = MAX(max, xval[i]); + if (max < GROUP_MAX_EPS_IQ3_XXS) { + scales[ib] = 0; + memset(L, 0, 32); + continue; + } + float best = 0; + float scale = max/(2*kMaxQ-1); + for (int is = -15; is <= 15; ++is) { + float id = (2*kMaxQ-1+is*0.2f)/max; + float this_scale = 1/id; + for (int k = 0; k < 8; ++k) { + for (int i = 0; i < 4; ++i) { + int l = nearest_int(0.5f*(id*xval[4*k+i]-1)); + Laux[4*k+i] = MAX(0, MIN(kMaxQ-1, l)); + } + uint16_t u = 0; + for (int i = 0; i < 4; ++i) u |= (Laux[4*k+i] << 3*i); + int grid_index = kmap_q3xs[u]; + is_on_grid_aux[k] = true; + if (grid_index < 0) { + is_on_grid_aux[k] = false; + const uint16_t * neighbours = kneighbors_q3xs - kmap_q3xs[u] - 1; + grid_index = 
iq3_find_best_neighbour(neighbours, kgrid_q3xs, xval + 4*k, waux + 4*k, this_scale, Laux + 4*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 32; ++i) { + float w = weight[i]; + float q = 2*Laux[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { + scale = sumqx/sumq2; best = scale*sumqx; + for (int i = 0; i < 32; ++i) L[i] = Laux[i]; + for (int k = 0; k < 8; ++k) is_on_grid[k] = is_on_grid_aux[k]; + } + } + int n_not_ongrid = 0; + for (int k = 0; k < 8; ++k) if (!is_on_grid[k]) ++n_not_ongrid; + if (n_not_ongrid > 0 && scale > 0) { + float id = 1/scale; + for (int k = 0; k < 8; ++k) { + if (is_on_grid[k]) continue; + uint16_t u = 0; + for (int i = 0; i < 4; ++i) { + int l = nearest_int(0.5f*(id*xval[4*k+i]-1)); + l = MAX(0, MIN(kMaxQ-1, l)); + u |= (l << 3*i); + } + int grid_index = kmap_q3xs[u]; + if (grid_index < 0) { + const uint16_t * neighbours = kneighbors_q3xs - kmap_q3xs[u] - 1; + grid_index = iq3_find_best_neighbour(neighbours, kgrid_q3xs, xval + 4*k, waux + 4*k, scale, L + 4*k); + } + const int8_t * pg = (const int8_t *)(kgrid_q3xs + grid_index); + for (int i = 0; i < 4; ++i) L[4*k+i] = (pg[i] - 1)/2; + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 32; ++i) { + float w = weight[i]; + float q = 2*L[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0) scale = sumqx/sumq2; + } + if (scale < 0) { + // This should never happen, but just in case, flip scale so that it is positive (we use uint's to encode the scale) + // and correspondingly flip quant signs. 
+ scale = -scale; + for (int k = 0; k < 4; ++k) block_signs[k] = (~block_signs[k]) & 127; + } + for (int k = 0; k < 8; ++k) { + uint16_t u = 0; + for (int i = 0; i < 4; ++i) u |= (L[4*k+i] << 3*i); + int grid_index = kmap_q3xs[u]; + if (grid_index < 0) { + printf("Oops: found point %u not on grid:", u); + for (int i = 0; i < 4; ++i) printf(" %d", L[4*k+i]); + printf("\n"); + GGML_ABORT("fatal error"); + } + if (grid_size == 256) { + q3[8*ib+k] = grid_index; + } else { + q3[8*ib+k] = grid_index & 255; + qh[ib] |= ((grid_index >> 8) << k); + } + + } + scales_and_signs[ib] = block_signs[0] | (block_signs[1] << 7) | (block_signs[2] << 14) | (block_signs[3] << 21); + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + memset(qs, 0, quant_size); + dh += block_size/sizeof(ggml_fp16_t); + qs += block_size; + continue; + } + + float d = max_scale/31; + dh[0] = GGML_FP32_TO_FP16(d * 1.0125f); // small improvement via this fudge factor + float id = 1/d; + for (int ib = 0; ib < QK_K/32; ++ib) { + int l = nearest_int(0.5f*(id*scales[ib]-1)); + l = MAX(0, MIN(15, l)); + scales_and_signs[ib] |= ((uint32_t)l << 28); + } + memcpy(qs, q3, quant_size); + + dh += block_size/sizeof(ggml_fp16_t); + qs += block_size; + + } +} + +size_t quantize_iq3_xxs(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + GGML_ASSERT(n_per_row%QK_K == 0); + int64_t nblock = n_per_row/QK_K; + char * qrow = (char *)dst; + for (int64_t row = 0; row < nrow; ++row) { + quantize_row_iq3_xxs_impl(256, src, qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += nblock*sizeof(block_iq3_xxs); + } + return nrow * nblock * sizeof(block_iq3_xxs); +} + +void quantize_row_iq3_xxs(const float * restrict x, void * restrict vy, int64_t k) { + assert(k % QK_K == 0); + block_iq3_xxs * restrict y = vy; + quantize_row_iq3_xxs_ref(x, y, k); +} + +void quantize_row_iq3_xxs_ref(const float * restrict 
x, block_iq3_xxs * restrict y, int64_t k) { + assert(k % QK_K == 0); + quantize_row_iq3_xxs_impl(256, x, y, k, NULL); +} + +static void quantize_row_iq3_s_impl(int block_size, const float * restrict x, void * restrict vy, int n, + const float * restrict quant_weights, + float * scales, + float * weight, + float * xval, + int8_t * L, + int8_t * Laux, + float * waux, + bool * is_on_grid, + bool * is_on_grid_aux, + uint8_t * block_signs) { + + const int gindex = iq3_data_index(512); + + const uint32_t * kgrid_q3xs = iq3_data[gindex].grid; + const int * kmap_q3xs = iq3_data[gindex].map; + const uint16_t * kneighbors_q3xs = iq3_data[gindex].neighbours; + + //GGML_ASSERT(quant_weights && "missing quantization weights"); + GGML_ASSERT(kgrid_q3xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kmap_q3xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kneighbors_q3xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(n%QK_K == 0); + + const int kMaxQ = 8; + + const int64_t nbl = n/QK_K; + + block_iq3_s * y = vy; + + const int bs4 = block_size/4; + const int bs8 = block_size/8; + + for (int ibl = 0; ibl < nbl; ++ibl) { + + memset(&y[ibl], 0, sizeof(block_iq3_s)); + y[ibl].d = GGML_FP32_TO_FP16(0.f); + + uint8_t * qs = y[ibl].qs; + uint8_t * qh = y[ibl].qh; + uint8_t * signs = y[ibl].signs; + + float max_scale = 0; + + const float * xbl = x + QK_K*ibl; + float sumx2 = 0; + for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; + float sigma2 = 2*sumx2/QK_K; + + for (int ib = 0; ib < QK_K/block_size; ++ib) { + const float * xb = xbl + block_size*ib; + if (quant_weights) { + const float * qw = quant_weights + QK_K*ibl + block_size*ib; + for (int i = 0; i < block_size; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + } else { + for (int i = 0; i < block_size; ++i) weight[i] = xb[i]*xb[i]; + } + for (int i = 0; i < block_size; ++i) waux[i] = sqrtf(weight[i]); + for (int k = 0; k < bs8; ++k) { + uint8_t s = 0; + for (int i = 0; i < 8; ++i) { + if 
(xb[8*k + i] >= 0) xval[8*k + i] = xb[8*k + i]; + else { + xval[8*k + i] = -xb[8*k + i]; s |= (1 << i); + } + } + block_signs[k] = s; + } + float max = xval[0]; + for (int i = 1; i < block_size; ++i) max = MAX(max, xval[i]); + if (!max) { + scales[ib] = 0; + continue; + } + float best = 0; + float scale = max/(2*kMaxQ-1); + for (int k = 0; k < bs4; ++k) is_on_grid[k] = false; + for (int is = -9; is <= 9; ++is) { + float id = (2*kMaxQ-1+is*0.2f)/max; + float this_scale = 1/id; + for (int k = 0; k < bs4; ++k) { + for (int i = 0; i < 4; ++i) { + int l = nearest_int(0.5f*(id*xval[4*k+i]-1)); + Laux[4*k+i] = MAX(0, MIN(kMaxQ-1, l)); + } + uint16_t u = 0; + for (int i = 0; i < 4; ++i) u |= (Laux[4*k+i] << 3*i); + int grid_index = kmap_q3xs[u]; + is_on_grid_aux[k] = true; + if (grid_index < 0) { + is_on_grid_aux[k] = false; + const uint16_t * neighbours = kneighbors_q3xs - kmap_q3xs[u] - 1; + grid_index = iq3_find_best_neighbour(neighbours, kgrid_q3xs, xval + 4*k, waux + 4*k, this_scale, Laux + 4*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < block_size; ++i) { + float w = weight[i]; + float q = 2*Laux[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { + scale = sumqx/sumq2; best = scale*sumqx; + for (int i = 0; i < block_size; ++i) L[i] = Laux[i]; + for (int k = 0; k < bs4; ++k) is_on_grid[k] = is_on_grid_aux[k]; + } + } + int n_not_ongrid = 0; + for (int k = 0; k < bs4; ++k) if (!is_on_grid[k]) ++n_not_ongrid; + if (n_not_ongrid > 0 && scale > 0) { + float id = 1/scale; + for (int k = 0; k < bs4; ++k) { + //if (is_on_grid[k]) continue; + uint16_t u = 0; + for (int i = 0; i < 4; ++i) { + int l = nearest_int(0.5f*(id*xval[4*k+i]-1)); + l = MAX(0, MIN(kMaxQ-1, l)); + u |= (l << 3*i); + } + int grid_index = kmap_q3xs[u]; + if (grid_index < 0) { + const uint16_t * neighbours = kneighbors_q3xs - kmap_q3xs[u] - 1; + grid_index = iq3_find_best_neighbour(neighbours, kgrid_q3xs, xval + 4*k, waux + 4*k, scale, L + 
4*k); + } + const int8_t * pg = (const int8_t *)(kgrid_q3xs + grid_index); + for (int i = 0; i < 4; ++i) L[4*k+i] = (pg[i] - 1)/2; + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < block_size; ++i) { + float w = weight[i]; + float q = 2*L[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0) scale = sumqx/sumq2; + } + if (scale < 0) { + // This should never happen, but just in case, flip scale so that it is positive (we use uint's to encode the scale) + // and correspondingly flip quant signs. + scale = -scale; + for (int k = 0; k < bs8; ++k) block_signs[k] = ~block_signs[k]; + } + for (int k = 0; k < bs4; ++k) { + uint16_t u = 0; + for (int i = 0; i < 4; ++i) u |= (L[4*k+i] << 3*i); + int grid_index = kmap_q3xs[u]; + if (grid_index < 0) { + printf("Oops: found point %u not on grid:", u); + for (int i = 0; i < 4; ++i) printf(" %d", L[4*k+i]); + printf("\n"); + GGML_ABORT("fatal error"); + } + qs[k] = grid_index & 255; + qh[(ib*bs4+k)/8] |= ((grid_index >> 8) << ((ib*bs4+k)%8)); + } + qs += bs4; + for (int k = 0; k < bs8; ++k) signs[k] = block_signs[k]; + signs += bs8; + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + continue; + } + + float d = max_scale/31; + y[ibl].d = GGML_FP32_TO_FP16(d * 1.033f); + float id = 1/d; + for (int ib = 0; ib < QK_K/block_size; ib += 2) { + int l1 = nearest_int(0.5f*(id*scales[ib+0]-1)); + l1 = MAX(0, MIN(15, l1)); + int l2 = nearest_int(0.5f*(id*scales[ib+1]-1)); + l2 = MAX(0, MIN(15, l2)); + y[ibl].scales[ib/2] = l1 | (l2 << 4); + } + + } +} + +#define IQ3S_BLOCK_SIZE 32 +size_t quantize_iq3_s(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + GGML_ASSERT(n_per_row%QK_K == 0); + int64_t nblock = n_per_row/QK_K; + float scales[QK_K/IQ3S_BLOCK_SIZE]; + float weight[IQ3S_BLOCK_SIZE]; + float xval[IQ3S_BLOCK_SIZE]; + int8_t L[IQ3S_BLOCK_SIZE]; + int8_t Laux[IQ3S_BLOCK_SIZE]; + float 
waux[IQ3S_BLOCK_SIZE]; + bool is_on_grid[IQ3S_BLOCK_SIZE/4]; + bool is_on_grid_aux[IQ3S_BLOCK_SIZE/4]; + uint8_t block_signs[IQ3S_BLOCK_SIZE/8]; + char * qrow = (char *)dst; + for (int64_t row = 0; row < nrow; ++row) { + quantize_row_iq3_s_impl(IQ3S_BLOCK_SIZE, src, qrow, n_per_row, quant_weights, + scales, weight, xval, L, Laux, waux, is_on_grid, is_on_grid_aux, block_signs); + src += n_per_row; + qrow += nblock*sizeof(block_iq3_s); + } + return nrow * nblock * sizeof(block_iq3_s); +} + +void quantize_row_iq3_s(const float * restrict x, void * restrict vy, int64_t k) { + assert(k % QK_K == 0); + block_iq3_s * restrict y = vy; + quantize_row_iq3_s_ref(x, y, k); +} + +void quantize_row_iq3_s_ref(const float * restrict x, block_iq3_s * restrict y, int64_t k) { + assert(k % QK_K == 0); + quantize_iq3_s(x, y, 1, k, NULL); +} + + +// =================================== 1.5 bpw =================================================== + +static int iq1_find_best_neighbour(const uint16_t * restrict neighbours, const uint64_t * restrict grid, + const float * restrict xval, const float * restrict weight, float * scale, int8_t * restrict L, int ngrid) { + int num_neighbors = neighbours[0]; + GGML_ASSERT(num_neighbors > 0); + float best_score = -FLT_MAX; + int grid_index = -1; + for (int j = 1; j <= num_neighbors; ++j) { + const int8_t * pg = (const int8_t *)(grid + neighbours[j]); + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 8; ++i) { + float q = (pg[i] - 3)/2; + float w = weight[i]; + sumqx += w*q*xval[i]; + sumq2 += w*q*q; + } + if (sumqx > 0 && sumq2 > 0 && sumqx*sumqx > best_score*sumq2) { + *scale = sumqx/sumq2; best_score = *scale * sumqx; + grid_index = neighbours[j]; + } + } + if (grid_index < 0) { + for (int i = 0; i < ngrid; ++i) { + const int8_t * grid_i = (const int8_t *)(grid + i); + float sumqx = 0, sumq2 = 0; + for (int j = 0; j < 8; ++j) { + float w = weight[j]; + float q = (grid_i[j] - 3)/2; + sumqx += w*q*xval[j]; + sumq2 += w*q*q; + } + if (sumqx > 0 && 
sumq2 > 0 && sumqx*sumqx > best_score*sumq2) { + *scale = sumqx/sumq2; best_score = *scale*sumqx; + grid_index = i; + } + } + } + if (grid_index < 0) { + printf("Oops, did not find grid point\n"); + printf("Have %d neighbours\n", num_neighbors); + for (int j = 1; j <= num_neighbors; ++j) { + const int8_t * pg = (const int8_t *)(grid + neighbours[j]); + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 8; ++i) { + float q = (pg[i] - 3)/2; + float w = weight[i]; + sumqx += w*q*xval[i]; + sumq2 += w*q*q; + } + printf(" neighbour %d: sumqx = %g sumq2 = %g\n", j, (double)sumqx, (double)sumq2); + } + } + GGML_ASSERT(grid_index >= 0); + //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + *scale *= 1.05f; // This is a fudge factor. Don't ask me why it improves the result. + //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + const int8_t * pg = (const int8_t *)(grid + grid_index); + for (int i = 0; i < 8; ++i) L[i] = (pg[i] - 1)/2; + return grid_index; +} + +static int iq1_find_best_neighbour2(const uint16_t * restrict neighbours, const uint64_t * restrict grid, + const float * restrict xval, const float * restrict weight, float scale, const float * restrict xg, int8_t * restrict L, int ngrid) { + int num_neighbors = neighbours[0]; + GGML_ASSERT(num_neighbors > 0); + float best_score = FLT_MAX; + int grid_index = -1; + for (int j = 1; j <= num_neighbors; ++j) { + const int8_t * pg = (const int8_t *)(grid + neighbours[j]); + float d2 = 0; + for (int i = 0; i < 8; ++i) { + float q = xg[(pg[i] - 1)/2]; + float w = weight[i]; + float diff = scale*q - xval[i]; + d2 += w*diff*diff; + } + if (d2 < best_score) { + best_score = d2; + grid_index = neighbours[j]; + } + } + if (grid_index < 0) { + for (int i = 0; i < ngrid; ++i) { + const int8_t * grid_i = (const int8_t *)(grid + i); + float d2 = 0; + for (int j = 0; j < 8; ++j) { + float w = weight[j]; + float q = xg[(grid_i[j] - 1)/2]; + float diff = scale*q - 
xval[i]; + d2 += w*diff*diff; + } + if (d2 < best_score) { + best_score = d2; + grid_index = i; + } + } + } + if (grid_index < 0) { + printf("Oops, did not find grid point\n"); + printf("Have %d neighbours\n", num_neighbors); + for (int j = 1; j <= num_neighbors; ++j) { + const int8_t * pg = (const int8_t *)(grid + neighbours[j]); + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 8; ++i) { + float q = xg[(pg[i] - 1)/2]; + float w = weight[i]; + sumqx += w*q*xval[i]; + sumq2 += w*q*q; + } + printf(" neighbour %d: sumqx = %g sumq2 = %g\n", j, (double)sumqx, (double)sumq2); + } + } + GGML_ASSERT(grid_index >= 0); + const int8_t * pg = (const int8_t *)(grid + grid_index); + for (int i = 0; i < 8; ++i) L[i] = (pg[i] - 1)/2; + return grid_index; +} + +static int iq1_sort_helper(const void * left, const void * right) { + const float * l = left; + const float * r = right; + return *l < *r ? -1 : *l > *r ? 1 : 0; +} + +#define IQ1S_BLOCK_SIZE 32 +#define IQ1M_BLOCK_SIZE 16 +static void quantize_row_iq1_s_impl(const float * restrict x, void * restrict vy, int64_t n, const float * restrict quant_weights, + float * scales, + float * weight, + float * sumx, + float * sumw, + float * pairs, + int8_t * L, + uint16_t * index, + int8_t * shifts) { + + const int gindex = iq2_data_index(GGML_TYPE_IQ1_S); + + const uint64_t * kgrid_q2xs = iq2_data[gindex].grid; + const int * kmap_q2xs = iq2_data[gindex].map; + const uint16_t * kneighbors_q2xs = iq2_data[gindex].neighbours; + + GGML_ASSERT(quant_weights && "missing quantization weights"); + GGML_ASSERT(kgrid_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kmap_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kneighbors_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(n%QK_K == 0); + + block_iq1_s * y = vy; + + const int64_t nbl = n/QK_K; + + const int block_size = IQ1S_BLOCK_SIZE; + + const float x_p[3] = {-1 + IQ1S_DELTA, IQ1S_DELTA, 1 + IQ1S_DELTA}; + const float x_m[3] = {-1 - IQ1S_DELTA, 
-IQ1S_DELTA, 1 - IQ1S_DELTA}; + + + int * idx = (int *)(pairs + 1); + + for (int ibl = 0; ibl < nbl; ++ibl) { + + y[ibl].d = GGML_FP32_TO_FP16(0.f); + memset(y[ibl].qs, 0, QK_K/8); + memset(y[ibl].qh, 0, QK_K/16); + + float max_scale = 0; + + const float * xbl = x + QK_K*ibl; + float sumx2 = 0; + for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; + float sigma2 = 2*sumx2/QK_K; + + for (int ib = 0; ib < QK_K/block_size; ++ib) { + const float * xb = xbl + block_size*ib; + const float * qw = quant_weights + QK_K*ibl + block_size*ib; + for (int i = 0; i < block_size; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + float max = fabsf(xb[0]); + for (int i = 1; i < block_size; ++i) max = MAX(max, fabsf(xb[i])); + if (max < GROUP_MAX_EPS_IQ1_S) { + scales[ib] = 0; + memset(L, 1, block_size); + continue; + } + // Here we solve exactly the sum of squared difference (SSD) weighted minimization problem. + // With just 3 allowed quant values (-1, 0, 1), we can search exhaustively for the two + // boundaries that split the weights xb[i] into 3 groups. To do so, we sort the weights + // in ascending order, compute Si = sum[weight[j] xb[j], j = 0...i] and + // Wi = sum[weight[j], j = 0...i], and use these to quckly get get the optimum scale + // for each possible and score for each split. 
+ for (int j = 0; j < block_size; ++j) { + pairs[2*j] = xb[j]; + idx[2*j] = j; + } + qsort(pairs, block_size, 2*sizeof(float), iq1_sort_helper); + { + sumx[0] = sumw[0] = 0; + for (int j = 0; j < block_size; ++j) { + int i = idx[2*j]; + sumx[j+1] = sumx[j] + weight[i]*xb[i]; + sumw[j+1] = sumw[j] + weight[i]; + } + } + float best_score = -FLT_MIN, scale = max; + int besti1 = -1, besti2 = -1, best_shift = 0; + for (int i1 = 0; i1 <= block_size; ++i1) { + for (int i2 = i1; i2 <= block_size; ++i2) { + float sumqx = (sumx[i1] - sumx[0])*x_p[0] + (sumx[i2] - sumx[i1])*x_p[1] + (sumx[block_size] - sumx[i2])*x_p[2]; + float sumq2 = (sumw[i1] - sumw[0])*x_p[0]*x_p[0] + (sumw[i2] - sumw[i1])*x_p[1]*x_p[1] + (sumw[block_size] - sumw[i2])*x_p[2]*x_p[2]; + if (sumq2 > 0 && sumqx*sumqx > best_score*sumq2) { + scale = sumqx/sumq2; best_score = scale*sumqx; + besti1 = i1; besti2 = i2; best_shift = 1; + } + sumqx = (sumx[i1] - sumx[0])*x_m[0] + (sumx[i2] - sumx[i1])*x_m[1] + (sumx[block_size] - sumx[i2])*x_m[2]; + sumq2 = (sumw[i1] - sumw[0])*x_m[0]*x_m[0] + (sumw[i2] - sumw[i1])*x_m[1]*x_m[1] + (sumw[block_size] - sumw[i2])*x_m[2]*x_m[2]; + if (sumq2 > 0 && sumqx*sumqx > best_score*sumq2) { + scale = sumqx/sumq2; best_score = scale*sumqx; + besti1 = i1; besti2 = i2; best_shift = -1; + } + } + } + GGML_ASSERT(besti1 >= 0 && besti2 >= 0 && best_shift != 0); + for (int j = 0; j < besti1; ++j) L[idx[2*j]] = 0; + for (int j = besti1; j < besti2; ++j) L[idx[2*j]] = 1; + for (int j = besti2; j < block_size; ++j) L[idx[2*j]] = 2; + if (scale < 0) { + for (int j = 0; j < block_size; ++j) L[j] = 2 - L[j]; + scale = -scale; best_shift = -best_shift; + } + bool all_on_grid = true; + const float * xx = best_shift == 1 ? 
x_p : x_m; + for (int k = 0; k < block_size/8; ++k) { + uint16_t u = 0; + for (int j = 0; j < 8; ++j) u |= (L[8*k+j] << 2*j); + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + all_on_grid = false; + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq1_find_best_neighbour2(neighbours, kgrid_q2xs, xb + 8*k, weight + 8*k, scale, xx, L + 8*k, NGRID_IQ1S); + GGML_ASSERT(grid_index >= 0); + } + index[k] = grid_index; + } + if (!all_on_grid) { + float sumqx = 0, sumq2 = 0; + for (int k = 0; k < block_size/8; ++k) { + const int8_t * pg = (const int8_t *)(kgrid_q2xs + index[k]); + for (int j = 0; j < 8; ++j) { + float w = weight[8*k + j]; + float q = xx[(pg[j] - 1)/2]; + sumqx += w*q*xb[8*k+j]; + sumq2 += w*q*q; + } + } + if (sumqx > 0 && sumq2 > 0) scale = sumqx/sumq2; + } + uint16_t h = 0; + for (int k = 0; k < block_size/8; ++k) { + y[ibl].qs[(block_size/8)*ib + k] = index[k] & 255; + h |= (index[k] >> 8) << 3*k; + } + y[ibl].qh[ib] = h; + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + shifts[ib] = best_shift; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + continue; + } + + float d = max_scale/15; + y[ibl].d = GGML_FP32_TO_FP16(d*1.125f); // 1.125f is another fudge factor. Don't ask me why it is needed. 
+ float id = 1/d; + for (int ib = 0; ib < QK_K/block_size; ++ib) { + int l = nearest_int(0.5f*(id*scales[ib]-1)); + l = MAX(0, MIN(7, l)); + if (shifts[ib] == -1) l |= 8; + y[ibl].qh[ib] |= (l << 12); + } + } +} + +size_t quantize_iq1_s(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + GGML_ASSERT(n_per_row%QK_K == 0); + float scales[QK_K/IQ1S_BLOCK_SIZE]; + float weight[IQ1S_BLOCK_SIZE]; + int8_t L[IQ1S_BLOCK_SIZE]; + float sumx[IQ1S_BLOCK_SIZE+1]; + float sumw[IQ1S_BLOCK_SIZE+1]; + float pairs[2*IQ1S_BLOCK_SIZE]; + uint16_t index[IQ1S_BLOCK_SIZE/8]; + int8_t shifts[QK_K/IQ1S_BLOCK_SIZE]; + int64_t nblock = n_per_row/QK_K; + char * qrow = (char *)dst; + for (int64_t row = 0; row < nrow; ++row) { + quantize_row_iq1_s_impl(src, qrow, n_per_row, quant_weights, scales, weight, sumx, sumw, pairs, L, index, shifts); + src += n_per_row; + qrow += nblock*sizeof(block_iq1_s); + } + return nrow * nblock * sizeof(block_iq1_s); +} + +static void quantize_row_iq1_m_impl(const float * restrict x, void * restrict vy, int64_t n, const float * restrict quant_weights, + float * scales, + float * weight, + float * pairs, + int8_t * L, + uint16_t * index, + int8_t * shifts) { + + const int gindex = iq2_data_index(GGML_TYPE_IQ1_M); + + const uint64_t * kgrid_q2xs = iq2_data[gindex].grid; + const int * kmap_q2xs = iq2_data[gindex].map; + const uint16_t * kneighbors_q2xs = iq2_data[gindex].neighbours; + + //GGML_ASSERT(quant_weights && "missing quantization weights"); + GGML_ASSERT(kgrid_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kmap_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kneighbors_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(n%QK_K == 0); + + block_iq1_m * y = vy; + + const int64_t nbl = n/QK_K; + + const int block_size = IQ1M_BLOCK_SIZE; + + const float x_p[3] = {-1 + IQ1M_DELTA, IQ1M_DELTA, 1 + IQ1M_DELTA}; + const float x_m[3] = {-1 - IQ1M_DELTA, 
-IQ1M_DELTA, 1 - IQ1M_DELTA}; + const uint8_t masks[4] = {0x00, 0x80, 0x08, 0x88}; + + int * idx = (int *)(pairs + 1); + + float sumqx[4], sumq2[4]; + + iq1m_scale_t s; + const float * xx; + + for (int ibl = 0; ibl < nbl; ++ibl) { + memset(y[ibl].qs, 0, QK_K/8); + memset(y[ibl].qh, 0, QK_K/16); + memset(y[ibl].scales, 0, QK_K/32); + + float max_scale = 0; + + const float * xbl = x + QK_K*ibl; + float sumx2 = 0; + for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; + float sigma2 = 2*sumx2/QK_K; + + for (int ib = 0; ib < QK_K/block_size; ++ib) { + const float * xb = xbl + block_size*ib; + if (quant_weights) { + const float * qw = quant_weights + QK_K*ibl + block_size*ib; + for (int i = 0; i < block_size; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + } else { + for (int i = 0; i < block_size; ++i) weight[i] = xb[i]*xb[i]; + } + float max = fabsf(xb[0]); + for (int i = 1; i < block_size; ++i) max = MAX(max, fabsf(xb[i])); + if (max < GROUP_MAX_EPS_IQ1_M) { + scales[ib] = 0; + memset(L, 1, block_size); + continue; + } + // Here we solve exactly the sum of squared difference (SSD) weighted minimization problem. + // With just 3 allowed quant values (-1, 0, 1), we can search exhaustively for the two + // boundaries that split the weights xb[i] into 3 groups. To do so, we sort the weights + // in ascending order, compute Si = sum[weight[j] xb[j], j = 0...i] and + // Wi = sum[weight[j], j = 0...i], and use these to quckly get get the optimum scale + // for each possible and score for each split. 
+ for (int j = 0; j < block_size; ++j) { + pairs[2*j] = xb[j]; + idx[2*j] = j; + } + qsort(pairs, block_size, 2*sizeof(float), iq1_sort_helper); + float best_score = -FLT_MIN, scale = max; + int besti1 = -1, besti2 = -1, best_k = -1; + // 0: +, + + // 1: +, - + // 2: -, + + // 3: -, - + for (int i1 = 0; i1 <= block_size; ++i1) { + for (int i2 = i1; i2 <= block_size; ++i2) { + memset(sumqx, 0, 4*sizeof(float)); + memset(sumq2, 0, 4*sizeof(float)); + for (int j = 0; j < i1; ++j) { + int i = idx[2*j]; + if (i < block_size/2) { + sumqx[0] += weight[i]*x_p[0]*xb[i]; + sumqx[1] += weight[i]*x_p[0]*xb[i]; + sumqx[2] += weight[i]*x_m[0]*xb[i]; + sumqx[3] += weight[i]*x_m[0]*xb[i]; + sumq2[0] += weight[i]*x_p[0]*x_p[0]; + sumq2[1] += weight[i]*x_p[0]*x_p[0]; + sumq2[2] += weight[i]*x_m[0]*x_m[0]; + sumq2[3] += weight[i]*x_m[0]*x_m[0]; + } else { + sumqx[0] += weight[i]*x_p[0]*xb[i]; + sumqx[2] += weight[i]*x_p[0]*xb[i]; + sumqx[1] += weight[i]*x_m[0]*xb[i]; + sumqx[3] += weight[i]*x_m[0]*xb[i]; + sumq2[0] += weight[i]*x_p[0]*x_p[0]; + sumq2[2] += weight[i]*x_p[0]*x_p[0]; + sumq2[1] += weight[i]*x_m[0]*x_m[0]; + sumq2[3] += weight[i]*x_m[0]*x_m[0]; + } + } + for (int j = i1; j < i2; ++j) { + int i = idx[2*j]; + if (i < block_size/2) { + sumqx[0] += weight[i]*x_p[1]*xb[i]; + sumqx[1] += weight[i]*x_p[1]*xb[i]; + sumqx[2] += weight[i]*x_m[1]*xb[i]; + sumqx[3] += weight[i]*x_m[1]*xb[i]; + sumq2[0] += weight[i]*x_p[1]*x_p[1]; + sumq2[1] += weight[i]*x_p[1]*x_p[1]; + sumq2[2] += weight[i]*x_m[1]*x_m[1]; + sumq2[3] += weight[i]*x_m[1]*x_m[1]; + } else { + sumqx[0] += weight[i]*x_p[1]*xb[i]; + sumqx[2] += weight[i]*x_p[1]*xb[i]; + sumqx[1] += weight[i]*x_m[1]*xb[i]; + sumqx[3] += weight[i]*x_m[1]*xb[i]; + sumq2[0] += weight[i]*x_p[1]*x_p[1]; + sumq2[2] += weight[i]*x_p[1]*x_p[1]; + sumq2[1] += weight[i]*x_m[1]*x_m[1]; + sumq2[3] += weight[i]*x_m[1]*x_m[1]; + } + } + for (int j = i2; j < block_size; ++j) { + int i = idx[2*j]; + if (i < block_size/2) { + sumqx[0] += 
weight[i]*x_p[2]*xb[i]; + sumqx[1] += weight[i]*x_p[2]*xb[i]; + sumqx[2] += weight[i]*x_m[2]*xb[i]; + sumqx[3] += weight[i]*x_m[2]*xb[i]; + sumq2[0] += weight[i]*x_p[2]*x_p[2]; + sumq2[1] += weight[i]*x_p[2]*x_p[2]; + sumq2[2] += weight[i]*x_m[2]*x_m[2]; + sumq2[3] += weight[i]*x_m[2]*x_m[2]; + } else { + sumqx[0] += weight[i]*x_p[2]*xb[i]; + sumqx[2] += weight[i]*x_p[2]*xb[i]; + sumqx[1] += weight[i]*x_m[2]*xb[i]; + sumqx[3] += weight[i]*x_m[2]*xb[i]; + sumq2[0] += weight[i]*x_p[2]*x_p[2]; + sumq2[2] += weight[i]*x_p[2]*x_p[2]; + sumq2[1] += weight[i]*x_m[2]*x_m[2]; + sumq2[3] += weight[i]*x_m[2]*x_m[2]; + } + } + for (int k = 0; k < 4; ++k) { + if (sumq2[k] > 0 && sumqx[k]*sumqx[k] > best_score*sumq2[k]) { + scale = sumqx[k]/sumq2[k]; best_score = scale*sumqx[k]; + besti1 = i1; besti2 = i2; best_k = k; + } + } + } + } + GGML_ASSERT(besti1 >= 0 && besti2 >= 0 && best_k >= 0); + for (int j = 0; j < besti1; ++j) L[idx[2*j]] = 0; + for (int j = besti1; j < besti2; ++j) L[idx[2*j]] = 1; + for (int j = besti2; j < block_size; ++j) L[idx[2*j]] = 2; + if (scale < 0) { + for (int j = 0; j < block_size; ++j) L[j] = 2 - L[j]; + scale = -scale; + best_k = best_k == 0 ? 3 : best_k == 1 ? 2 : best_k == 2 ? 1 : 0; + } + bool all_on_grid = true; + for (int k = 0; k < block_size/8; ++k) { + if (k == 0) xx = best_k < 2 ? x_p : x_m; + else xx = best_k%2 == 0 ? x_p : x_m; + uint16_t u = 0; + for (int j = 0; j < 8; ++j) u |= (L[8*k+j] << 2*j); + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + all_on_grid = false; + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq1_find_best_neighbour2(neighbours, kgrid_q2xs, xb + 8*k, weight + 8*k, scale, xx, L + 8*k, NGRID_IQ1S); + GGML_ASSERT(grid_index >= 0); + } + index[k] = grid_index; + } + if (!all_on_grid) { + float sumqx_f = 0, sumq2_f = 0; + for (int k = 0; k < block_size/8; ++k) { + if (k == 0) xx = best_k < 2 ? x_p : x_m; + else xx = best_k%2 == 0 ? 
x_p : x_m; + const int8_t * pg = (const int8_t *)(kgrid_q2xs + index[k]); + for (int j = 0; j < 8; ++j) { + float w = weight[8*k + j]; + float q = xx[(pg[j] - 1)/2]; + sumqx_f += w*q*xb[8*k+j]; + sumq2_f += w*q*q; + } + } + if (sumqx_f > 0 && sumq2_f > 0) scale = sumqx_f/sumq2_f; + } + y[ibl].qs[2*ib + 0] = index[0] & 255; + y[ibl].qs[2*ib + 1] = index[1] & 255; + y[ibl].qh[ib] = (index[0] >> 8) | ((index[1] >> 8) << 4); + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + shifts[ib] = best_k; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + continue; + } + + uint16_t * sc = (uint16_t *)y[ibl].scales; + float d = max_scale/15; + float id = 1/d; + float sumqx_f = 0, sumq2_f = 0; + for (int ib = 0; ib < QK_K/block_size; ++ib) { + int l = nearest_int(0.5f*(id*scales[ib+0]-1)); + l = MAX(0, MIN(7, l)); + sc[ib/4] |= (l << 3*(ib%4)); + y[ibl].qh[ib] |= masks[shifts[ib]]; + const float * xb = xbl + block_size*ib; + if (quant_weights) { + const float * qw = quant_weights + QK_K*ibl + block_size*ib; + for (int i = 0; i < block_size; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + } else { + for (int i = 0; i < block_size; ++i) weight[i] = xb[i]*xb[i]; + } + for (int k = 0; k < block_size/8; ++k) { + if (k == 0) xx = shifts[ib] < 2 ? x_p : x_m; + else xx = shifts[ib]%2 == 0 ? x_p : x_m; + const int8_t * pg = (const int8_t *)(kgrid_q2xs + y[ibl].qs[2*ib+k] + ((y[ibl].qh[ib] << (8 - 4*k)) & 0x700)); + for (int j = 0; j < 8; ++j) { + float w = weight[8*k + j]; + float q = xx[(pg[j] - 1)/2]*(2*l+1); + sumqx_f += w*q*xb[8*k+j]; + sumq2_f += w*q*q; + } + } + } + if (sumq2_f > 0) d = sumqx_f/sumq2_f; + s.f16 = GGML_FP32_TO_FP16(d*1.1125f); // 1.1125f is another fudge factor. Don't ask me why it is needed. 
+ sc[0] |= ((s.u16 & 0x000f) << 12); + sc[1] |= ((s.u16 & 0x00f0) << 8); + sc[2] |= ((s.u16 & 0x0f00) << 4); + sc[3] |= ((s.u16 & 0xf000) << 0); + } +} + +size_t quantize_iq1_m(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + GGML_ASSERT(n_per_row%QK_K == 0); + float scales[QK_K/IQ1M_BLOCK_SIZE]; + float weight[IQ1M_BLOCK_SIZE]; + int8_t L[IQ1M_BLOCK_SIZE]; + float pairs[2*IQ1M_BLOCK_SIZE]; + uint16_t index[IQ1M_BLOCK_SIZE/8]; + int8_t shifts[QK_K/IQ1M_BLOCK_SIZE]; + int64_t nblock = n_per_row/QK_K; + char * qrow = (char *)dst; + for (int64_t row = 0; row < nrow; ++row) { + quantize_row_iq1_m_impl(src, qrow, n_per_row, quant_weights, scales, weight, pairs, L, index, shifts); + src += n_per_row; + qrow += nblock*sizeof(block_iq1_m); + } + return nrow * nblock * sizeof(block_iq1_m); +} + +// ============================ 4-bit non-linear quants + +static inline int best_index_int8(int n, const int8_t * val, float x) { + if (x <= val[0]) return 0; + if (x >= val[n-1]) return n-1; + int ml = 0, mu = n-1; + while (mu-ml > 1) { + int mav = (ml+mu)/2; + if (x < val[mav]) mu = mav; else ml = mav; + } + return x - val[mu-1] < val[mu] - x ? 
mu-1 : mu; +} + +static void quantize_row_iq4_nl_impl(const int super_block_size, const int block_size, const float * restrict x, + ggml_fp16_t * dh, uint8_t * q4, uint16_t * scales_h, uint8_t * scales_l, + float * scales, float * weight, uint8_t * L, + const int8_t * values, + const float * quant_weights, + const int ntry) { + + float sigma2 = 0; + for (int j = 0; j < super_block_size; ++j) sigma2 += x[j]*x[j]; + sigma2 *= 2.f/super_block_size; + + memset(q4, 0, super_block_size/2); + dh[0] = GGML_FP32_TO_FP16(0.f); + + float max_scale = 0, amax_scale = 0; + for (int ib = 0; ib < super_block_size/block_size; ++ib) { + const float * xb = x + ib*block_size; + uint8_t * Lb = L + ib*block_size; + if (quant_weights) { + const float * qw = quant_weights + ib*block_size; + for (int j = 0; j < block_size; ++j) weight[j] = qw[j] * sqrtf(sigma2 + xb[j]*xb[j]); + } else { + for (int j = 0; j < block_size; ++j) weight[j] = xb[j]*xb[j]; + } + float amax = 0, max = 0; + for (int j = 0; j < block_size; ++j) { + float ax = fabsf(xb[j]); + if (ax > amax) { + amax = ax; max = xb[j]; + } + } + if (amax < GROUP_MAX_EPS) { + scales[ib] = 0; + continue; + } + float d = ntry > 0 ? 
-max/values[0] : max/values[0]; + float id = 1/d; + float sumqx = 0, sumq2 = 0; + for (int j = 0; j < block_size; ++j) { + float al = id*xb[j]; + int l = best_index_int8(16, values, al); + Lb[j] = l; + float q = values[l]; + float w = weight[j]; + sumqx += w*q*xb[j]; + sumq2 += w*q*q; + } + d = sumqx/sumq2; + float best = d*sumqx; + for (int itry = -ntry; itry <= ntry; ++itry) { + id = (itry + values[0])/max; + sumqx = sumq2 = 0; + for (int j = 0; j < block_size; ++j) { + float al = id*xb[j]; + int l = best_index_int8(16, values, al); + float q = values[l]; + float w = weight[j]; + sumqx += w*q*xb[j]; + sumq2 += w*q*q; + } + if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { + d = sumqx/sumq2; best = d * sumqx; + } + } + scales[ib] = d; + float abs_d = fabsf(d); + if (abs_d > amax_scale) { + amax_scale = abs_d; max_scale = d; + } + } + + if (super_block_size/block_size > 1) { + int nb = super_block_size/block_size; + memset(scales_h, 0, ((nb+7)/8)*sizeof(uint16_t)); + float d = -max_scale/32; + dh[0] = GGML_FP32_TO_FP16(d); + float id = d ? 1/d : 0.f; + for (int ib = 0; ib < super_block_size/block_size; ++ib) { + int l = nearest_int(id*scales[ib]); + l = MAX(-32, MIN(31, l)); + float dl = d * l; + float idl = dl ? 1/dl : 0.f; + uint8_t * Lb = L + ib*block_size; + const float * xb = x + ib*block_size; + for (int j = 0; j < block_size; ++j) { + Lb[j] = best_index_int8(16, values, idl*xb[j]); + } + l += 32; + uint8_t l_l = l & 0xf; + uint8_t l_h = l >> 4; + if (ib%2 == 0) scales_l[ib/2] = l_l; + else scales_l[ib/2] |= (l_l << 4); + scales_h[ib/8] |= (l_h << 2*(ib%8)); + } + } else { + dh[0] = GGML_FP32_TO_FP16(scales[0]); + if (ntry > 0) { + float id = scales[0] ? 
1/scales[0] : 0; + for (int j = 0; j < super_block_size; ++j) { + L[j] = best_index_int8(16, values, id*x[j]); + } + } + } + + for (int i = 0; i < super_block_size/32; ++i) { + for (int j = 0; j < 16; ++j) { + q4[16*i + j] = L[32*i + j] | (L[32*i + 16 + j] << 4); + } + } +} + +size_t quantize_iq4_nl(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + GGML_ASSERT(n_per_row%QK4_NL == 0); + int64_t nblock = n_per_row/QK4_NL; + char * qrow = (char *)dst; + uint8_t L[QK4_NL]; + float weight[QK4_NL]; + uint16_t unused_h; + uint8_t * unused_l = NULL; + float scale; + for (int64_t row = 0; row < nrow; ++row) { + block_iq4_nl * iq4 = (block_iq4_nl *)qrow; + for (int ibl = 0; ibl < nblock; ++ibl) { + const float * qw = quant_weights ? quant_weights + QK4_NL*ibl : NULL; + quantize_row_iq4_nl_impl(QK4_NL, 32, src + QK4_NL*ibl, &iq4[ibl].d, iq4[ibl].qs, &unused_h, unused_l, + &scale, weight, L, kvalues_iq4nl, qw, 7); + } + src += n_per_row; + qrow += nblock*sizeof(block_iq4_nl); + } + return nrow * nblock * sizeof(block_iq4_nl); +} + +void quantize_row_iq4_nl(const float * restrict x, void * restrict vy, int64_t k) { + GGML_ASSERT(k%QK4_NL == 0); + int64_t nblock = k/QK4_NL; + uint8_t L[QK4_NL]; + float weight[QK4_NL]; + uint16_t unused_h; + uint8_t * unused_l = NULL; + float scale; + block_iq4_nl * iq4 = (block_iq4_nl *)vy; + for (int ibl = 0; ibl < nblock; ++ibl) { + quantize_row_iq4_nl_impl(QK4_NL, 32, x + QK4_NL*ibl, &iq4[ibl].d, iq4[ibl].qs, &unused_h, unused_l, + &scale, weight, L, kvalues_iq4nl, NULL, -1); + } +} + +void quantize_row_iq4_nl_ref(const float * restrict x, block_iq4_nl * restrict y, int64_t k) { + assert(k % QK4_NL == 0); + quantize_row_iq4_nl(x, y, k); +} + +size_t quantize_iq4_xs(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + GGML_ASSERT(n_per_row%QK_K == 0); + int64_t nblock = n_per_row/QK_K; + char * qrow = (char *)dst; + 
uint8_t L[QK_K]; + float weight[32]; + float scales[QK_K/32]; + for (int64_t row = 0; row < nrow; ++row) { + block_iq4_xs * iq4 = (block_iq4_xs *)qrow; + for (int ibl = 0; ibl < nblock; ++ibl) { + const float * qw = quant_weights ? quant_weights + QK_K*ibl : NULL; + quantize_row_iq4_nl_impl(QK_K, 32, src + QK_K*ibl, &iq4[ibl].d, iq4[ibl].qs, &iq4[ibl].scales_h, iq4[ibl].scales_l, + scales, weight, L, kvalues_iq4nl, qw, 7); + } + src += n_per_row; + qrow += nblock*sizeof(block_iq4_xs); + } + return nrow * nblock * sizeof(block_iq4_xs); +} + +void quantize_row_iq4_xs(const float * restrict x, void * restrict vy, int64_t k) { + assert(k % QK_K == 0); + block_iq4_xs * restrict y = vy; + quantize_row_iq4_xs_ref(x, y, k); +} + +void quantize_row_iq4_xs_ref(const float * restrict x, block_iq4_xs * restrict y, int64_t k) { + assert(k % QK_K == 0); + quantize_iq4_xs(x, y, 1, k, NULL); +} + +// =============================== 2.5625 bpw + +static void quantize_row_iq2_s_impl(const float * restrict x, void * restrict vy, int64_t n, const float * restrict quant_weights) { + + const int gindex = iq2_data_index(GGML_TYPE_IQ2_S); + + const uint64_t * kgrid_q2xs = iq2_data[gindex].grid; + const int * kmap_q2xs = iq2_data[gindex].map; + const uint16_t * kneighbors_q2xs = iq2_data[gindex].neighbours; + + GGML_ASSERT(kmap_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kgrid_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kneighbors_q2xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(n%QK_K == 0); + + const int kMaxQ = 3; + + const int64_t nbl = n/QK_K; + + block_iq2_s * y = vy; + + float scales[QK_K/16]; + float weight[16]; + float xval[16]; + int8_t L[16]; + int8_t Laux[16]; + float waux[16]; + bool is_on_grid[2]; + bool is_on_grid_aux[2]; + uint8_t block_signs[2]; + + for (int ibl = 0; ibl < nbl; ++ibl) { + + memset(&y[ibl], 0, sizeof(block_iq2_s)); + y[ibl].d = GGML_FP32_TO_FP16(0.f); + + float max_scale = 0; + + const float * xbl = x + 
QK_K*ibl; + float sumx2 = 0; + for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; + float sigma2 = 2*sumx2/QK_K; + + for (int ib = 0; ib < QK_K/16; ++ib) { + const float * xb = xbl + 16*ib; + if (quant_weights) { + const float * qw = quant_weights + QK_K*ibl + 16*ib; + for (int i = 0; i < 16; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + } else { + for (int i = 0; i < 16; ++i) weight[i] = 0.25f*sigma2 + xb[i]*xb[i]; + } + for (int i = 0; i < 16; ++i) waux[i] = sqrtf(weight[i]); + for (int k = 0; k < 2; ++k) { + uint8_t s = 0; + for (int i = 0; i < 8; ++i) { + if (xb[8*k + i] >= 0) xval[8*k + i] = xb[8*k + i]; + else { + xval[8*k + i] = -xb[8*k + i]; s |= (1 << i); + } + } + block_signs[k] = s; + } + float max = xval[0]; + for (int i = 1; i < 16; ++i) max = MAX(max, xval[i]); + if (max < GROUP_MAX_EPS_IQ2_S) { + scales[ib] = 0; + continue; + } + float best = 0; + float scale = max/(2*kMaxQ-1); + is_on_grid[0] = is_on_grid[1] = true; + for (int is = -9; is <= 9; ++is) { + float id = (2*kMaxQ-1+is*0.1f)/max; + float this_scale = 1/id; + for (int k = 0; k < 2; ++k) { + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + Laux[8*k+i] = MAX(0, MIN(kMaxQ-1, l)); + } + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= (Laux[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + is_on_grid_aux[k] = true; + if (grid_index < 0) { + is_on_grid_aux[k] = false; + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, this_scale, Laux + 8*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 16; ++i) { + float w = weight[i]; + float q = 2*Laux[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { + scale = sumqx/sumq2; best = scale*sumqx; + for (int i = 0; i < 16; ++i) L[i] = Laux[i]; + for (int k = 0; k < 2; ++k) is_on_grid[k] = is_on_grid_aux[k]; + } + } + int n_not_ongrid = 0; + for (int 
k = 0; k < 2; ++k) if (!is_on_grid[k]) ++n_not_ongrid; + if (n_not_ongrid > 0 && scale > 0) { + float id = 1/scale; + for (int k = 0; k < 2; ++k) { + if (is_on_grid[k]) continue; + uint16_t u = 0; + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + l = MAX(0, MIN(kMaxQ-1, l)); + u |= (l << 2*i); + L[8*k + i] = l; + } + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, scale, L + 8*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 16; ++i) { + float w = weight[i]; + float q = 2*L[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0) scale = sumqx/sumq2; + } + if (scale < 0) { + scale = -scale; + for (int k = 0; k < 2; ++k) block_signs[k] = ~block_signs[k]; + } + for (int k = 0; k < 2; ++k) { + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= (L[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + printf("Oops: found point %u not on grid:", u); + for (int i = 0; i < 8; ++i) printf(" %d", L[8*k+i]); + printf("\n"); + GGML_ABORT("fatal error"); + } + const int i8 = 2*ib + k; + y[ibl].qs[i8] = grid_index & 255; + y[ibl].qh[i8/4] |= ((grid_index >> 8) << 2*(i8%4)); + y[ibl].qs[QK_K/8 + i8] = block_signs[k]; + } + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + continue; + } + + float d = max_scale/31; + y[ibl].d = GGML_FP32_TO_FP16(d * 0.9875f); + float id = 1/d; + for (int ib = 0; ib < QK_K/16; ++ib) { + int l = nearest_int(0.5f*(id*scales[ib]-1)); + l = MAX(0, MIN(15, l)); + if (ib%2 == 0) y[ibl].scales[ib/2] = l; + else y[ibl].scales[ib/2] |= (l << 4); + } + } +} + +size_t quantize_iq2_s(const float * restrict src, void * restrict dst, int64_t nrow, int64_t n_per_row, const float * quant_weights) { + GGML_ASSERT(n_per_row%QK_K == 0); + int64_t nblock = 
n_per_row/QK_K; + char * qrow = (char *)dst; + for (int64_t row = 0; row < nrow; ++row) { + quantize_row_iq2_s_impl(src, qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += nblock*sizeof(block_iq2_s); + } + return nrow * nblock * sizeof(block_iq2_s); +} + +void quantize_row_iq2_s_ref(const float * restrict x, block_iq2_s * restrict y, int64_t k) { + assert(k % QK_K == 0); + quantize_iq2_s(x, y, 1, k, NULL); +} + +void quantize_row_iq2_s(const float * restrict x, void * restrict vy, int64_t k) { + assert(k % QK_K == 0); + block_iq2_s * restrict y = vy; + quantize_row_iq2_s_ref(x, y, k); +} + +static bool validate_float(float f, size_t i) { + if (isinf(f)) { + fprintf(stderr, "ggml_validate_row_data: found inf value at block %zu\n", i); + return false; + } + + if (isnan(f)) { + fprintf(stderr, "ggml_validate_row_data: found nan value at block %zu\n", i); + return false; + } + + return true; +} + +static bool isinf_fp16(ggml_fp16_t f) { + return (f & 0x7c00) == 0x7c00 && (f & 0x03ff) == 0; +} + +static bool isnan_fp16(ggml_fp16_t f) { + return (f & 0x7c00) == 0x7c00 && (f & 0x03ff) != 0; +} + +static bool validate_fp16(ggml_fp16_t f, size_t i) { + if (isinf_fp16(f)) { + fprintf(stderr, "ggml_validate_row_data: found inf value at block %zu\n", i); + return false; + } + + if (isnan_fp16(f)) { + fprintf(stderr, "ggml_validate_row_data: found nan value at block %zu\n", i); + return false; + } + + return true; +} + +#define VALIDATE_ROW_DATA_D_F16_IMPL(type, data, nb) \ + const type * q = (const type *) (data); \ + for (size_t i = 0; i < (nb); ++i) { \ + if (!validate_fp16(q[i].d, i)) { \ + return false; \ + } \ + } + +#define VALIDATE_ROW_DATA_DM_F16_IMPL(type, data, nb, d, m) \ + const type * q = (const type *) (data); \ + for (size_t i = 0; i < (nb); ++i) { \ + if (!validate_fp16(q[i].d, i) || !validate_fp16(q[i].m, i)) { \ + return false; \ + } \ + } + +#define VALIDATE_ROW_DATA_DVEC_F16_IMPL(type, data, nb, nr) \ + const type * q = (const type *) (data); \ 
+ for (size_t i = 0; i < (nb); ++i) { \ + for (size_t j = 0; j < (nr); ++j) { \ + if (!validate_fp16(q[i].d[j], i)) { \ + return false; \ + } \ + } \ + } + +bool ggml_validate_row_data(enum ggml_type type, const void * data, size_t nbytes) { + if (type < 0 || type >= GGML_TYPE_COUNT) { + fprintf(stderr, "%s: invalid type %d\n", __func__, type); + return false; + } + + if (nbytes % ggml_type_size(type) != 0) { + fprintf(stderr, "%s: invalid size %zu for type %s (type size = %zu)\n", __func__, nbytes, ggml_type_name(type), ggml_type_size(type)); + return false; + } + + const size_t nb = nbytes/ggml_type_size(type); + + switch (type) { + case GGML_TYPE_BF16: + { + int nans = 0; + int infs = 0; + const unsigned short * f = (const unsigned short *) data; + for (size_t i = 0; i < nb; ++i) { + nans += (f[i] & 0x7fff) > 0x7f80; + infs += (f[i] & 0x7fff) == 0x7f80; + } + if (nans) { + fprintf(stderr, "%s: found %d NaNs in row of %zu BF16 values\n", __func__, nans, nb); + return false; + } + if (infs) { + fprintf(stderr, "%s: found %d infinities in row of %zu BF16 values\n", __func__, infs, nb); + return false; + } + } break; + case GGML_TYPE_F16: + { + const ggml_fp16_t * f = (const ggml_fp16_t *) data; + size_t i = 0; +#if defined(__AVX2__) + for (; i + 15 < nb; i += 16) { + __m256i v = _mm256_loadu_si256((const __m256i *)(f + i)); + __m256i vexp = _mm256_and_si256(v, _mm256_set1_epi16(0x7c00)); + __m256i cmp = _mm256_cmpeq_epi16(vexp, _mm256_set1_epi16(0x7c00)); + int mask = _mm256_movemask_epi8(cmp); + if (mask) { + for (size_t j = 0; j < 16; ++j) { + if (!validate_fp16(f[i + j], i + j)) { + return false; + } + } + GGML_UNREACHABLE(); + } + } +#elif defined(__ARM_NEON) + for (; i + 7 < nb; i += 8) { + uint16x8_t v = vld1q_u16(f + i); + uint16x8_t vexp = vandq_u16(v, vdupq_n_u16(0x7c00)); + uint16x8_t cmp = vceqq_u16(vexp, vdupq_n_u16(0x7c00)); + uint64_t mask = vget_lane_u64(vreinterpret_u64_u8(vshrn_n_u16(cmp, 4)), 0); + if (mask) { + for (size_t j = 0; j < 8; ++j) { + 
if (!validate_fp16(f[i + j], i + j)) { + return false; + } + } + GGML_UNREACHABLE(); + } + } +#endif + for (; i < nb; ++i) { + if (!validate_fp16(f[i], i)) { + return false; + } + } + } break; + case GGML_TYPE_F32: + { + const float * f = (const float *) data; + size_t i = 0; +#if defined(__AVX2__) + for (; i + 7 < nb; i += 8) { + __m256i v = _mm256_loadu_si256((const __m256i *)(f + i)); + __m256i vexp = _mm256_and_si256(v, _mm256_set1_epi32(0x7f800000)); + __m256i cmp = _mm256_cmpeq_epi32(vexp, _mm256_set1_epi32(0x7f800000)); + int mask = _mm256_movemask_epi8(cmp); + if (mask) { + for (size_t j = 0; j < 8; ++j) { + if (!validate_float(f[i + j], i + j)) { + return false; + } + } + GGML_UNREACHABLE(); + } + } +#elif defined(__ARM_NEON) + for (; i + 3 < nb; i += 4) { + uint32x4_t v = vld1q_u32((const uint32_t *)f + i); + uint32x4_t vexp = vandq_u32(v, vdupq_n_u32(0x7f800000)); + uint32x4_t cmp = vceqq_u32(vexp, vdupq_n_u32(0x7f800000)); + uint64_t mask = vget_lane_u64(vreinterpret_u64_u16(vshrn_n_u32(cmp, 8)), 0); + if (mask) { + for (size_t j = 0; j < 4; ++j) { + if (!validate_float(f[i + j], i + j)) { + return false; + } + } + GGML_UNREACHABLE(); + } + } +#endif + for (; i < nb; ++i) { + if (!validate_float(f[i], i)) { + return false; + } + } + } break; + case GGML_TYPE_F64: + { + const double * f = (const double *) data; + for (size_t i = 0; i < nb; ++i) { + if (!validate_float(f[i], i)) { + return false; + } + } + } break; + case GGML_TYPE_Q4_0: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_q4_0, data, nb); + } break; + case GGML_TYPE_Q4_1: + { + VALIDATE_ROW_DATA_DM_F16_IMPL(block_q4_1, data, nb, d, m); + } break; + case GGML_TYPE_Q5_0: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_q5_0, data, nb); + } break; + case GGML_TYPE_Q5_1: + { + VALIDATE_ROW_DATA_DM_F16_IMPL(block_q5_1, data, nb, d, m); + } break; + case GGML_TYPE_Q8_0: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_q8_0, data, nb); + } break; + case GGML_TYPE_Q2_K: + { + VALIDATE_ROW_DATA_DM_F16_IMPL(block_q2_K, data, 
nb, d, dmin); + } break; + case GGML_TYPE_Q3_K: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_q3_K, data, nb); + } break; + case GGML_TYPE_Q4_K: + { + VALIDATE_ROW_DATA_DM_F16_IMPL(block_q4_K, data, nb, d, dmin); + } break; + case GGML_TYPE_Q5_K: + { + VALIDATE_ROW_DATA_DM_F16_IMPL(block_q5_K, data, nb, d, dmin); + } break; + case GGML_TYPE_Q6_K: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_q6_K, data, nb); + } break; + case GGML_TYPE_Q8_K: + { + const block_q8_K * q = (const block_q8_K *) data; + for (size_t i = 0; i < nb; ++i) { + if (!validate_float(q[i].d, i)) { + return false; + } + } + } break; + case GGML_TYPE_TQ1_0: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_tq1_0, data, nb); + } break; + case GGML_TYPE_TQ2_0: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_tq2_0, data, nb); + } break; + case GGML_TYPE_IQ1_S: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq1_s, data, nb); + } break; + case GGML_TYPE_IQ1_M: + { + const block_iq1_m * q = (const block_iq1_m *) data; + for (size_t i = 0; i < nb; ++i) { + iq1m_scale_t scale; + const uint16_t * sc = (const uint16_t *)q[i].scales; + scale.u16 = (sc[0] >> 12) | ((sc[1] >> 8) & 0x00f0) | ((sc[2] >> 4) & 0x0f00) | (sc[3] & 0xf000); + if (!validate_fp16(scale.f16, i)) { + return false; + } + } + } break; + case GGML_TYPE_IQ2_XXS: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq2_xxs, data, nb); + } break; + case GGML_TYPE_IQ2_XS: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq2_xs, data, nb); + } break; + case GGML_TYPE_IQ2_S: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq2_s, data, nb); + } break; + case GGML_TYPE_IQ3_XXS: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq3_xxs, data, nb); + } break; + + case GGML_TYPE_IQ3_S: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq3_s, data, nb); + } break; + case GGML_TYPE_IQ4_XS: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq4_xs, data, nb); + } break; + case GGML_TYPE_IQ4_NL: + { + VALIDATE_ROW_DATA_D_F16_IMPL(block_iq4_nl, data, nb); + } break; + case GGML_TYPE_Q4_0_4_4: + case GGML_TYPE_Q4_0_4_8: + { + 
VALIDATE_ROW_DATA_DVEC_F16_IMPL(block_q4_0x4, data, nbytes / sizeof(block_q4_0x4), 4); + } break; + case GGML_TYPE_Q4_0_8_8: + { + VALIDATE_ROW_DATA_DVEC_F16_IMPL(block_q4_0x8, data, nbytes / sizeof(block_q4_0x8), 8); + } break; + + case GGML_TYPE_I8: + case GGML_TYPE_I16: + case GGML_TYPE_I32: + case GGML_TYPE_I64: + // nothing to validate + break; + default: + { + fprintf(stderr, "%s: invalid type %d\n", __func__, type); + return false; + } + } + + return true; +} diff --git a/ml/backend/ggml/ggml-quants.h b/ml/backend/ggml/ggml-quants.h new file mode 100644 index 000000000..5f3231333 --- /dev/null +++ b/ml/backend/ggml/ggml-quants.h @@ -0,0 +1,173 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +#pragma once + +#define GGML_COMMON_DECL_C +#include "ggml-common.h" + +#include "ggml.h" + +// GGML internal header + +#ifdef __cplusplus +extern "C" { +#endif + +// Quantization +void quantize_row_q4_0_ref(const float * GGML_RESTRICT x, block_q4_0 * GGML_RESTRICT y, int64_t k); +void quantize_row_q4_1_ref(const float * GGML_RESTRICT x, block_q4_1 * GGML_RESTRICT y, int64_t k); +void quantize_row_q5_0_ref(const float * GGML_RESTRICT x, block_q5_0 * GGML_RESTRICT y, int64_t k); +void quantize_row_q5_1_ref(const float * GGML_RESTRICT x, block_q5_1 * GGML_RESTRICT y, int64_t k); +void quantize_row_q8_0_ref(const float * GGML_RESTRICT x, block_q8_0 * GGML_RESTRICT y, int64_t k); +void quantize_row_q8_1_ref(const float * GGML_RESTRICT x, block_q8_1 * GGML_RESTRICT y, int64_t k); + +void quantize_row_q2_K_ref(const float * GGML_RESTRICT x, block_q2_K * GGML_RESTRICT y, int64_t k); +void quantize_row_q3_K_ref(const float * GGML_RESTRICT x, block_q3_K * GGML_RESTRICT y, int64_t k); +void quantize_row_q4_K_ref(const float * GGML_RESTRICT x, block_q4_K * GGML_RESTRICT y, int64_t k); +void quantize_row_q5_K_ref(const float * GGML_RESTRICT x, block_q5_K * GGML_RESTRICT y, int64_t k); +void quantize_row_q6_K_ref(const float * GGML_RESTRICT x, block_q6_K * GGML_RESTRICT y, int64_t k); +void quantize_row_q8_K_ref(const float * GGML_RESTRICT x, block_q8_K * GGML_RESTRICT y, int64_t k); + +void quantize_row_tq1_0_ref(const float * GGML_RESTRICT x, block_tq1_0 * GGML_RESTRICT y, int64_t k); +void quantize_row_tq2_0_ref(const float * GGML_RESTRICT x, block_tq2_0 * GGML_RESTRICT y, int64_t k); + +void quantize_row_iq3_xxs_ref(const float * GGML_RESTRICT x, block_iq3_xxs * GGML_RESTRICT y, int64_t k); +void quantize_row_iq4_nl_ref (const float * GGML_RESTRICT x, block_iq4_nl * GGML_RESTRICT y, int64_t k); +void quantize_row_iq4_xs_ref (const float * GGML_RESTRICT x, block_iq4_xs * GGML_RESTRICT y, int64_t k); +void quantize_row_iq3_s_ref (const float * GGML_RESTRICT x, 
block_iq3_s * GGML_RESTRICT y, int64_t k); +void quantize_row_iq2_s_ref (const float * GGML_RESTRICT x, block_iq2_s * GGML_RESTRICT y, int64_t k); + +void quantize_row_q4_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_row_q4_1(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_row_q5_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_row_q5_1(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_row_q8_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_row_q8_1(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); + +void quantize_row_q2_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_row_q3_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_row_q4_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_row_q5_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_row_q6_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_row_q8_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); + +void quantize_row_tq1_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_row_tq2_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); + +void quantize_row_iq3_xxs(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_row_iq4_nl (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_row_iq4_xs (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_row_iq3_s (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); +void quantize_row_iq2_s (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); + +// Dequantization +void dequantize_row_q4_0(const block_q4_0 * GGML_RESTRICT x, float * 
GGML_RESTRICT y, int64_t k); +void dequantize_row_q4_1(const block_q4_1 * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_q5_0(const block_q5_0 * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_q5_1(const block_q5_1 * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_q8_0(const block_q8_0 * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +//void dequantize_row_q8_1(const block_q8_1 * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); + +void dequantize_row_q2_K(const block_q2_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_q3_K(const block_q3_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_q4_K(const block_q4_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_q5_K(const block_q5_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_q6_K(const block_q6_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_q8_K(const block_q8_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); + +void dequantize_row_tq1_0(const block_tq1_0 * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_tq2_0(const block_tq2_0 * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); + +void dequantize_row_iq2_xxs(const block_iq2_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_iq2_xs (const block_iq2_xs * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_iq2_s (const block_iq2_s * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_iq3_xxs(const block_iq3_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_iq1_s (const block_iq1_s * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_iq1_m (const block_iq1_m * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_iq4_nl (const block_iq4_nl * 
GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_iq4_xs (const block_iq4_xs * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); +void dequantize_row_iq3_s (const block_iq3_s * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); + +// Dot product +void ggml_vec_dot_q4_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q4_1_q8_1(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q5_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q5_1_q8_1(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q8_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); + +void ggml_vec_dot_q2_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q3_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q4_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q5_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q6_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); + +void ggml_vec_dot_tq1_0_q8_K(int n, float * 
GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_tq2_0_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); + +void ggml_vec_dot_iq2_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq2_xs_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq2_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq1_m_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq4_nl_q8_0 (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq4_xs_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); + +// Quantization utilizing an importance matrix (a.k.a. 
"Activation aWare Quantization") +size_t quantize_iq2_xxs(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_iq2_xs (const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_iq2_s (const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_iq3_xxs(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_iq1_s (const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_iq1_m (const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_iq4_nl (const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_iq4_xs (const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_iq3_s (const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); + +size_t quantize_tq1_0(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_tq2_0(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); + +size_t quantize_q2_K(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_q3_K(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_q4_K(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * 
imatrix); +size_t quantize_q5_K(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_q6_K(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_q4_0(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_q4_1(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_q5_0(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_q5_1(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); +size_t quantize_q8_0(const float * GGML_RESTRICT src, void * GGML_RESTRICT dst, int64_t nrows, int64_t n_per_row, const float * imatrix); + +void iq2xs_init_impl(enum ggml_type type); +void iq2xs_free_impl(enum ggml_type type); +void iq3xs_init_impl(int grid_size); +void iq3xs_free_impl(int grid_size); + +#ifdef __cplusplus +} +#endif diff --git a/ml/backend/ggml/ggml.c b/ml/backend/ggml/ggml.c new file mode 100644 index 000000000..7f7a20e4b --- /dev/null +++ b/ml/backend/ggml/ggml.c @@ -0,0 +1,23355 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above 
copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +#define _CRT_SECURE_NO_DEPRECATE // Disables ridiculous "unsafe" warnings on Windows +#define _USE_MATH_DEFINES // For M_PI on MSVC + +#include "ggml-backend.h" +#include "ggml-impl.h" +#include "ggml-cpu-impl.h" +#include "ggml-quants.h" +#include "ggml.h" +#include "ggml-aarch64.h" + +#if defined(_MSC_VER) || defined(__MINGW32__) +#include // using malloc.h with MSC/MINGW +#elif !defined(__FreeBSD__) && !defined(__NetBSD__) && !defined(__OpenBSD__) +#include +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#if defined(__gnu_linux__) +#include +#endif + +#ifdef GGML_USE_OPENMP +#include +#endif + +#ifdef GGML_USE_METAL +#include +#endif + +#if defined(__ARM_FEATURE_SVE) || defined(__ARM_FEATURE_MATMUL_INT8) +#undef GGML_USE_LLAMAFILE +#endif + +#ifdef GGML_USE_LLAMAFILE +#include +#endif + +#if defined(_MSC_VER) +// disable "possible loss of data" to avoid hundreds of casts +// we should just be careful :) +#pragma warning(disable: 4244 4267) + +// disable POSIX deprecation warnings +// these functions are never going away, anyway +#pragma warning(disable: 4996) + +// unreachable code because of multiple instances of code after GGML_ABORT +#pragma warning(disable: 4702) +#endif + +// Note: once we move threading into a separate C++ file +// will use 
std::hardware_destructive_interference_size instead of hardcoding it here +// and we'll use C++ attribute syntax. +#define GGML_CACHE_LINE 64 + +#if defined(__clang__) || defined(__GNUC__) +#define GGML_CACHE_ALIGN __attribute__((aligned(GGML_CACHE_LINE))) +#endif + +#if defined(__has_feature) +#if __has_feature(thread_sanitizer) +#define GGML_TSAN_ENABLED 1 +#endif +#else // __has_feature +#if defined(__SANITIZE_THREAD__) +#define GGML_TSAN_ENABLED 1 +#endif +#endif // __has_feature + +#if defined(_WIN32) + +#define WIN32_LEAN_AND_MEAN +#ifndef NOMINMAX + #define NOMINMAX +#endif +#include + +#if !defined(__clang__) +#define GGML_CACHE_ALIGN __declspec(align(GGML_CACHE_LINE)) + +typedef volatile LONG atomic_int; +typedef atomic_int atomic_bool; +typedef atomic_int atomic_flag; + +#define ATOMIC_FLAG_INIT 0 + +typedef enum { + memory_order_relaxed, + memory_order_consume, + memory_order_acquire, + memory_order_release, + memory_order_acq_rel, + memory_order_seq_cst +} memory_order; + +static void atomic_store(atomic_int * ptr, LONG val) { + InterlockedExchange(ptr, val); +} +static void atomic_store_explicit(atomic_int * ptr, LONG val, memory_order mo) { + // TODO: add support for explicit memory order + InterlockedExchange(ptr, val); +} +static LONG atomic_load(atomic_int * ptr) { + return InterlockedCompareExchange(ptr, 0, 0); +} +static LONG atomic_load_explicit(atomic_int * ptr, memory_order mo) { + // TODO: add support for explicit memory order + return InterlockedCompareExchange(ptr, 0, 0); +} +static LONG atomic_fetch_add(atomic_int * ptr, LONG inc) { + return InterlockedExchangeAdd(ptr, inc); +} +static LONG atomic_fetch_add_explicit(atomic_int * ptr, LONG inc, memory_order mo) { + // TODO: add support for explicit memory order + return InterlockedExchangeAdd(ptr, inc); +} +static atomic_bool atomic_flag_test_and_set(atomic_flag * ptr) { + return InterlockedExchange(ptr, 1); +} +static void atomic_flag_clear(atomic_flag * ptr) { + InterlockedExchange(ptr, 
0); +} +static void atomic_thread_fence(memory_order mo) { + MemoryBarrier(); +} +#else // clang +#include +#endif + +typedef HANDLE pthread_t; + +typedef DWORD thread_ret_t; +static int pthread_create(pthread_t * out, void * unused, thread_ret_t(*func)(void *), void * arg) { + (void) unused; + HANDLE handle = CreateThread(NULL, 0, (LPTHREAD_START_ROUTINE) func, arg, 0, NULL); + if (handle == NULL) + { + return EAGAIN; + } + + *out = handle; + return 0; +} + +static int pthread_join(pthread_t thread, void * unused) { + (void) unused; + int ret = (int) WaitForSingleObject(thread, INFINITE); + CloseHandle(thread); + return ret; +} + +static int sched_yield (void) { + Sleep (0); + return 0; +} +#else + +#include +#include +#include +#if defined(__FreeBSD__) +#include +#endif + +typedef void * thread_ret_t; + +#include +#include +#include + +#endif + +typedef pthread_t ggml_thread_t; + +#ifdef GGML_USE_CPU_HBM +#include +#endif + +#if defined(__APPLE__) +#include +#endif + +#if (defined(__linux__) || defined(__APPLE__) || defined(__FreeBSD__) || defined(__NetBSD__) || defined(__OpenBSD__)) && \ + (!defined(TARGET_OS_TV) && !defined(TARGET_OS_WATCH)) + +#include + +#if defined(__ANDROID__) +#include +#include +#include + +struct backtrace_state { + void ** current; + void ** end; +}; + +static _Unwind_Reason_Code unwind_callback(struct _Unwind_Context* context, void* arg) { + struct backtrace_state * state = (struct backtrace_state *)arg; + uintptr_t pc = _Unwind_GetIP(context); + if (pc) { + if (state->current == state->end) { + return _URC_END_OF_STACK; + } else { + *state->current++ = (void*)pc; + } + } + return _URC_NO_REASON; +} + +static void ggml_print_backtrace_symbols(void) { + const int max = 100; + void* buffer[max]; + + struct backtrace_state state = {buffer, buffer + max}; + _Unwind_Backtrace(unwind_callback, &state); + + int count = state.current - buffer; + + for (int idx = 0; idx < count; ++idx) { + const void * addr = buffer[idx]; + const char * symbol 
= ""; + + Dl_info info; + if (dladdr(addr, &info) && info.dli_sname) { + symbol = info.dli_sname; + } + + fprintf(stderr, "%d: %p %s\n", idx, addr, symbol); + } +} +#elif defined(__linux__) && defined(__GLIBC__) +#include +static void ggml_print_backtrace_symbols(void) { + void * trace[100]; + int nptrs = backtrace(trace, sizeof(trace)/sizeof(trace[0])); + backtrace_symbols_fd(trace, nptrs, STDERR_FILENO); +} +#else +static void ggml_print_backtrace_symbols(void) { + // platform not supported +} +#endif + +static void ggml_print_backtrace(void) { + char attach[32]; + snprintf(attach, sizeof(attach), "attach %d", getpid()); + int pid = fork(); + if (pid == 0) { + // try gdb + execlp("gdb", "gdb", "--batch", + "-ex", "set style enabled on", + "-ex", attach, + "-ex", "bt -frame-info source-and-location", + "-ex", "detach", + "-ex", "quit", + (char *) NULL); + // try lldb + execlp("lldb", "lldb", "--batch", + "-o", "bt", + "-o", "quit", + "-p", attach, + (char *) NULL); + exit(EXIT_FAILURE); + } else { + int wstatus; + waitpid(pid, &wstatus, 0); + if (WIFEXITED(wstatus)) { + if (WEXITSTATUS(wstatus) == EXIT_FAILURE) { + // gdb failed, fallback to backtrace_symbols + ggml_print_backtrace_symbols(); + } + } + } +} +#else +static void ggml_print_backtrace(void) { + // platform not supported +} +#endif + +void ggml_abort(const char * file, int line, const char * fmt, ...) { + fflush(stdout); + + fprintf(stderr, "%s:%d: ", file, line); + + va_list args; + va_start(args, fmt); + vfprintf(stderr, fmt, args); + va_end(args); + + fprintf(stderr, "\n"); + + ggml_print_backtrace(); + abort(); +} + +#define GGML_DEBUG 0 +#define GGML_GELU_FP16 +#define GGML_GELU_QUICK_FP16 + +#define GGML_SOFT_MAX_UNROLL 4 +#define GGML_VEC_DOT_UNROLL 2 +#define GGML_VEC_MAD_UNROLL 32 + +// +// logging +// + +#if (GGML_DEBUG >= 1) +#define GGML_PRINT_DEBUG(...) printf(__VA_ARGS__) +#else +#define GGML_PRINT_DEBUG(...) +#endif + +#if (GGML_DEBUG >= 5) +#define GGML_PRINT_DEBUG_5(...) 
printf(__VA_ARGS__) +#else +#define GGML_PRINT_DEBUG_5(...) +#endif + +#if (GGML_DEBUG >= 10) +#define GGML_PRINT_DEBUG_10(...) printf(__VA_ARGS__) +#else +#define GGML_PRINT_DEBUG_10(...) +#endif + +#define GGML_PRINT(...) printf(__VA_ARGS__) + +// +// end of logging block +// + +#ifdef GGML_USE_ACCELERATE +// uncomment to use vDSP for soft max computation +// note: not sure if it is actually faster +//#define GGML_SOFT_MAX_ACCELERATE +#endif + +#if defined(_MSC_VER) || defined(__MINGW32__) +#define GGML_ALIGNED_MALLOC(size) _aligned_malloc(size, GGML_MEM_ALIGN) +#define GGML_ALIGNED_FREE(ptr) _aligned_free(ptr) +#else +inline static void * ggml_aligned_malloc(size_t size) { + if (size == 0) { + GGML_PRINT("WARNING: Behavior may be unexpected when allocating 0 bytes for ggml_aligned_malloc!\n"); + return NULL; + } + void * aligned_memory = NULL; +#ifdef GGML_USE_CPU_HBM + int result = hbw_posix_memalign(&aligned_memory, 16, size); +#elif GGML_USE_METAL + int result = posix_memalign(&aligned_memory, sysconf(_SC_PAGESIZE), size); +#else + int result = posix_memalign(&aligned_memory, GGML_MEM_ALIGN, size); +#endif + if (result != 0) { + // Handle allocation failure + const char *error_desc = "unknown allocation error"; + switch (result) { + case EINVAL: + error_desc = "invalid alignment value"; + break; + case ENOMEM: + error_desc = "insufficient memory"; + break; + } + GGML_PRINT("%s: %s (attempted to allocate %6.2f MB)\n", __func__, error_desc, size/(1024.0*1024.0)); + GGML_ABORT("fatal error"); + return NULL; + } + return aligned_memory; +} +#define GGML_ALIGNED_MALLOC(size) ggml_aligned_malloc(size) +#ifdef GGML_USE_CPU_HBM +#define GGML_ALIGNED_FREE(ptr) if(NULL != ptr) hbw_free(ptr) +#else +#define GGML_ALIGNED_FREE(ptr) free(ptr) +#endif +#endif + +inline static void * ggml_malloc(size_t size) { + if (size == 0) { + GGML_PRINT("WARNING: Behavior may be unexpected when allocating 0 bytes for ggml_malloc!\n"); + return NULL; + } + void * result = malloc(size); + 
if (result == NULL) { + GGML_PRINT("%s: failed to allocate %6.2f MB\n", __func__, size/(1024.0*1024.0)); + GGML_ABORT("fatal error"); + } + return result; +} + +// calloc +inline static void * ggml_calloc(size_t num, size_t size) { + if (num == 0 || size == 0) { + GGML_PRINT("WARNING: Behavior may be unexpected when allocating 0 bytes for ggml_calloc!\n"); + return NULL; + } + void * result = calloc(num, size); + if (result == NULL) { + GGML_PRINT("%s: failed to allocate %6.2f MB\n", __func__, size/(1024.0*1024.0)); + GGML_ABORT("fatal error"); + } + return result; +} + +#define GGML_MALLOC(size) ggml_malloc(size) +#define GGML_CALLOC(num, size) ggml_calloc(num, size) + +#define GGML_FREE(ptr) free(ptr) + +#define UNUSED GGML_UNUSED +#define SWAP(x, y, T) do { T SWAP = x; (x) = y; (y) = SWAP; } while (0) + +#if defined(GGML_USE_ACCELERATE) +#include +#endif + +// floating point type used to accumulate sums +typedef double ggml_float; + +#undef MIN +#undef MAX + +#define MIN(a, b) ((a) < (b) ? (a) : (b)) +#define MAX(a, b) ((a) > (b) ? 
(a) : (b)) + +// +// global data +// + +// precomputed gelu table for f16 (128 KB) +static ggml_fp16_t ggml_table_gelu_f16[1 << 16]; + +// precomputed quick gelu table for f16 (128 KB) +static ggml_fp16_t ggml_table_gelu_quick_f16[1 << 16]; + +// precomputed f32 table for f16 (256 KB) (ggml-impl.h) +float ggml_table_f32_f16[1 << 16]; + +#if defined(__ARM_ARCH) +struct ggml_arm_arch_features_type { + int has_neon; + int has_i8mm; + int has_sve; + int sve_cnt; +} ggml_arm_arch_features = {-1, -1, -1, 0}; +#endif + +GGML_CALL const char * ggml_status_to_string(enum ggml_status status) { + switch (status) { + case GGML_STATUS_ALLOC_FAILED: return "GGML status: error (failed to allocate memory)"; + case GGML_STATUS_FAILED: return "GGML status: error (operation failed)"; + case GGML_STATUS_SUCCESS: return "GGML status: success"; + case GGML_STATUS_ABORTED: return "GGML status: warning (operation aborted)"; + } + + return "GGML status: unknown"; +} + +float ggml_fp16_to_fp32(ggml_fp16_t x) { +#define ggml_fp16_to_fp32 do_not_use__ggml_fp16_to_fp32__in_ggml + return GGML_FP16_TO_FP32(x); +} + +ggml_fp16_t ggml_fp32_to_fp16(float x) { +#define ggml_fp32_to_fp16 do_not_use__ggml_fp32_to_fp16__in_ggml + return GGML_FP32_TO_FP16(x); +} + +float ggml_bf16_to_fp32(ggml_bf16_t x) { +#define ggml_bf16_to_fp32 do_not_use__ggml_bf16_to_fp32__in_ggml + return GGML_BF16_TO_FP32(x); // it just left shifts +} + +ggml_bf16_t ggml_fp32_to_bf16(float x) { +#define ggml_fp32_to_bf16 do_not_use__ggml_fp32_to_bf16__in_ggml + return GGML_FP32_TO_BF16(x); +} + +void ggml_fp16_to_fp32_row(const ggml_fp16_t * x, float * y, int64_t n) { + for (int64_t i = 0; i < n; i++) { + y[i] = GGML_FP16_TO_FP32(x[i]); + } +} + +void ggml_fp32_to_fp16_row(const float * x, ggml_fp16_t * y, int64_t n) { + int64_t i = 0; +#if defined(__F16C__) + for (; i + 7 < n; i += 8) { + __m256 x_vec = _mm256_loadu_ps(x + i); + __m128i y_vec = _mm256_cvtps_ph(x_vec, _MM_FROUND_TO_NEAREST_INT); + _mm_storeu_si128((__m128i *)(y 
+ i), y_vec); + } + for(; i + 3 < n; i += 4) { + __m128 x_vec = _mm_loadu_ps(x + i); + __m128i y_vec = _mm_cvtps_ph(x_vec, _MM_FROUND_TO_NEAREST_INT); + _mm_storel_epi64((__m128i *)(y + i), y_vec); + } +#endif + for (; i < n; i++) { + y[i] = GGML_FP32_TO_FP16(x[i]); + } +} + +void ggml_bf16_to_fp32_row(const ggml_bf16_t * x, float * y, int64_t n) { + int64_t i = 0; +#if defined(__AVX512F__) + for (; i + 16 <= n; i += 16) { + _mm512_storeu_ps(y + i, + _mm512_castsi512_ps( + _mm512_slli_epi32( + _mm512_cvtepu16_epi32( + _mm256_loadu_si256( + (const __m256i *)(x + i))), + 16))); + } +#elif defined(__AVX2__) + for (; i + 8 <= n; i += 8) { + _mm256_storeu_ps(y + i, + _mm256_castsi256_ps( + _mm256_slli_epi32( + _mm256_cvtepu16_epi32( + _mm_loadu_si128( + (const __m128i *)(x + i))), + 16))); + } +#endif + for (; i < n; i++) { + y[i] = GGML_BF16_TO_FP32(x[i]); + } +} + +void ggml_fp32_to_bf16_row_ref(const float * x, ggml_bf16_t * y, int64_t n) { + for (int i = 0; i < n; i++) { + y[i] = ggml_compute_fp32_to_bf16(x[i]); + } +} + +void ggml_fp32_to_bf16_row(const float * x, ggml_bf16_t * y, int64_t n) { + int i = 0; +#if defined(__AVX512BF16__) + // subnormals are flushed to zero on this platform + for (; i + 32 <= n; i += 32) { + _mm512_storeu_si512( + (__m512i *)(y + i), + m512i(_mm512_cvtne2ps_pbh(_mm512_loadu_ps(x + i + 16), + _mm512_loadu_ps(x + i)))); + } +#endif + for (; i < n; i++) { + y[i] = GGML_FP32_TO_BF16(x[i]); + } +} + +bool ggml_guid_matches(ggml_guid_t guid_a, ggml_guid_t guid_b) { + return memcmp(guid_a, guid_b, sizeof(ggml_guid)) == 0; +} + +// +// timing +// + +#if defined(_MSC_VER) || defined(__MINGW32__) +static int64_t timer_freq, timer_start; +void ggml_time_init(void) { + LARGE_INTEGER t; + QueryPerformanceFrequency(&t); + timer_freq = t.QuadPart; + + // The multiplication by 1000 or 1000000 below can cause an overflow if timer_freq + // and the uptime is high enough. + // We subtract the program start time to reduce the likelihood of that happening. 
+ QueryPerformanceCounter(&t); + timer_start = t.QuadPart; +} +int64_t ggml_time_ms(void) { + LARGE_INTEGER t; + QueryPerformanceCounter(&t); + return ((t.QuadPart-timer_start) * 1000) / timer_freq; +} +int64_t ggml_time_us(void) { + LARGE_INTEGER t; + QueryPerformanceCounter(&t); + return ((t.QuadPart-timer_start) * 1000000) / timer_freq; +} +#else +void ggml_time_init(void) {} +int64_t ggml_time_ms(void) { + struct timespec ts; + clock_gettime(CLOCK_MONOTONIC, &ts); + return (int64_t)ts.tv_sec*1000 + (int64_t)ts.tv_nsec/1000000; +} + +int64_t ggml_time_us(void) { + struct timespec ts; + clock_gettime(CLOCK_MONOTONIC, &ts); + return (int64_t)ts.tv_sec*1000000 + (int64_t)ts.tv_nsec/1000; +} +#endif + +int64_t ggml_cycles(void) { + return clock(); +} + +int64_t ggml_cycles_per_ms(void) { + return CLOCKS_PER_SEC/1000; +} + +// +// cross-platform UTF-8 file paths +// + +#ifdef _WIN32 +static wchar_t * ggml_mbstowcs(const char * mbs) { + int wlen = MultiByteToWideChar(CP_UTF8, 0, mbs, -1, NULL, 0); + if (!wlen) { + errno = EINVAL; + return NULL; + } + + wchar_t * wbuf = GGML_MALLOC(wlen * sizeof(wchar_t)); + wlen = MultiByteToWideChar(CP_UTF8, 0, mbs, -1, wbuf, wlen); + if (!wlen) { + GGML_FREE(wbuf); + errno = EINVAL; + return NULL; + } + + return wbuf; +} +#endif + +FILE * ggml_fopen(const char * fname, const char * mode) { +#ifdef _WIN32 + FILE * file = NULL; + + // convert fname (UTF-8) + wchar_t * wfname = ggml_mbstowcs(fname); + if (wfname) { + // convert mode (ANSI) + wchar_t * wmode = GGML_MALLOC((strlen(mode) + 1) * sizeof(wchar_t)); + wchar_t * wmode_p = wmode; + do { + *wmode_p++ = (wchar_t)*mode; + } while (*mode++); + + // open file + file = _wfopen(wfname, wmode); + + GGML_FREE(wfname); + GGML_FREE(wmode); + } + + return file; +#else + return fopen(fname, mode); +#endif +} + +// +// cache line +// + +#if defined(__cpp_lib_hardware_interference_size) +#define CACHE_LINE_SIZE hardware_destructive_interference_size +#else +#if defined(__POWER9_VECTOR__) 
+#define CACHE_LINE_SIZE 128 +#else +#define CACHE_LINE_SIZE 64 +#endif +#endif + +static const size_t CACHE_LINE_SIZE_F32 = CACHE_LINE_SIZE/sizeof(float); + +static void ggml_vec_dot_f32(int n, float * restrict s, size_t bs, const float * restrict x, size_t bx, const float * restrict y, size_t by, int nrc); +static void ggml_vec_dot_f16(int n, float * restrict s, size_t bs, ggml_fp16_t * restrict x, size_t bx, ggml_fp16_t * restrict y, size_t by, int nrc); +static void ggml_vec_dot_bf16(int n, float * restrict s, size_t bs, ggml_bf16_t * restrict x, size_t bx, ggml_bf16_t * restrict y, size_t by, int nrc); + +static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { + [GGML_TYPE_I8] = { + .type_name = "i8", + .blck_size = 1, + .type_size = sizeof(int8_t), + .is_quantized = false, + }, + [GGML_TYPE_I16] = { + .type_name = "i16", + .blck_size = 1, + .type_size = sizeof(int16_t), + .is_quantized = false, + }, + [GGML_TYPE_I32] = { + .type_name = "i32", + .blck_size = 1, + .type_size = sizeof(int32_t), + .is_quantized = false, + }, + [GGML_TYPE_I64] = { + .type_name = "i64", + .blck_size = 1, + .type_size = sizeof(int64_t), + .is_quantized = false, + }, + [GGML_TYPE_F64] = { + .type_name = "f64", + .blck_size = 1, + .type_size = sizeof(double), + .is_quantized = false, + .nrows = 1, + }, + [GGML_TYPE_F32] = { + .type_name = "f32", + .blck_size = 1, + .type_size = sizeof(float), + .is_quantized = false, + .vec_dot = (ggml_vec_dot_t) ggml_vec_dot_f32, + .vec_dot_type = GGML_TYPE_F32, + .nrows = 1, + }, + [GGML_TYPE_F16] = { + .type_name = "f16", + .blck_size = 1, + .type_size = sizeof(ggml_fp16_t), + .is_quantized = false, + .to_float = (ggml_to_float_t) ggml_fp16_to_fp32_row, + .from_float = (ggml_from_float_t) ggml_fp32_to_fp16_row, + .from_float_ref = (ggml_from_float_t) ggml_fp32_to_fp16_row, + .vec_dot = (ggml_vec_dot_t) ggml_vec_dot_f16, + .vec_dot_type = GGML_TYPE_F16, + .nrows = 1, + }, + [GGML_TYPE_Q4_0] = { + .type_name = "q4_0", + .blck_size = QK4_0, + 
.type_size = sizeof(block_q4_0), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_q4_0, + .from_float = quantize_row_q4_0, + .from_float_ref = (ggml_from_float_t) quantize_row_q4_0_ref, + .vec_dot = ggml_vec_dot_q4_0_q8_0, + .vec_dot_type = GGML_TYPE_Q8_0, +#if defined (__ARM_FEATURE_MATMUL_INT8) + .nrows = 2, +#else + .nrows = 1, +#endif + }, + [GGML_TYPE_Q4_1] = { + .type_name = "q4_1", + .blck_size = QK4_1, + .type_size = sizeof(block_q4_1), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_q4_1, + .from_float = quantize_row_q4_1, + .from_float_ref = (ggml_from_float_t) quantize_row_q4_1_ref, + .vec_dot = ggml_vec_dot_q4_1_q8_1, + .vec_dot_type = GGML_TYPE_Q8_1, +#if defined (__ARM_FEATURE_MATMUL_INT8) + .nrows = 2, +#else + .nrows = 1, +#endif + }, + [4] = { // GGML_TYPE_Q4_2 + .type_name = "DEPRECATED", + .blck_size = 0, + .type_size = 0, + .is_quantized = false, + .to_float = NULL, + .from_float = NULL, + .from_float_ref = NULL, + .vec_dot = NULL, + .vec_dot_type = GGML_TYPE_COUNT, + .nrows = 1, + }, + [5] = { // GGML_TYPE_Q4_3 + .type_name = "DEPRECATED", + .blck_size = 0, + .type_size = 0, + .is_quantized = false, + .to_float = NULL, + .from_float = NULL, + .from_float_ref = NULL, + .vec_dot = NULL, + .vec_dot_type = GGML_TYPE_COUNT, + .nrows = 1, + }, + [GGML_TYPE_Q5_0] = { + .type_name = "q5_0", + .blck_size = QK5_0, + .type_size = sizeof(block_q5_0), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_q5_0, + .from_float = quantize_row_q5_0, + .from_float_ref = (ggml_from_float_t) quantize_row_q5_0_ref, + .vec_dot = ggml_vec_dot_q5_0_q8_0, + .vec_dot_type = GGML_TYPE_Q8_0, + .nrows = 1, + }, + [GGML_TYPE_Q5_1] = { + .type_name = "q5_1", + .blck_size = QK5_1, + .type_size = sizeof(block_q5_1), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_q5_1, + .from_float = quantize_row_q5_1, + .from_float_ref = (ggml_from_float_t) quantize_row_q5_1_ref, + .vec_dot = 
ggml_vec_dot_q5_1_q8_1, + .vec_dot_type = GGML_TYPE_Q8_1, + .nrows = 1, + }, + [GGML_TYPE_Q8_0] = { + .type_name = "q8_0", + .blck_size = QK8_0, + .type_size = sizeof(block_q8_0), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_q8_0, + .from_float = quantize_row_q8_0, + .from_float_ref = (ggml_from_float_t) quantize_row_q8_0_ref, + .from_float_to_mat = quantize_mat_q8_0, + .vec_dot = ggml_vec_dot_q8_0_q8_0, + .vec_dot_type = GGML_TYPE_Q8_0, +#if defined (__ARM_FEATURE_MATMUL_INT8) + .nrows = 2, +#else + .nrows = 1, +#endif + }, + [GGML_TYPE_Q8_1] = { + .type_name = "q8_1", + .blck_size = QK8_1, + .type_size = sizeof(block_q8_1), + .is_quantized = true, + .from_float = quantize_row_q8_1, + .from_float_ref = (ggml_from_float_t) quantize_row_q8_1_ref, + .vec_dot_type = GGML_TYPE_Q8_1, + .nrows = 1, + }, + [GGML_TYPE_Q2_K] = { + .type_name = "q2_K", + .blck_size = QK_K, + .type_size = sizeof(block_q2_K), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_q2_K, + .from_float = quantize_row_q2_K, + .from_float_ref = (ggml_from_float_t) quantize_row_q2_K_ref, + .vec_dot = ggml_vec_dot_q2_K_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, + [GGML_TYPE_Q3_K] = { + .type_name = "q3_K", + .blck_size = QK_K, + .type_size = sizeof(block_q3_K), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_q3_K, + .from_float = quantize_row_q3_K, + .from_float_ref = (ggml_from_float_t) quantize_row_q3_K_ref, + .vec_dot = ggml_vec_dot_q3_K_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, + [GGML_TYPE_Q4_K] = { + .type_name = "q4_K", + .blck_size = QK_K, + .type_size = sizeof(block_q4_K), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_q4_K, + .from_float = quantize_row_q4_K, + .from_float_ref = (ggml_from_float_t) quantize_row_q4_K_ref, + .vec_dot = ggml_vec_dot_q4_K_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, + [GGML_TYPE_Q5_K] = { + .type_name = "q5_K", + .blck_size = 
QK_K, + .type_size = sizeof(block_q5_K), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_q5_K, + .from_float = quantize_row_q5_K, + .from_float_ref = (ggml_from_float_t) quantize_row_q5_K_ref, + .vec_dot = ggml_vec_dot_q5_K_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, + [GGML_TYPE_Q6_K] = { + .type_name = "q6_K", + .blck_size = QK_K, + .type_size = sizeof(block_q6_K), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_q6_K, + .from_float = quantize_row_q6_K, + .from_float_ref = (ggml_from_float_t) quantize_row_q6_K_ref, + .vec_dot = ggml_vec_dot_q6_K_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, + [GGML_TYPE_IQ2_XXS] = { + .type_name = "iq2_xxs", + .blck_size = QK_K, + .type_size = sizeof(block_iq2_xxs), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq2_xxs, + .from_float = NULL, + .from_float_ref = NULL, + .vec_dot = ggml_vec_dot_iq2_xxs_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, + [GGML_TYPE_IQ2_XS] = { + .type_name = "iq2_xs", + .blck_size = QK_K, + .type_size = sizeof(block_iq2_xs), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq2_xs, + .from_float = NULL, + .from_float_ref = NULL, + .vec_dot = ggml_vec_dot_iq2_xs_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, + [GGML_TYPE_IQ3_XXS] = { + .type_name = "iq3_xxs", + .blck_size = QK_K, + .type_size = sizeof(block_iq3_xxs), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq3_xxs, + .from_float = quantize_row_iq3_xxs, + .from_float_ref = (ggml_from_float_t)quantize_row_iq3_xxs_ref, + .vec_dot = ggml_vec_dot_iq3_xxs_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, + [GGML_TYPE_IQ3_S] = { + .type_name = "iq3_s", + .blck_size = QK_K, + .type_size = sizeof(block_iq3_s), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq3_s, + .from_float = quantize_row_iq3_s, + .from_float_ref = 
(ggml_from_float_t)quantize_row_iq3_s_ref, + .vec_dot = ggml_vec_dot_iq3_s_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, + [GGML_TYPE_IQ2_S] = { + .type_name = "iq2_s", + .blck_size = QK_K, + .type_size = sizeof(block_iq2_s), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq2_s, + .from_float = quantize_row_iq2_s, + .from_float_ref = (ggml_from_float_t)quantize_row_iq2_s_ref, + .vec_dot = ggml_vec_dot_iq2_s_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, + [GGML_TYPE_IQ1_S] = { + .type_name = "iq1_s", + .blck_size = QK_K, + .type_size = sizeof(block_iq1_s), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq1_s, + .from_float = NULL, + .from_float_ref = NULL, + .vec_dot = ggml_vec_dot_iq1_s_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, + [GGML_TYPE_IQ1_M] = { + .type_name = "iq1_m", + .blck_size = QK_K, + .type_size = sizeof(block_iq1_m), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq1_m, + .from_float = NULL, + .from_float_ref = NULL, + .vec_dot = ggml_vec_dot_iq1_m_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, + [GGML_TYPE_IQ4_NL] = { + .type_name = "iq4_nl", + .blck_size = QK4_NL, + .type_size = sizeof(block_iq4_nl), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq4_nl, + .from_float = quantize_row_iq4_nl, + .from_float_ref = (ggml_from_float_t)quantize_row_iq4_nl_ref, + .vec_dot = ggml_vec_dot_iq4_nl_q8_0, + .vec_dot_type = GGML_TYPE_Q8_0, + .nrows = 1, + }, + [GGML_TYPE_IQ4_XS] = { + .type_name = "iq4_xs", + .blck_size = QK_K, + .type_size = sizeof(block_iq4_xs), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq4_xs, + .from_float = quantize_row_iq4_xs, + .from_float_ref = (ggml_from_float_t)quantize_row_iq4_xs_ref, + .vec_dot = ggml_vec_dot_iq4_xs_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, + [GGML_TYPE_Q8_K] = { + .type_name = "q8_K", + .blck_size = QK_K, + .type_size = 
sizeof(block_q8_K), + .is_quantized = true, + .from_float = quantize_row_q8_K, + }, + [GGML_TYPE_BF16] = { + .type_name = "bf16", + .blck_size = 1, + .type_size = sizeof(ggml_bf16_t), + .is_quantized = false, + .to_float = (ggml_to_float_t) ggml_bf16_to_fp32_row, + .from_float = (ggml_from_float_t) ggml_fp32_to_bf16_row, + .from_float_ref = (ggml_from_float_t) ggml_fp32_to_bf16_row_ref, + .vec_dot = (ggml_vec_dot_t) ggml_vec_dot_bf16, + .vec_dot_type = GGML_TYPE_BF16, + .nrows = 1, + }, + [GGML_TYPE_Q4_0_4_4] = { + .type_name = "q4_0_4x4", + .blck_size = QK4_0, + .blck_size_interleave = 4, + .type_size = sizeof(block_q4_0), + .is_quantized = true, + .to_float = NULL, + .from_float = NULL, + .from_float_ref = NULL, + .vec_dot = NULL, + .vec_dot_type = GGML_TYPE_Q8_0, + .nrows = 1, + .ncols = 4, + .gemv = ggml_gemv_q4_0_4x4_q8_0, + .gemm = ggml_gemm_q4_0_4x4_q8_0, + }, + [GGML_TYPE_Q4_0_4_8] = { + .type_name = "q4_0_4x8", + .blck_size = QK4_0, + .blck_size_interleave = 8, + .type_size = sizeof(block_q4_0), + .is_quantized = true, + .to_float = NULL, + .from_float = NULL, + .from_float_ref = NULL, + .vec_dot = NULL, + .vec_dot_type = GGML_TYPE_Q8_0, + .nrows = 1, + .ncols = 4, + .gemv = ggml_gemv_q4_0_4x8_q8_0, + .gemm = ggml_gemm_q4_0_4x8_q8_0, + }, + [GGML_TYPE_Q4_0_8_8] = { + .type_name = "q4_0_8x8", + .blck_size = QK4_0, + .blck_size_interleave = 8, + .type_size = sizeof(block_q4_0), + .is_quantized = true, + .to_float = NULL, + .from_float = NULL, + .from_float_ref = NULL, + .vec_dot = NULL, + .vec_dot_type = GGML_TYPE_Q8_0, + .nrows = 1, + .ncols = 8, + .gemv = ggml_gemv_q4_0_8x8_q8_0, + .gemm = ggml_gemm_q4_0_8x8_q8_0, + }, + [GGML_TYPE_TQ1_0] = { + .type_name = "tq1_0", + .blck_size = QK_K, + .type_size = sizeof(block_tq1_0), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_tq1_0, + .from_float = quantize_row_tq1_0, + .from_float_ref = (ggml_from_float_t) quantize_row_tq1_0_ref, + .vec_dot = ggml_vec_dot_tq1_0_q8_K, + .vec_dot_type = 
GGML_TYPE_Q8_K, + .nrows = 1, + }, + [GGML_TYPE_TQ2_0] = { + .type_name = "tq2_0", + .blck_size = QK_K, + .type_size = sizeof(block_tq2_0), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_tq2_0, + .from_float = quantize_row_tq2_0, + .from_float_ref = (ggml_from_float_t) quantize_row_tq2_0_ref, + .vec_dot = ggml_vec_dot_tq2_0_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, +}; + +// For internal test use +ggml_type_traits_t ggml_internal_get_type_traits(enum ggml_type type) { + GGML_ASSERT(type < GGML_TYPE_COUNT); + return type_traits[type]; +} + +// +// simd mappings +// + +// we define a common set of C macros which map to specific intrinsics based on the current architecture +// we then implement the fundamental computation operations below using only these macros +// adding support for new architectures requires to define the corresponding SIMD macros +// +// GGML_F32_STEP / GGML_F16_STEP +// number of elements to process in a single step +// +// GGML_F32_EPR / GGML_F16_EPR +// number of elements to fit in a single register +// + +#if defined(__ARM_NEON) && defined(__ARM_FEATURE_FMA) + +#define GGML_SIMD + +// F32 NEON + +#define GGML_F32_STEP 16 +#define GGML_F32_EPR 4 + +#define GGML_F32x4 float32x4_t +#define GGML_F32x4_ZERO vdupq_n_f32(0.0f) +#define GGML_F32x4_SET1(x) vdupq_n_f32(x) +#define GGML_F32x4_LOAD vld1q_f32 +#define GGML_F32x4_STORE vst1q_f32 +#define GGML_F32x4_FMA(a, b, c) vfmaq_f32(a, b, c) +#define GGML_F32x4_ADD vaddq_f32 +#define GGML_F32x4_MUL vmulq_f32 +#define GGML_F32x4_REDUCE_ONE(x) vaddvq_f32(x) +#define GGML_F32x4_REDUCE(res, x) \ +{ \ + int offset = GGML_F32_ARR >> 1; \ + for (int i = 0; i < offset; ++i) { \ + (x)[i] = vaddq_f32((x)[i], (x)[offset+i]); \ + } \ + offset >>= 1; \ + for (int i = 0; i < offset; ++i) { \ + (x)[i] = vaddq_f32((x)[i], (x)[offset+i]); \ + } \ + offset >>= 1; \ + for (int i = 0; i < offset; ++i) { \ + (x)[i] = vaddq_f32((x)[i], (x)[offset+i]); \ + } \ + (res) = 
GGML_F32x4_REDUCE_ONE((x)[0]); \ +} + +#define GGML_F32_VEC GGML_F32x4 +#define GGML_F32_VEC_ZERO GGML_F32x4_ZERO +#define GGML_F32_VEC_SET1 GGML_F32x4_SET1 +#define GGML_F32_VEC_LOAD GGML_F32x4_LOAD +#define GGML_F32_VEC_STORE GGML_F32x4_STORE +#define GGML_F32_VEC_FMA GGML_F32x4_FMA +#define GGML_F32_VEC_ADD GGML_F32x4_ADD +#define GGML_F32_VEC_MUL GGML_F32x4_MUL +#define GGML_F32_VEC_REDUCE GGML_F32x4_REDUCE + +// F16 NEON + +#if defined(__ARM_FEATURE_FP16_VECTOR_ARITHMETIC) + #define GGML_F16_STEP 32 + #define GGML_F16_EPR 8 + + #define GGML_F16x8 float16x8_t + #define GGML_F16x8_ZERO vdupq_n_f16(0.0f) + #define GGML_F16x8_SET1(x) vdupq_n_f16(x) + #define GGML_F16x8_LOAD(x) vld1q_f16((const ggml_fp16_internal_t *)(x)) + #define GGML_F16x8_STORE vst1q_f16 + #define GGML_F16x8_FMA(a, b, c) vfmaq_f16(a, b, c) + #define GGML_F16x8_ADD vaddq_f16 + #define GGML_F16x8_MUL vmulq_f16 + #define GGML_F16x8_REDUCE(res, x) \ + do { \ + int offset = GGML_F16_ARR >> 1; \ + for (int i = 0; i < offset; ++i) { \ + (x)[i] = vaddq_f16((x)[i], (x)[offset+i]); \ + } \ + offset >>= 1; \ + for (int i = 0; i < offset; ++i) { \ + (x)[i] = vaddq_f16((x)[i], (x)[offset+i]); \ + } \ + offset >>= 1; \ + for (int i = 0; i < offset; ++i) { \ + (x)[i] = vaddq_f16((x)[i], (x)[offset+i]); \ + } \ + const float32x4_t t0 = vcvt_f32_f16(vget_low_f16 ((x)[0])); \ + const float32x4_t t1 = vcvt_f32_f16(vget_high_f16((x)[0])); \ + (res) = (ggml_float) vaddvq_f32(vaddq_f32(t0, t1)); \ + } while (0) + + #define GGML_F16_VEC GGML_F16x8 + #define GGML_F16_VEC_ZERO GGML_F16x8_ZERO + #define GGML_F16_VEC_SET1 GGML_F16x8_SET1 + #define GGML_F16_VEC_LOAD(p, i) GGML_F16x8_LOAD(p) + #define GGML_F16_VEC_STORE(p, r, i) GGML_F16x8_STORE((ggml_fp16_internal_t *)(p), (r)[i]) + #define GGML_F16_VEC_FMA GGML_F16x8_FMA + #define GGML_F16_VEC_ADD GGML_F16x8_ADD + #define GGML_F16_VEC_MUL GGML_F16x8_MUL + #define GGML_F16_VEC_REDUCE GGML_F16x8_REDUCE +#else + // if FP16 vector arithmetic is not supported, we use FP32 
instead + // and take advantage of the vcvt_ functions to convert to/from FP16 + + #define GGML_F16_STEP 16 + #define GGML_F16_EPR 4 + + #define GGML_F32Cx4 float32x4_t + #define GGML_F32Cx4_ZERO vdupq_n_f32(0.0f) + #define GGML_F32Cx4_SET1(x) vdupq_n_f32(x) + #define GGML_F32Cx4_LOAD(x) vcvt_f32_f16(vld1_f16((const ggml_fp16_internal_t *)(x))) + #define GGML_F32Cx4_STORE(x, y) vst1_f16(x, vcvt_f16_f32(y)) + #define GGML_F32Cx4_FMA(a, b, c) vfmaq_f32(a, b, c) + #define GGML_F32Cx4_ADD vaddq_f32 + #define GGML_F32Cx4_MUL vmulq_f32 + #define GGML_F32Cx4_REDUCE GGML_F32x4_REDUCE + + #define GGML_F16_VEC GGML_F32Cx4 + #define GGML_F16_VEC_ZERO GGML_F32Cx4_ZERO + #define GGML_F16_VEC_SET1 GGML_F32Cx4_SET1 + #define GGML_F16_VEC_LOAD(p, i) GGML_F32Cx4_LOAD(p) + #define GGML_F16_VEC_STORE(p, r, i) GGML_F32Cx4_STORE((ggml_fp16_internal_t *)(p), r[i]) + #define GGML_F16_VEC_FMA GGML_F32Cx4_FMA + #define GGML_F16_VEC_ADD GGML_F32Cx4_ADD + #define GGML_F16_VEC_MUL GGML_F32Cx4_MUL + #define GGML_F16_VEC_REDUCE GGML_F32Cx4_REDUCE +#endif + +#elif defined(__AVX512F__) + +#define GGML_SIMD + +// F32 AVX512 + +#define GGML_F32_STEP 64 +#define GGML_F32_EPR 16 + +#define GGML_F32x16 __m512 +#define GGML_F32x16_ZERO _mm512_setzero_ps() +#define GGML_F32x16_SET1(x) _mm512_set1_ps(x) +#define GGML_F32x16_LOAD _mm512_loadu_ps +#define GGML_F32x16_STORE _mm512_storeu_ps +// _mm512_fmadd_ps is defined in AVX512F so no guard is required +#define GGML_F32x16_FMA(a, b, c) _mm512_fmadd_ps(b, c, a) +#define GGML_F32x16_ADD _mm512_add_ps +#define GGML_F32x16_MUL _mm512_mul_ps +#define GGML_F32x16_REDUCE(res, x) \ +do { \ + int offset = GGML_F32_ARR >> 1; \ + for (int i = 0; i < offset; ++i) { \ + x[i] = _mm512_add_ps(x[i], x[offset+i]); \ + } \ + offset >>= 1; \ + for (int i = 0; i < offset; ++i) { \ + x[i] = _mm512_add_ps(x[i], x[offset+i]); \ + } \ + offset >>= 1; \ + for (int i = 0; i < offset; ++i) { \ + x[i] = _mm512_add_ps(x[i], x[offset+i]); \ + } \ + res = _mm512_reduce_add_ps(x[0]); \ 
+} while (0) + +// TODO: is this optimal ? + +#define GGML_F32_VEC GGML_F32x16 +#define GGML_F32_VEC_ZERO GGML_F32x16_ZERO +#define GGML_F32_VEC_SET1 GGML_F32x16_SET1 +#define GGML_F32_VEC_LOAD GGML_F32x16_LOAD +#define GGML_F32_VEC_STORE GGML_F32x16_STORE +#define GGML_F32_VEC_FMA GGML_F32x16_FMA +#define GGML_F32_VEC_ADD GGML_F32x16_ADD +#define GGML_F32_VEC_MUL GGML_F32x16_MUL +#define GGML_F32_VEC_REDUCE GGML_F32x16_REDUCE + +// F16 AVX512 + +// F16 AVX + +#define GGML_F16_STEP 64 +#define GGML_F16_EPR 16 + +// AVX512 has FP16 extension (AVX512_FP16) but I don't have it on my machine so I use FP32 instead + +#define GGML_F32Cx16 __m512 +#define GGML_F32Cx16_ZERO _mm512_setzero_ps() +#define GGML_F32Cx16_SET1(x) _mm512_set1_ps(x) + +// unlike _mm256_cvt intrinsics that require F16C, _mm512_cvt is defined in AVX512F +// so F16C guard isn't required +#define GGML_F32Cx16_LOAD(x) _mm512_cvtph_ps(_mm256_loadu_si256((const __m256i *)(x))) +#define GGML_F32Cx16_STORE(x, y) _mm256_storeu_si256((__m256i *)(x), _mm512_cvtps_ph(y, 0)) + +#define GGML_F32Cx16_FMA(a, b, c) _mm512_fmadd_ps(b, c, a) +#define GGML_F32Cx16_ADD _mm512_add_ps +#define GGML_F32Cx16_MUL _mm512_mul_ps +#define GGML_F32Cx16_REDUCE(res, x) \ +do { \ + int offset = GGML_F32_ARR >> 1; \ + for (int i = 0; i < offset; ++i) { \ + x[i] = _mm512_add_ps(x[i], x[offset+i]); \ + } \ + offset >>= 1; \ + for (int i = 0; i < offset; ++i) { \ + x[i] = _mm512_add_ps(x[i], x[offset+i]); \ + } \ + offset >>= 1; \ + for (int i = 0; i < offset; ++i) { \ + x[i] = _mm512_add_ps(x[i], x[offset+i]); \ + } \ + res = _mm512_reduce_add_ps(x[0]); \ +} while (0) + +#define GGML_F16_VEC GGML_F32Cx16 +#define GGML_F16_VEC_ZERO GGML_F32Cx16_ZERO +#define GGML_F16_VEC_SET1 GGML_F32Cx16_SET1 +#define GGML_F16_VEC_LOAD(p, i) GGML_F32Cx16_LOAD(p) +#define GGML_F16_VEC_STORE(p, r, i) GGML_F32Cx16_STORE(p, r[i]) +#define GGML_F16_VEC_FMA GGML_F32Cx16_FMA +#define GGML_F16_VEC_ADD GGML_F32Cx16_ADD +#define GGML_F16_VEC_MUL 
GGML_F32Cx16_MUL +#define GGML_F16_VEC_REDUCE GGML_F32Cx16_REDUCE + +#elif defined(__AVX__) + +#define GGML_SIMD + +// F32 AVX + +#define GGML_F32_STEP 32 +#define GGML_F32_EPR 8 + +#define GGML_F32x8 __m256 +#define GGML_F32x8_ZERO _mm256_setzero_ps() +#define GGML_F32x8_SET1(x) _mm256_set1_ps(x) +#define GGML_F32x8_LOAD _mm256_loadu_ps +#define GGML_F32x8_STORE _mm256_storeu_ps +#if defined(__FMA__) + #define GGML_F32x8_FMA(a, b, c) _mm256_fmadd_ps(b, c, a) +#else + #define GGML_F32x8_FMA(a, b, c) _mm256_add_ps(_mm256_mul_ps(b, c), a) +#endif +#define GGML_F32x8_ADD _mm256_add_ps +#define GGML_F32x8_MUL _mm256_mul_ps +#define GGML_F32x8_REDUCE(res, x) \ +do { \ + int offset = GGML_F32_ARR >> 1; \ + for (int i = 0; i < offset; ++i) { \ + x[i] = _mm256_add_ps(x[i], x[offset+i]); \ + } \ + offset >>= 1; \ + for (int i = 0; i < offset; ++i) { \ + x[i] = _mm256_add_ps(x[i], x[offset+i]); \ + } \ + offset >>= 1; \ + for (int i = 0; i < offset; ++i) { \ + x[i] = _mm256_add_ps(x[i], x[offset+i]); \ + } \ + const __m128 t0 = _mm_add_ps(_mm256_castps256_ps128(x[0]), \ + _mm256_extractf128_ps(x[0], 1)); \ + const __m128 t1 = _mm_hadd_ps(t0, t0); \ + res = (ggml_float) _mm_cvtss_f32(_mm_hadd_ps(t1, t1)); \ +} while (0) +// TODO: is this optimal ? 
// SIMD mappings for AVX, POWER9 (VSX), WASM SIMD128, and SSE3.
// Each arch section defines the same macro "interface" (GGML_F32_VEC_*,
// GGML_F16_VEC_*) so the generic vector kernels below compile unchanged
// on every target. Note the FMA argument order: FMA(a, b, c) == b*c + a.

#define GGML_F32_VEC        GGML_F32x8
#define GGML_F32_VEC_ZERO   GGML_F32x8_ZERO
#define GGML_F32_VEC_SET1   GGML_F32x8_SET1
#define GGML_F32_VEC_LOAD   GGML_F32x8_LOAD
#define GGML_F32_VEC_STORE  GGML_F32x8_STORE
#define GGML_F32_VEC_FMA    GGML_F32x8_FMA
#define GGML_F32_VEC_ADD    GGML_F32x8_ADD
#define GGML_F32_VEC_MUL    GGML_F32x8_MUL
#define GGML_F32_VEC_REDUCE GGML_F32x8_REDUCE

// F16 AVX

#define GGML_F16_STEP 32
#define GGML_F16_EPR  8

// F16 arithmetic is not supported by AVX, so we use F32 instead

#define GGML_F32Cx8             __m256
#define GGML_F32Cx8_ZERO        _mm256_setzero_ps()
#define GGML_F32Cx8_SET1(x)     _mm256_set1_ps(x)

#if defined(__F16C__)
// the _mm256_cvt intrinsics require F16C
#define GGML_F32Cx8_LOAD(x)     _mm256_cvtph_ps(_mm_loadu_si128((const __m128i *)(x)))
#define GGML_F32Cx8_STORE(x, y) _mm_storeu_si128((__m128i *)(x), _mm256_cvtps_ph(y, 0))
#else
// scalar fallback: convert 8 halves through a stack buffer when F16C is absent
static inline __m256 __avx_f32cx8_load(ggml_fp16_t *x) {
    float tmp[8];

    for (int i = 0; i < 8; i++) {
        tmp[i] = GGML_FP16_TO_FP32(x[i]);
    }

    return _mm256_loadu_ps(tmp);
}
static inline void __avx_f32cx8_store(ggml_fp16_t *x, __m256 y) {
    float arr[8];

    _mm256_storeu_ps(arr, y);

    for (int i = 0; i < 8; i++)
        x[i] = GGML_FP32_TO_FP16(arr[i]);
}
#define GGML_F32Cx8_LOAD(x)     __avx_f32cx8_load(x)
#define GGML_F32Cx8_STORE(x, y) __avx_f32cx8_store(x, y)
#endif

#define GGML_F32Cx8_FMA         GGML_F32x8_FMA
#define GGML_F32Cx8_ADD         _mm256_add_ps
#define GGML_F32Cx8_MUL         _mm256_mul_ps
#define GGML_F32Cx8_REDUCE      GGML_F32x8_REDUCE

#define GGML_F16_VEC                GGML_F32Cx8
#define GGML_F16_VEC_ZERO           GGML_F32Cx8_ZERO
#define GGML_F16_VEC_SET1           GGML_F32Cx8_SET1
#define GGML_F16_VEC_LOAD(p, i)     GGML_F32Cx8_LOAD(p)
#define GGML_F16_VEC_STORE(p, r, i) GGML_F32Cx8_STORE(p, r[i])
#define GGML_F16_VEC_FMA            GGML_F32Cx8_FMA
#define GGML_F16_VEC_ADD            GGML_F32Cx8_ADD
#define GGML_F16_VEC_MUL            GGML_F32Cx8_MUL
#define GGML_F16_VEC_REDUCE         GGML_F32Cx8_REDUCE

#elif defined(__POWER9_VECTOR__)

#define GGML_SIMD

// F32 POWER9

#define GGML_F32_STEP 32
#define GGML_F32_EPR  4

#define GGML_F32x4              vector float
#define GGML_F32x4_ZERO         0.0f
#define GGML_F32x4_SET1         vec_splats
#define GGML_F32x4_LOAD(p)      vec_xl(0, p)
#define GGML_F32x4_STORE(p, r)  vec_xst(r, 0, p)
#define GGML_F32x4_FMA(a, b, c) vec_madd(b, c, a)
#define GGML_F32x4_ADD          vec_add
#define GGML_F32x4_MUL          vec_mul
// pairwise tree reduction of GGML_F32_ARR registers into a scalar
#define GGML_F32x4_REDUCE(res, x)              \
{                                              \
    int offset = GGML_F32_ARR >> 1;            \
    for (int i = 0; i < offset; ++i) {         \
        x[i] = vec_add(x[i], x[offset+i]);     \
    }                                          \
    offset >>= 1;                              \
    for (int i = 0; i < offset; ++i) {         \
        x[i] = vec_add(x[i], x[offset+i]);     \
    }                                          \
    offset >>= 1;                              \
    for (int i = 0; i < offset; ++i) {         \
        x[i] = vec_add(x[i], x[offset+i]);     \
    }                                          \
    res = vec_extract(x[0], 0) +               \
          vec_extract(x[0], 1) +               \
          vec_extract(x[0], 2) +               \
          vec_extract(x[0], 3);                \
}

#define GGML_F32_VEC        GGML_F32x4
#define GGML_F32_VEC_ZERO   GGML_F32x4_ZERO
#define GGML_F32_VEC_SET1   GGML_F32x4_SET1
#define GGML_F32_VEC_LOAD   GGML_F32x4_LOAD
#define GGML_F32_VEC_STORE  GGML_F32x4_STORE
#define GGML_F32_VEC_FMA    GGML_F32x4_FMA
#define GGML_F32_VEC_ADD    GGML_F32x4_ADD
#define GGML_F32_VEC_MUL    GGML_F32x4_MUL
#define GGML_F32_VEC_REDUCE GGML_F32x4_REDUCE

// F16 POWER9
#define GGML_F16_STEP       GGML_F32_STEP
#define GGML_F16_EPR        GGML_F32_EPR
#define GGML_F16_VEC        GGML_F32x4
#define GGML_F16_VEC_ZERO   GGML_F32x4_ZERO
#define GGML_F16_VEC_SET1   GGML_F32x4_SET1
#define GGML_F16_VEC_FMA    GGML_F32x4_FMA
#define GGML_F16_VEC_ADD    GGML_F32x4_ADD
#define GGML_F16_VEC_MUL    GGML_F32x4_MUL
#define GGML_F16_VEC_REDUCE GGML_F32x4_REDUCE
// Use vec_xl, not vec_ld, in case the load address is not aligned.
#define GGML_F16_VEC_LOAD(p, i) (i & 0x1) ?                   \
  vec_extract_fp32_from_shorth(vec_xl(0, p - GGML_F16_EPR)) : \
  vec_extract_fp32_from_shortl(vec_xl(0, p))
#define GGML_ENDIAN_BYTE(i) ((unsigned char *)&(uint16_t){1})[i]
#define GGML_F16_VEC_STORE(p, r, i)                             \
  if (i & 0x1)                                                  \
    vec_xst(vec_pack_to_short_fp32(r[i - GGML_ENDIAN_BYTE(1)],  \
                                   r[i - GGML_ENDIAN_BYTE(0)]), \
            0, p - GGML_F16_EPR)

#elif defined(__wasm_simd128__)

#define GGML_SIMD

// F32 WASM

#define GGML_F32_STEP 16
#define GGML_F32_EPR  4

#define GGML_F32x4              v128_t
#define GGML_F32x4_ZERO         wasm_f32x4_splat(0.0f)
#define GGML_F32x4_SET1(x)      wasm_f32x4_splat(x)
#define GGML_F32x4_LOAD         wasm_v128_load
#define GGML_F32x4_STORE        wasm_v128_store
// WASM SIMD128 has no fused multiply-add; emulate with mul + add
#define GGML_F32x4_FMA(a, b, c) wasm_f32x4_add(wasm_f32x4_mul(b, c), a)
#define GGML_F32x4_ADD          wasm_f32x4_add
#define GGML_F32x4_MUL          wasm_f32x4_mul
#define GGML_F32x4_REDUCE(res, x)                  \
{                                                  \
    int offset = GGML_F32_ARR >> 1;                \
    for (int i = 0; i < offset; ++i) {             \
        x[i] = wasm_f32x4_add(x[i], x[offset+i]);  \
    }                                              \
    offset >>= 1;                                  \
    for (int i = 0; i < offset; ++i) {             \
        x[i] = wasm_f32x4_add(x[i], x[offset+i]);  \
    }                                              \
    offset >>= 1;                                  \
    for (int i = 0; i < offset; ++i) {             \
        x[i] = wasm_f32x4_add(x[i], x[offset+i]);  \
    }                                              \
    res = wasm_f32x4_extract_lane(x[0], 0) +       \
          wasm_f32x4_extract_lane(x[0], 1) +       \
          wasm_f32x4_extract_lane(x[0], 2) +       \
          wasm_f32x4_extract_lane(x[0], 3);        \
}

#define GGML_F32_VEC        GGML_F32x4
#define GGML_F32_VEC_ZERO   GGML_F32x4_ZERO
#define GGML_F32_VEC_SET1   GGML_F32x4_SET1
#define GGML_F32_VEC_LOAD   GGML_F32x4_LOAD
#define GGML_F32_VEC_STORE  GGML_F32x4_STORE
#define GGML_F32_VEC_FMA    GGML_F32x4_FMA
#define GGML_F32_VEC_ADD    GGML_F32x4_ADD
#define GGML_F32_VEC_MUL    GGML_F32x4_MUL
#define GGML_F32_VEC_REDUCE GGML_F32x4_REDUCE

// F16 WASM

#define GGML_F16_STEP 16
#define GGML_F16_EPR  4

// f16 lanes are widened to f32 through a stack buffer (no native f16 loads)
inline static v128_t __wasm_f16x4_load(const ggml_fp16_t * p) {
    float tmp[4];

    tmp[0] = GGML_FP16_TO_FP32(p[0]);
    tmp[1] = GGML_FP16_TO_FP32(p[1]);
    tmp[2] = GGML_FP16_TO_FP32(p[2]);
    tmp[3] = GGML_FP16_TO_FP32(p[3]);

    return wasm_v128_load(tmp);
}

inline static void __wasm_f16x4_store(ggml_fp16_t * p, v128_t x) {
    float tmp[4];

    wasm_v128_store(tmp, x);

    p[0] = GGML_FP32_TO_FP16(tmp[0]);
    p[1] = GGML_FP32_TO_FP16(tmp[1]);
    p[2] = GGML_FP32_TO_FP16(tmp[2]);
    p[3] = GGML_FP32_TO_FP16(tmp[3]);
}

#define GGML_F16x4             v128_t
#define GGML_F16x4_ZERO        wasm_f32x4_splat(0.0f)
#define GGML_F16x4_SET1(x)     wasm_f32x4_splat(x)
#define GGML_F16x4_LOAD(x)     __wasm_f16x4_load(x)
#define GGML_F16x4_STORE(x, y) __wasm_f16x4_store(x, y)
#define GGML_F16x4_FMA         GGML_F32x4_FMA
#define GGML_F16x4_ADD         wasm_f32x4_add
#define GGML_F16x4_MUL         wasm_f32x4_mul
#define GGML_F16x4_REDUCE(res, x)                  \
{                                                  \
    int offset = GGML_F16_ARR >> 1;                \
    for (int i = 0; i < offset; ++i) {             \
        x[i] = wasm_f32x4_add(x[i], x[offset+i]);  \
    }                                              \
    offset >>= 1;                                  \
    for (int i = 0; i < offset; ++i) {             \
        x[i] = wasm_f32x4_add(x[i], x[offset+i]);  \
    }                                              \
    offset >>= 1;                                  \
    for (int i = 0; i < offset; ++i) {             \
        x[i] = wasm_f32x4_add(x[i], x[offset+i]);  \
    }                                              \
    res = wasm_f32x4_extract_lane(x[0], 0) +       \
          wasm_f32x4_extract_lane(x[0], 1) +       \
          wasm_f32x4_extract_lane(x[0], 2) +       \
          wasm_f32x4_extract_lane(x[0], 3);        \
}

#define GGML_F16_VEC                GGML_F16x4
#define GGML_F16_VEC_ZERO           GGML_F16x4_ZERO
#define GGML_F16_VEC_SET1           GGML_F16x4_SET1
#define GGML_F16_VEC_LOAD(p, i)     GGML_F16x4_LOAD(p)
#define GGML_F16_VEC_STORE(p, r, i) GGML_F16x4_STORE(p, r[i])
#define GGML_F16_VEC_FMA            GGML_F16x4_FMA
#define GGML_F16_VEC_ADD            GGML_F16x4_ADD
#define GGML_F16_VEC_MUL            GGML_F16x4_MUL
#define GGML_F16_VEC_REDUCE         GGML_F16x4_REDUCE

#elif defined(__SSE3__)

#define GGML_SIMD

// F32 SSE

#define GGML_F32_STEP 32
#define GGML_F32_EPR  4

#define GGML_F32x4         __m128
#define GGML_F32x4_ZERO    _mm_setzero_ps()
#define GGML_F32x4_SET1(x) _mm_set1_ps(x)
#define GGML_F32x4_LOAD    _mm_loadu_ps
#define GGML_F32x4_STORE   _mm_storeu_ps
#if defined(__FMA__)
    // TODO: Does this work?
    #define GGML_F32x4_FMA(a, b, c) _mm_fmadd_ps(b, c, a)
#else
    #define GGML_F32x4_FMA(a, b, c) _mm_add_ps(_mm_mul_ps(b, c), a)
#endif
#define GGML_F32x4_ADD _mm_add_ps
#define GGML_F32x4_MUL _mm_mul_ps
#define GGML_F32x4_REDUCE(res, x)                        \
{                                                        \
    int offset = GGML_F32_ARR >> 1;                      \
    for (int i = 0; i < offset; ++i) {                   \
        x[i] = _mm_add_ps(x[i], x[offset+i]);            \
    }                                                    \
    offset >>= 1;                                        \
    for (int i = 0; i < offset; ++i) {                   \
        x[i] = _mm_add_ps(x[i], x[offset+i]);            \
    }                                                    \
    offset >>= 1;                                        \
    for (int i = 0; i < offset; ++i) {                   \
        x[i] = _mm_add_ps(x[i], x[offset+i]);            \
    }                                                    \
    const __m128 t0 = _mm_hadd_ps(x[0], x[0]);           \
    res = (ggml_float) _mm_cvtss_f32(_mm_hadd_ps(t0, t0)); \
}
// TODO: is this optimal ?

#define GGML_F32_VEC        GGML_F32x4
#define GGML_F32_VEC_ZERO   GGML_F32x4_ZERO
#define GGML_F32_VEC_SET1   GGML_F32x4_SET1
#define GGML_F32_VEC_LOAD   GGML_F32x4_LOAD
#define GGML_F32_VEC_STORE  GGML_F32x4_STORE
#define GGML_F32_VEC_FMA    GGML_F32x4_FMA
#define GGML_F32_VEC_ADD    GGML_F32x4_ADD
#define GGML_F32_VEC_MUL    GGML_F32x4_MUL
#define GGML_F32_VEC_REDUCE GGML_F32x4_REDUCE

// F16 SSE

#define GGML_F16_STEP 32
#define GGML_F16_EPR  4

// scalar f16<->f32 conversion through a stack buffer (SSE has no f16 ops)
static inline __m128 __sse_f16x4_load(ggml_fp16_t *x) {
    float tmp[4];

    tmp[0] = GGML_FP16_TO_FP32(x[0]);
    tmp[1] = GGML_FP16_TO_FP32(x[1]);
    tmp[2] = GGML_FP16_TO_FP32(x[2]);
    tmp[3] = GGML_FP16_TO_FP32(x[3]);

    return _mm_loadu_ps(tmp);
}

static inline void __sse_f16x4_store(ggml_fp16_t *x, __m128 y) {
    float arr[4];

    _mm_storeu_ps(arr, y);

    x[0] = GGML_FP32_TO_FP16(arr[0]);
    x[1] = GGML_FP32_TO_FP16(arr[1]);
    x[2] = GGML_FP32_TO_FP16(arr[2]);
    x[3] = GGML_FP32_TO_FP16(arr[3]);
}

#define GGML_F32Cx4             __m128
#define GGML_F32Cx4_ZERO        _mm_setzero_ps()
#define GGML_F32Cx4_SET1(x)     _mm_set1_ps(x)
#define GGML_F32Cx4_LOAD(x)     __sse_f16x4_load(x)
#define GGML_F32Cx4_STORE(x, y) __sse_f16x4_store(x, y)
#define GGML_F32Cx4_FMA         GGML_F32x4_FMA
#define GGML_F32Cx4_ADD         _mm_add_ps
#define GGML_F32Cx4_MUL    _mm_mul_ps
#define GGML_F32Cx4_REDUCE GGML_F32x4_REDUCE

#define GGML_F16_VEC                GGML_F32Cx4
#define GGML_F16_VEC_ZERO           GGML_F32Cx4_ZERO
#define GGML_F16_VEC_SET1           GGML_F32Cx4_SET1
#define GGML_F16_VEC_LOAD(p, i)     GGML_F32Cx4_LOAD(p)
#define GGML_F16_VEC_STORE(p, r, i) GGML_F32Cx4_STORE(p, r[i])
#define GGML_F16_VEC_FMA            GGML_F32Cx4_FMA
#define GGML_F16_VEC_ADD            GGML_F32Cx4_ADD
#define GGML_F16_VEC_MUL            GGML_F32Cx4_MUL
#define GGML_F16_VEC_REDUCE         GGML_F32Cx4_REDUCE

#elif defined(__loongarch_asx)

#define GGML_SIMD

// F32 LASX (LoongArch 256-bit SIMD)
#define GGML_F32_STEP 32
#define GGML_F32_EPR  8

#define GGML_F32x8              __m256
#define GGML_F32x8_ZERO         (__m256)__lasx_xvldi(0)
#define GGML_F32x8_SET1(x)      (__m256)__lasx_xvreplfr2vr_s((x))
#define GGML_F32x8_LOAD(x)      (__m256)__lasx_xvld((x), 0)
#define GGML_F32x8_STORE(x,y)   __lasx_xvst((y), (x), 0)
// fused multiply-add: FMA(a, b, c) == b*c + a
#define GGML_F32x8_FMA(a, b, c) __lasx_xvfmadd_s(b, c, a)
#define GGML_F32x8_ADD          __lasx_xvfadd_s
#define GGML_F32x8_MUL          __lasx_xvfmul_s
// pairwise tree reduction of GGML_F32_ARR registers, then a scalar
// sum of the 8 lanes read back through a float pointer
#define GGML_F32x8_REDUCE(res, x)                                                 \
do {                                                                              \
    int offset = GGML_F32_ARR >> 1;                                               \
    for (int i = 0; i < offset; ++i) {                                            \
        x[i] = __lasx_xvfadd_s(x[i], x[offset+i]);                                \
    }                                                                             \
    offset >>= 1;                                                                 \
    for (int i = 0; i < offset; ++i) {                                            \
        x[i] = __lasx_xvfadd_s(x[i], x[offset+i]);                                \
    }                                                                             \
    offset >>= 1;                                                                 \
    for (int i = 0; i < offset; ++i) {                                            \
        x[i] = __lasx_xvfadd_s(x[i], x[offset+i]);                                \
    }                                                                             \
    float *tmp_p = (float *)&x[0];                                                \
    res = tmp_p[0] + tmp_p[1] + tmp_p[2] + tmp_p[3] + tmp_p[4] + tmp_p[5] + tmp_p[6] + tmp_p[7]; \
} while (0)
// TODO: is this optimal ?
#define GGML_F32_VEC        GGML_F32x8
#define GGML_F32_VEC_ZERO   GGML_F32x8_ZERO
#define GGML_F32_VEC_SET1   GGML_F32x8_SET1
#define GGML_F32_VEC_LOAD   GGML_F32x8_LOAD
#define GGML_F32_VEC_STORE  GGML_F32x8_STORE
#define GGML_F32_VEC_FMA    GGML_F32x8_FMA
#define GGML_F32_VEC_ADD    GGML_F32x8_ADD
#define GGML_F32_VEC_MUL    GGML_F32x8_MUL
#define GGML_F32_VEC_REDUCE GGML_F32x8_REDUCE

// F16 LASX

#define GGML_F16_STEP 32
#define GGML_F16_EPR  8

// F16 arithmetic is not supported by LASX, so we use F32 instead

#define GGML_F32Cx8         __m256
#define GGML_F32Cx8_ZERO    (__m256)__lasx_xvldi(0)
// NOTE(review): __lasx_xvreplgr2vr_w replicates a general-purpose (integer)
// register word, while the sibling GGML_F32x8_SET1 above uses the float
// replicate xvreplfr2vr_s — passing a float here looks suspect; confirm
// against the LASX intrinsics reference before relying on this path.
#define GGML_F32Cx8_SET1(x) (__m256)__lasx_xvreplgr2vr_w((x))

// widen 8 f16 values to f32 through a stack buffer (no native f16 loads)
static inline __m256 __lasx_f32cx8_load(const ggml_fp16_t * x) {
    float tmp[8];

    for (int i = 0; i < 8; i++) {
        tmp[i] = GGML_FP16_TO_FP32(x[i]);
    }

    return (__m256)__lasx_xvld(tmp, 0);
}
static inline void __lasx_f32cx8_store(ggml_fp16_t * x, __m256 y) {
    float arr[8];

    __lasx_xvst(y, arr, 0);

    for (int i = 0; i < 8; i++) {
        x[i] = GGML_FP32_TO_FP16(arr[i]);
    }
}
#define GGML_F32Cx8_LOAD(x)     __lasx_f32cx8_load(x)
#define GGML_F32Cx8_STORE(x, y) __lasx_f32cx8_store(x, y)

#define GGML_F32Cx8_FMA    GGML_F32x8_FMA
#define GGML_F32Cx8_ADD    __lasx_xvfadd_s
#define GGML_F32Cx8_MUL    __lasx_xvfmul_s
#define GGML_F32Cx8_REDUCE GGML_F32x8_REDUCE

#define GGML_F16_VEC                GGML_F32Cx8
#define GGML_F16_VEC_ZERO           GGML_F32Cx8_ZERO
#define GGML_F16_VEC_SET1           GGML_F32Cx8_SET1
#define GGML_F16_VEC_LOAD(p, i)     GGML_F32Cx8_LOAD(p)
#define GGML_F16_VEC_STORE(p, r, i) GGML_F32Cx8_STORE(p, r[i])
#define GGML_F16_VEC_FMA            GGML_F32Cx8_FMA
#define GGML_F16_VEC_ADD            GGML_F32Cx8_ADD
#define GGML_F16_VEC_MUL            GGML_F32Cx8_MUL
#define GGML_F16_VEC_REDUCE         GGML_F32Cx8_REDUCE

#elif defined(__loongarch_sx)

#define GGML_SIMD

// F32 LSX (LoongArch 128-bit SIMD)

#define GGML_F32_STEP 32
#define GGML_F32_EPR  4

#define GGML_F32x4         __m128
#define GGML_F32x4_ZERO    __lsx_vldi(0)
#define GGML_F32x4_SET1(x) __lsx_vinsgr2vr_w(__lsx_vldi(0),(x), 0)
+#define GGML_F32x4_LOAD(x) __lsx_vld((x), 0) +#define GGML_F32x4_STORE((x),(y)) __lsx_vst((y), (x), 0) +#define GGML_F32x4_FMA(a, b, c) __lsx_vfmadd_s(b, c, a) +#define GGML_F32x4_ADD __lsx_vfadd_s +#define GGML_F32x4_MUL __lsx_vfmul_s +#define GGML_F32x4_REDUCE(res, x) \ +{ \ + int offset = GGML_F32_ARR >> 1; \ + for (int i = 0; i < offset; ++i) { \ + x[i] = __lsx_vfadd_s(x[i], x[offset+i]); \ + } \ + offset >>= 1; \ + for (int i = 0; i < offset; ++i) { \ + x[i] = __lsx_vfadd_s(x[i], x[offset+i]); \ + } \ + offset >>= 1; \ + for (int i = 0; i < offset; ++i) { \ + x[i] = __lsx_vfadd_s(x[i], x[offset+i]); \ + } \ + __m128i tmp = __lsx_vsrli_d((__m128i)x[0], 32); \ + tmp = (__m128i)__lsx_vfadd_s((__m128)tmp, x[0]); \ + tmp = __lsx_vpickev_w(__lsx_vldi(0), tmp); \ + const __m128 t0 = __lsx_vshuf4i_w(tmp, 0x88); \ + tmp = __lsx_vsrli_d((__m128i)t0, 32); \ + tmp = (__m128i)__lsx_vfadd_s((__m128)tmp, t0); \ + tmp = __lsx_vpickev_w(__lsx_vldi(0), tmp); \ + res = (ggml_float) __lsx_vpickve2gr_w(__lsx_vshuf4i_w(tmp, 0x88), 0); \ +} + +#define GGML_F32_VEC GGML_F32x4 +#define GGML_F32_VEC_ZERO GGML_F32x4_ZERO +#define GGML_F32_VEC_SET1 GGML_F32x4_SET1 +#define GGML_F32_VEC_LOAD GGML_F32x4_LOAD +#define GGML_F32_VEC_STORE GGML_F32x4_STORE +#define GGML_F32_VEC_FMA GGML_F32x4_FMA +#define GGML_F32_VEC_ADD GGML_F32x4_ADD +#define GGML_F32_VEC_MUL GGML_F32x4_MUL +#define GGML_F32_VEC_REDUCE GGML_F32x4_REDUCE + +// F16 LSX + +#define GGML_F16_STEP 32 +#define GGML_F16_EPR 4 + +static inline __m128 __lsx_f16x4_load(const ggml_fp16_t * x) { + float tmp[4]; + + tmp[0] = GGML_FP16_TO_FP32(x[0]); + tmp[1] = GGML_FP16_TO_FP32(x[1]); + tmp[2] = GGML_FP16_TO_FP32(x[2]); + tmp[3] = GGML_FP16_TO_FP32(x[3]); + + return __lsx_vld(tmp, 0); +} + +static inline void __lsx_f16x4_store(ggml_fp16_t * x, __m128 y) { + float arr[4]; + + __lsx_vst(y, arr, 0); + + x[0] = GGML_FP32_TO_FP16(arr[0]); + x[1] = GGML_FP32_TO_FP16(arr[1]); + x[2] = GGML_FP32_TO_FP16(arr[2]); + x[3] = 
GGML_FP32_TO_FP16(arr[3]); +} + +#define GGML_F32Cx4 __m128 +#define GGML_F32Cx4_ZERO __lsx_vldi(0) +#define GGML_F32Cx4_SET1(x) __lsx_vinsgr2vr_w(__lsx_vldi(0),(x), 0) +#define GGML_F32Cx4_LOAD(x) __lsx_f16x4_load(x) +#define GGML_F32Cx4_STORE(x, y) __lsx_f16x4_store(x, y) +#define GGML_F32Cx4_FMA GGML_F32x4_FMA +#define GGML_F32Cx4_ADD __lsx_vfadd_s +#define GGML_F32Cx4_MUL __lsx_vfmul_s +#define GGML_F32Cx4_REDUCE GGML_F32x4_REDUCE + +#define GGML_F16_VEC GGML_F32Cx4 +#define GGML_F16_VEC_ZERO GGML_F32Cx4_ZERO +#define GGML_F16_VEC_SET1 GGML_F32Cx4_SET1 +#define GGML_F16_VEC_LOAD(p, i) GGML_F32Cx4_LOAD(p) +#define GGML_F16_VEC_STORE(p, r, i) GGML_F32Cx4_STORE(p, r[i]) +#define GGML_F16_VEC_FMA GGML_F32Cx4_FMA +#define GGML_F16_VEC_ADD GGML_F32Cx4_ADD +#define GGML_F16_VEC_MUL GGML_F32Cx4_MUL +#define GGML_F16_VEC_REDUCE GGML_F32Cx4_REDUCE + +#endif + +// GGML_F32_ARR / GGML_F16_ARR +// number of registers to use per step +#ifdef GGML_SIMD +#define GGML_F32_ARR (GGML_F32_STEP/GGML_F32_EPR) +#define GGML_F16_ARR (GGML_F16_STEP/GGML_F16_EPR) +#endif + +// +// ggml object +// + +struct ggml_object { + size_t offs; + size_t size; + + struct ggml_object * next; + + enum ggml_object_type type; + + char padding[4]; +}; + +static const size_t GGML_OBJECT_SIZE = sizeof(struct ggml_object); + +// +// ggml context +// + +struct ggml_context { + size_t mem_size; + void* mem_buffer; + bool mem_buffer_owned; + bool no_alloc; + bool no_alloc_save; // this is used to save the no_alloc state when using scratch buffers + + int n_objects; + + struct ggml_object * objects_begin; + struct ggml_object * objects_end; + + struct ggml_scratch scratch; + struct ggml_scratch scratch_save; +}; + +struct ggml_context_container { + bool used; + + struct ggml_context context; +}; + +// +// Threading defs +// + +typedef pthread_t ggml_thread_t; + +#if defined(_WIN32) + +typedef CONDITION_VARIABLE ggml_cond_t; +typedef SRWLOCK ggml_mutex_t; + +#define ggml_mutex_init(m) InitializeSRWLock(m) 
+#define ggml_mutex_destroy(m) +#define ggml_mutex_lock(m) AcquireSRWLockExclusive(m) +#define ggml_mutex_unlock(m) ReleaseSRWLockExclusive(m) +#define ggml_mutex_lock_shared(m) AcquireSRWLockShared(m) +#define ggml_mutex_unlock_shared(m) ReleaseSRWLockShared(m) + +#define ggml_cond_init(c) InitializeConditionVariable(c) +#define ggml_cond_destroy(c) +#define ggml_cond_wait(c, m) SleepConditionVariableSRW(c, m, INFINITE, CONDITION_VARIABLE_LOCKMODE_SHARED) +#define ggml_cond_broadcast(c) WakeAllConditionVariable(c) + +#define ggml_thread_create pthread_create +#define ggml_thread_join pthread_join + +#else + +typedef pthread_cond_t ggml_cond_t; +typedef pthread_mutex_t ggml_mutex_t; + +#define ggml_mutex_init(m) pthread_mutex_init(m, NULL) +#define ggml_mutex_destroy(m) pthread_mutex_destroy(m) +#define ggml_mutex_lock(m) pthread_mutex_lock(m) +#define ggml_mutex_unlock(m) pthread_mutex_unlock(m) +#define ggml_mutex_lock_shared(m) pthread_mutex_lock(m) +#define ggml_mutex_unlock_shared(m) pthread_mutex_unlock(m) + +#define ggml_lock_init(x) UNUSED(x) +#define ggml_lock_destroy(x) UNUSED(x) +#if defined(__x86_64__) || (defined(_MSC_VER) && defined(_M_AMD64)) +#define ggml_lock_lock(x) _mm_pause() +#else +#define ggml_lock_lock(x) UNUSED(x) +#endif +#define ggml_lock_unlock(x) UNUSED(x) + +#define GGML_LOCK_INITIALIZER 0 +#define ggml_cond_init(c) pthread_cond_init(c, NULL) +#define ggml_cond_destroy(c) pthread_cond_destroy(c) +#define ggml_cond_wait(c, m) pthread_cond_wait(c, m) +#define ggml_cond_broadcast(c) pthread_cond_broadcast(c) + +#define ggml_thread_create pthread_create +#define ggml_thread_join pthread_join + +#endif + +// Threadpool def +struct ggml_threadpool { + ggml_mutex_t mutex; // mutex for cond.var + ggml_cond_t cond; // cond.var for waiting for new work + + struct ggml_cgraph * cgraph; + struct ggml_cplan * cplan; + + // synchronization primitives + atomic_int n_graph; // incremented when there is work to be done (i.e each graph) + atomic_int 
GGML_CACHE_ALIGN n_barrier; + atomic_int GGML_CACHE_ALIGN n_barrier_passed; + atomic_int current_chunk; // currently processing chunk during Mat_Mul, shared between all the threads. + + // these are atomic as an annotation for thread-sanitizer + atomic_bool stop; // Used for stopping the threadpool altogether + atomic_bool pause; // Used for pausing the threadpool or individual threads + atomic_bool abort; // Used for aborting processing of a graph + + struct ggml_compute_state * workers; // per thread state + int n_threads_max; // number of threads in the pool + atomic_int n_threads_cur; // number of threads used in the current graph + + int32_t prio; // Scheduling priority + uint32_t poll; // Polling level (0 - no polling) + + enum ggml_status ec; +}; + +// Per-thread state +struct ggml_compute_state { +#ifndef GGML_USE_OPENMP + ggml_thread_t thrd; + bool cpumask[GGML_MAX_N_THREADS]; + int last_graph; + bool pending; +#endif + struct ggml_threadpool * threadpool; + int ith; +}; + +struct ggml_compute_params { + // ith = thread index, nth = number of threads + int ith, nth; + + // work buffer for all threads + size_t wsize; + void * wdata; + + struct ggml_threadpool * threadpool; +}; + +// +// fundamental operations +// + +inline static void ggml_vec_set_i8(const int n, int8_t * x, const int8_t v) { for (int i = 0; i < n; ++i) x[i] = v; } + +inline static void ggml_vec_set_i16(const int n, int16_t * x, const int16_t v) { for (int i = 0; i < n; ++i) x[i] = v; } + +inline static void ggml_vec_set_i32(const int n, int32_t * x, const int32_t v) { for (int i = 0; i < n; ++i) x[i] = v; } + +inline static void ggml_vec_set_f16(const int n, ggml_fp16_t * x, const int32_t v) { for (int i = 0; i < n; ++i) x[i] = v; } + +inline static void ggml_vec_set_bf16(const int n, ggml_bf16_t * x, const ggml_bf16_t v) { for (int i = 0; i < n; ++i) x[i] = v; } + +inline static void ggml_vec_add_f32 (const int n, float * z, const float * x, const float * y) { for (int i = 0; i < n; ++i) 
z[i] = x[i] + y[i]; } +inline static void ggml_vec_add1_f32(const int n, float * z, const float * x, const float v) { for (int i = 0; i < n; ++i) z[i] = x[i] + v; } +inline static void ggml_vec_acc_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] += x[i]; } +inline static void ggml_vec_acc1_f32(const int n, float * y, const float v) { for (int i = 0; i < n; ++i) y[i] += v; } +inline static void ggml_vec_sub_f32 (const int n, float * z, const float * x, const float * y) { for (int i = 0; i < n; ++i) z[i] = x[i] - y[i]; } +inline static void ggml_vec_set_f32 (const int n, float * x, const float v) { for (int i = 0; i < n; ++i) x[i] = v; } +inline static void ggml_vec_cpy_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = x[i]; } +inline static void ggml_vec_neg_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = -x[i]; } +inline static void ggml_vec_mul_f32 (const int n, float * z, const float * x, const float * y) { for (int i = 0; i < n; ++i) z[i] = x[i]*y[i]; } +inline static void ggml_vec_div_f32 (const int n, float * z, const float * x, const float * y) { for (int i = 0; i < n; ++i) z[i] = x[i]/y[i]; } + +static void ggml_vec_dot_f32(int n, float * restrict s, size_t bs, const float * restrict x, size_t bx, const float * restrict y, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + +#if defined(GGML_SIMD) + float sumf = 0.0f; + const int np = (n & ~(GGML_F32_STEP - 1)); + + GGML_F32_VEC sum[GGML_F32_ARR] = { GGML_F32_VEC_ZERO }; + + GGML_F32_VEC ax[GGML_F32_ARR]; + GGML_F32_VEC ay[GGML_F32_ARR]; + + for (int i = 0; i < np; i += GGML_F32_STEP) { + for (int j = 0; j < GGML_F32_ARR; j++) { + ax[j] = GGML_F32_VEC_LOAD(x + i + j*GGML_F32_EPR); + ay[j] = GGML_F32_VEC_LOAD(y + i + j*GGML_F32_EPR); + + sum[j] = GGML_F32_VEC_FMA(sum[j], ax[j], ay[j]); + } + } + + // reduce sum0..sum3 to sum0 + GGML_F32_VEC_REDUCE(sumf, sum); + 
+ // leftovers + for (int i = np; i < n; ++i) { + sumf += x[i]*y[i]; + } +#else + // scalar + ggml_float sumf = 0.0; + for (int i = 0; i < n; ++i) { + sumf += (ggml_float)(x[i]*y[i]); + } +#endif + + *s = sumf; +} + +static void ggml_vec_dot_bf16(int n, float * restrict s, size_t bs, ggml_bf16_t * restrict x, size_t bx, ggml_bf16_t * restrict y, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + int i = 0; + ggml_float sumf = 0; + +#if defined(__AVX512BF16__) + __m512 c1 = _mm512_setzero_ps(); + __m512 c2 = _mm512_setzero_ps(); + for (; i + 64 <= n; i += 64) { + c1 = _mm512_dpbf16_ps(c1, m512bh(_mm512_loadu_si512((x + i))), + m512bh(_mm512_loadu_si512((y + i)))); + c2 = _mm512_dpbf16_ps(c2, m512bh(_mm512_loadu_si512((x + i + 32))), + m512bh(_mm512_loadu_si512((y + i + 32)))); + } + sumf += (ggml_float)_mm512_reduce_add_ps(c1); + sumf += (ggml_float)_mm512_reduce_add_ps(c2); + +#elif defined(__AVX512F__) +#define LOAD(p) _mm512_castsi512_ps(_mm512_slli_epi32(_mm512_cvtepu16_epi32(_mm256_loadu_si256((const __m256i *)(p))), 16)) + __m512 c1 = _mm512_setzero_ps(); + __m512 c2 = _mm512_setzero_ps(); + for (; i + 32 <= n; i += 32) { + c1 = _mm512_add_ps(_mm512_mul_ps(LOAD(x + i), LOAD(y + i)), c1); + c2 = _mm512_add_ps(_mm512_mul_ps(LOAD(x + i + 16), LOAD(y + i + 16)), c2); + } + sumf += (ggml_float)_mm512_reduce_add_ps(c1); + sumf += (ggml_float)_mm512_reduce_add_ps(c2); + +#undef LOAD +#elif defined(__AVX2__) +#define LOAD(p) _mm256_castsi256_ps(_mm256_slli_epi32(_mm256_cvtepu16_epi32(_mm_loadu_si128((const __m128i *)(p))), 16)) + __m256 c1 = _mm256_setzero_ps(); + __m256 c2 = _mm256_setzero_ps(); + __m256 c3 = _mm256_setzero_ps(); + __m256 c4 = _mm256_setzero_ps(); + for (; i + 32 <= n; i += 32) { + c1 = _mm256_add_ps(_mm256_mul_ps(LOAD(x + i), LOAD(y + i)), c1); + c2 = _mm256_add_ps(_mm256_mul_ps(LOAD(x + i + 8), LOAD(y + i + 8)), c2); + c3 = _mm256_add_ps(_mm256_mul_ps(LOAD(x + i + 16), LOAD(y + i + 16)), c3); + c4 = 
_mm256_add_ps(_mm256_mul_ps(LOAD(x + i + 24), LOAD(y + i + 24)), c4); + } + __m128 g; + c1 = _mm256_add_ps(_mm256_add_ps(c1, c3), + _mm256_add_ps(c2, c4)); + g = _mm_add_ps(_mm256_extractf128_ps(c1, 1), + _mm256_castps256_ps128(c1)); + g = _mm_add_ps(g, _mm_movehl_ps(g, g)); + g = _mm_add_ss(g, _mm_movehdup_ps(g)); + sumf += (ggml_float)_mm_cvtss_f32(g); + +#undef LOAD +#endif + + for (; i < n; ++i) { + sumf += (ggml_float)(GGML_BF16_TO_FP32(x[i]) * + GGML_BF16_TO_FP32(y[i])); + } + *s = sumf; +} + +static void ggml_vec_dot_f16(int n, float * restrict s, size_t bs, ggml_fp16_t * restrict x, size_t bx, ggml_fp16_t * restrict y, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + ggml_float sumf = 0.0; + +#if defined(GGML_SIMD) + const int np = (n & ~(GGML_F16_STEP - 1)); + + GGML_F16_VEC sum[GGML_F16_ARR] = { GGML_F16_VEC_ZERO }; + + GGML_F16_VEC ax[GGML_F16_ARR]; + GGML_F16_VEC ay[GGML_F16_ARR]; + + for (int i = 0; i < np; i += GGML_F16_STEP) { + for (int j = 0; j < GGML_F16_ARR; j++) { + ax[j] = GGML_F16_VEC_LOAD(x + i + j*GGML_F16_EPR, j); + ay[j] = GGML_F16_VEC_LOAD(y + i + j*GGML_F16_EPR, j); + + sum[j] = GGML_F16_VEC_FMA(sum[j], ax[j], ay[j]); + } + } + + // reduce sum0..sum3 to sum0 + GGML_F16_VEC_REDUCE(sumf, sum); + + // leftovers + for (int i = np; i < n; ++i) { + sumf += (ggml_float)(GGML_FP16_TO_FP32(x[i])*GGML_FP16_TO_FP32(y[i])); + } +#else + for (int i = 0; i < n; ++i) { + sumf += (ggml_float)(GGML_FP16_TO_FP32(x[i])*GGML_FP16_TO_FP32(y[i])); + } +#endif + + *s = sumf; +} + +// compute GGML_VEC_DOT_UNROLL dot products at once +// xs - x row stride in bytes +inline static void ggml_vec_dot_f16_unroll(const int n, const int xs, float * restrict s, void * restrict xv, ggml_fp16_t * restrict y) { + ggml_float sumf[GGML_VEC_DOT_UNROLL] = { 0.0 }; + + ggml_fp16_t * restrict x[GGML_VEC_DOT_UNROLL]; + + for (int i = 0; i < GGML_VEC_DOT_UNROLL; ++i) { + x[i] = (ggml_fp16_t *) ((char *) xv + i*xs); + } + +#if 
defined(GGML_SIMD) + const int np = (n & ~(GGML_F16_STEP - 1)); + + GGML_F16_VEC sum[GGML_VEC_DOT_UNROLL][GGML_F16_ARR] = { { GGML_F16_VEC_ZERO } }; + + GGML_F16_VEC ax[GGML_F16_ARR]; + GGML_F16_VEC ay[GGML_F16_ARR]; + + for (int i = 0; i < np; i += GGML_F16_STEP) { + for (int j = 0; j < GGML_F16_ARR; j++) { + ay[j] = GGML_F16_VEC_LOAD(y + i + j*GGML_F16_EPR, j); + + for (int k = 0; k < GGML_VEC_DOT_UNROLL; ++k) { + ax[j] = GGML_F16_VEC_LOAD(x[k] + i + j*GGML_F16_EPR, j); + + sum[k][j] = GGML_F16_VEC_FMA(sum[k][j], ax[j], ay[j]); + } + } + } + + // reduce sum0..sum3 to sum0 + for (int k = 0; k < GGML_VEC_DOT_UNROLL; ++k) { + GGML_F16_VEC_REDUCE(sumf[k], sum[k]); + } + + // leftovers + for (int i = np; i < n; ++i) { + for (int j = 0; j < GGML_VEC_DOT_UNROLL; ++j) { + sumf[j] += (ggml_float)(GGML_FP16_TO_FP32(x[j][i])*GGML_FP16_TO_FP32(y[i])); + } + } +#else + for (int i = 0; i < n; ++i) { + for (int j = 0; j < GGML_VEC_DOT_UNROLL; ++j) { + sumf[j] += (ggml_float)(GGML_FP16_TO_FP32(x[j][i])*GGML_FP16_TO_FP32(y[i])); + } + } +#endif + + for (int i = 0; i < GGML_VEC_DOT_UNROLL; ++i) { + s[i] = sumf[i]; + } +} + +inline static void ggml_vec_mad_f32(const int n, float * restrict y, const float * restrict x, const float v) { +#if defined(GGML_SIMD) + const int np = (n & ~(GGML_F32_STEP - 1)); + + GGML_F32_VEC vx = GGML_F32_VEC_SET1(v); + + GGML_F32_VEC ax[GGML_F32_ARR]; + GGML_F32_VEC ay[GGML_F32_ARR]; + + for (int i = 0; i < np; i += GGML_F32_STEP) { + for (int j = 0; j < GGML_F32_ARR; j++) { + ax[j] = GGML_F32_VEC_LOAD(x + i + j*GGML_F32_EPR); + ay[j] = GGML_F32_VEC_LOAD(y + i + j*GGML_F32_EPR); + ay[j] = GGML_F32_VEC_FMA(ay[j], ax[j], vx); + + GGML_F32_VEC_STORE(y + i + j*GGML_F32_EPR, ay[j]); + } + } + + // leftovers + for (int i = np; i < n; ++i) { + y[i] += x[i]*v; + } +#else + // scalar + for (int i = 0; i < n; ++i) { + y[i] += x[i]*v; + } +#endif +} + +inline static void ggml_vec_mad_f16(const int n, ggml_fp16_t * restrict y, const ggml_fp16_t * restrict x, const 
float v) { +#if defined(GGML_SIMD) + const int np = (n & ~(GGML_F16_STEP - 1)); + + GGML_F16_VEC vx = GGML_F16_VEC_SET1(v); + + GGML_F16_VEC ax[GGML_F16_ARR]; + GGML_F16_VEC ay[GGML_F16_ARR]; + + for (int i = 0; i < np; i += GGML_F16_STEP) { + for (int j = 0; j < GGML_F16_ARR; j++) { + ax[j] = GGML_F16_VEC_LOAD(x + i + j*GGML_F16_EPR, j); + ay[j] = GGML_F16_VEC_LOAD(y + i + j*GGML_F16_EPR, j); + ay[j] = GGML_F16_VEC_FMA(ay[j], ax[j], vx); + + GGML_F16_VEC_STORE(y + i + j*GGML_F16_EPR, ay, j); + } + } + + // leftovers + for (int i = np; i < n; ++i) { + y[i] = GGML_FP32_TO_FP16(GGML_FP16_TO_FP32(y[i]) + GGML_FP16_TO_FP32(x[i])*v); + } +#else + // scalar + for (int i = 0; i < n; ++i) { + y[i] = GGML_FP32_TO_FP16(GGML_FP16_TO_FP32(y[i]) + GGML_FP16_TO_FP32(x[i])*v); + } +#endif +} + +// xs and vs are byte strides of x and v +inline static void ggml_vec_mad_f32_unroll(const int n, const int xs, const int vs, float * restrict y, const float * restrict xv, const float * restrict vv) { + + const float * restrict x[GGML_VEC_MAD_UNROLL]; + const float * restrict v[GGML_VEC_MAD_UNROLL]; + + for (int i = 0; i < GGML_VEC_MAD_UNROLL; ++i) { + x[i] = (const float *) ((const char *) xv + i*xs); + v[i] = (const float *) ((const char *) vv + i*vs); + } + +#if defined(GGML_SIMD) + const int np = (n & ~(GGML_F32_STEP - 1)); + + GGML_F32_VEC vx[GGML_VEC_MAD_UNROLL]; + + for (int k = 0; k < GGML_VEC_MAD_UNROLL; ++k) { + vx[k] = GGML_F32_VEC_SET1(v[k][0]); + } + + GGML_F32_VEC ax[GGML_VEC_MAD_UNROLL][GGML_F32_ARR]; + GGML_F32_VEC ay[GGML_F32_ARR]; + + for (int i = 0; i < np; i += GGML_F32_STEP) { + for (int j = 0; j < GGML_F32_ARR; j++) { + ay[j] = GGML_F32_VEC_LOAD(y + i + j*GGML_F32_EPR); + + for (int k = 0; k < GGML_VEC_MAD_UNROLL; ++k) { + ax[k][j] = GGML_F32_VEC_LOAD(x[k] + i + j*GGML_F32_EPR); + ay[j] = GGML_F32_VEC_FMA(ay[j], ax[k][j], vx[k]); + } + + GGML_F32_VEC_STORE(y + i + j*GGML_F32_EPR, ay[j]); + } + } + + // leftovers + for (int k = 0; k < GGML_VEC_MAD_UNROLL; ++k) { + for 
(int i = np; i < n; ++i) { + y[i] += x[k][i]*v[k][0]; + } + } +#else + // scalar + for (int k = 0; k < GGML_VEC_MAD_UNROLL; ++k) { + for (int i = 0; i < n; ++i) { + y[i] += x[k][i]*v[k][0]; + } + } +#endif +} + +//inline static void ggml_vec_scale_f32(const int n, float * y, const float v) { for (int i = 0; i < n; ++i) y[i] *= v; } +inline static void ggml_vec_scale_f32(const int n, float * y, const float v) { +#if defined(GGML_USE_ACCELERATE) + vDSP_vsmul(y, 1, &v, y, 1, n); +#elif defined(GGML_SIMD) + const int np = (n & ~(GGML_F32_STEP - 1)); + + GGML_F32_VEC vx = GGML_F32_VEC_SET1(v); + + GGML_F32_VEC ay[GGML_F32_ARR]; + + for (int i = 0; i < np; i += GGML_F32_STEP) { + for (int j = 0; j < GGML_F32_ARR; j++) { + ay[j] = GGML_F32_VEC_LOAD(y + i + j*GGML_F32_EPR); + ay[j] = GGML_F32_VEC_MUL(ay[j], vx); + + GGML_F32_VEC_STORE(y + i + j*GGML_F32_EPR, ay[j]); + } + } + + // leftovers + for (int i = np; i < n; ++i) { + y[i] *= v; + } +#else + // scalar + for (int i = 0; i < n; ++i) { + y[i] *= v; + } +#endif +} + +inline static void ggml_vec_scale_f16(const int n, ggml_fp16_t * y, const float v) { +#if defined(GGML_SIMD) + const int np = (n & ~(GGML_F16_STEP - 1)); + + GGML_F16_VEC vx = GGML_F16_VEC_SET1(v); + + GGML_F16_VEC ay[GGML_F16_ARR]; + + for (int i = 0; i < np; i += GGML_F16_STEP) { + for (int j = 0; j < GGML_F16_ARR; j++) { + ay[j] = GGML_F16_VEC_LOAD(y + i + j*GGML_F16_EPR, j); + ay[j] = GGML_F16_VEC_MUL(ay[j], vx); + + GGML_F16_VEC_STORE(y + i + j*GGML_F16_EPR, ay, j); + } + } + + // leftovers + for (int i = np; i < n; ++i) { + y[i] = GGML_FP32_TO_FP16(GGML_FP16_TO_FP32(y[i])*v); + } +#else + // scalar + for (int i = 0; i < n; ++i) { + y[i] = GGML_FP32_TO_FP16(GGML_FP16_TO_FP32(y[i])*v); + } +#endif +} + +inline static void ggml_vec_norm_f32 (const int n, float * s, const float * x) { ggml_vec_dot_f32(n, s, 0, x, 0, x, 0, 1); *s = sqrtf(*s); } +inline static void ggml_vec_sqr_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] 
= x[i]*x[i]; } +inline static void ggml_vec_sqrt_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = sqrtf(x[i]); } +inline static void ggml_vec_log_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = logf(x[i]); } +inline static void ggml_vec_sin_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = sinf(x[i]); } +inline static void ggml_vec_cos_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = cosf(x[i]); } +inline static void ggml_vec_abs_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = fabsf(x[i]); } +inline static void ggml_vec_sgn_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = (x[i] > 0.f) ? 1.f : ((x[i] < 0.f) ? -1.f : 0.f); } +inline static void ggml_vec_step_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = (x[i] > 0.f) ? 1.f : 0.f; } +inline static void ggml_vec_tanh_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = tanhf(x[i]); } +inline static void ggml_vec_elu_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = (x[i] > 0.f) ? x[i] : expm1f(x[i]); } +inline static void ggml_vec_relu_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = (x[i] > 0.f) ? x[i] : 0.f; } +inline static void ggml_vec_leaky_relu_f32 (const int n, float * y, const float * x, const float ns) { for (int i = 0; i < n; ++i) y[i] = ((x[i] > 0.f) ? x[i] : 0.f) + ns * ((x[i] < 0.0f) ? 
x[i] : 0.f); } +inline static void ggml_vec_sigmoid_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = 1.f / (1.f + expf(-x[i])); } +// TODO: optimize performance +inline static void ggml_vec_hardswish_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = x[i] * fminf(1.0f, fmaxf(0.0f, (x[i] + 3.0f) / 6.0f)); } +inline static void ggml_vec_hardsigmoid_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = fminf(1.0f, fmaxf(0.0f, (x[i] + 3.0f) / 6.0f)); } +inline static void ggml_vec_exp_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = expf(x[i]); } + +static const float GELU_COEF_A = 0.044715f; +static const float GELU_QUICK_COEF = -1.702f; +static const float SQRT_2_OVER_PI = 0.79788456080286535587989211986876f; + +inline static float ggml_gelu_f32(float x) { + return 0.5f*x*(1.0f + tanhf(SQRT_2_OVER_PI*x*(1.0f + GELU_COEF_A*x*x))); +} + +inline static void ggml_vec_gelu_f16(const int n, ggml_fp16_t * y, const ggml_fp16_t * x) { + const uint16_t * i16 = (const uint16_t *) x; + for (int i = 0; i < n; ++i) { + y[i] = ggml_table_gelu_f16[i16[i]]; + } +} + +#ifdef GGML_GELU_FP16 +inline static void ggml_vec_gelu_f32(const int n, float * y, const float * x) { + uint16_t t; + for (int i = 0; i < n; ++i) { + if (x[i] <= -10.0f) { + y[i] = 0.0f; + } else if (x[i] >= 10.0f) { + y[i] = x[i]; + } else { + ggml_fp16_t fp16 = GGML_FP32_TO_FP16(x[i]); + memcpy(&t, &fp16, sizeof(uint16_t)); + y[i] = GGML_FP16_TO_FP32(ggml_table_gelu_f16[t]); + } + } +} +#else +inline static void ggml_vec_gelu_f32(const int n, float * y, const float * x) { + for (int i = 0; i < n; ++i) { + y[i] = ggml_gelu_f32(x[i]); + } +} +#endif + +inline static float ggml_gelu_quick_f32(float x) { + return x*(1.0f/(1.0f+expf(GELU_QUICK_COEF*x))); +} + +//inline static void ggml_vec_gelu_quick_f16(const int n, ggml_fp16_t * y, const ggml_fp16_t * x) { +// const uint16_t * i16 = (const 
uint16_t *) x; +// for (int i = 0; i < n; ++i) { +// y[i] = ggml_table_gelu_quick_f16[i16[i]]; +// } +//} + +#ifdef GGML_GELU_QUICK_FP16 +inline static void ggml_vec_gelu_quick_f32(const int n, float * y, const float * x) { + uint16_t t; + for (int i = 0; i < n; ++i) { + ggml_fp16_t fp16 = GGML_FP32_TO_FP16(x[i]); + memcpy(&t, &fp16, sizeof(uint16_t)); + y[i] = GGML_FP16_TO_FP32(ggml_table_gelu_quick_f16[t]); + } +} +#else +inline static void ggml_vec_gelu_quick_f32(const int n, float * y, const float * x) { + for (int i = 0; i < n; ++i) { + y[i] = ggml_gelu_quick_f32(x[i]); + } +} +#endif + +// Sigmoid Linear Unit (SiLU) function +inline static float ggml_silu_f32(float x) { + return x/(1.0f + expf(-x)); +} + +#if __FINITE_MATH_ONLY__ +#error "some routines in ggml.c require non-finite math arithmetics -- pass -fno-finite-math-only to the compiler to fix" +#error "ref: https://github.com/ggerganov/llama.cpp/pull/7154#issuecomment-2143844461" +#endif + +#if defined(__ARM_NEON) && defined(__aarch64__) + +// adapted from arm limited optimized routine +// the maximum error is 1.45358 plus 0.5 ulps +// numbers above 88.38 will flush to infinity +// numbers beneath -103.97 will flush to zero +inline static float32x4_t ggml_v_expf(float32x4_t x) { + const float32x4_t r = vdupq_n_f32(0x1.8p23f); + const float32x4_t z = vfmaq_f32(r, x, vdupq_n_f32(0x1.715476p+0f)); + const float32x4_t n = vsubq_f32(z, r); + const float32x4_t b = vfmsq_f32(vfmsq_f32(x, n, vdupq_n_f32(0x1.62e4p-1f)), n, + vdupq_n_f32(0x1.7f7d1cp-20f)); + const uint32x4_t e = vshlq_n_u32(vreinterpretq_u32_f32(z), 23); + const float32x4_t k = vreinterpretq_f32_u32(vaddq_u32(e, vreinterpretq_u32_f32(vdupq_n_f32(1)))); + const uint32x4_t c = vcagtq_f32(n, vdupq_n_f32(126)); + const float32x4_t u = vmulq_f32(b, b); + const float32x4_t j = vfmaq_f32( + vmulq_f32(vdupq_n_f32(0x1.ffffecp-1f), b), + vfmaq_f32(vfmaq_f32(vdupq_n_f32(0x1.fffdb6p-2f), vdupq_n_f32(0x1.555e66p-3f), b), + 
vfmaq_f32(vdupq_n_f32(0x1.573e2ep-5f), vdupq_n_f32(0x1.0e4020p-7f), b), u), u); + if (!vpaddd_u64(vreinterpretq_u64_u32(c))) + return vfmaq_f32(k, j, k); + const uint32x4_t d = vandq_u32(vclezq_f32(n), vdupq_n_u32(0x82000000)); + const float32x4_t s1 = vreinterpretq_f32_u32(vaddq_u32(d, vdupq_n_u32(0x7f000000))); + const float32x4_t s2 = vreinterpretq_f32_u32(vsubq_u32(e, d)); + return vbslq_f32(vcagtq_f32(n, vdupq_n_f32(192)), vmulq_f32(s1, s1), + vbslq_f32(c, vmulq_f32(vfmaq_f32(s2, s2, j), s1), vfmaq_f32(k, k, j))); +} + +// computes silu x/(1+exp(-x)) in single precision vector +inline static float32x4_t ggml_v_silu(float32x4_t x) { + const float32x4_t one = vdupq_n_f32(1.0f); + const float32x4_t zero = vdupq_n_f32(0.0f); + const float32x4_t neg_x = vsubq_f32(zero, x); + const float32x4_t exp_neg_x = ggml_v_expf(neg_x); + const float32x4_t one_plus_exp_neg_x = vaddq_f32(one, exp_neg_x); + return vdivq_f32(x, one_plus_exp_neg_x); +} + +#elif defined(__AVX512F__) && defined(__AVX512DQ__) + +// adapted from arm limited optimized routine +// the maximum error is 1.45358 plus 0.5 ulps +// numbers above 88.38 will flush to infinity +// numbers beneath -103.97 will flush to zero +inline static __m512 ggml_v_expf(__m512 x) { + const __m512 r = _mm512_set1_ps(0x1.8p23f); + const __m512 z = _mm512_fmadd_ps(x, _mm512_set1_ps(0x1.715476p+0f), r); + const __m512 n = _mm512_sub_ps(z, r); + const __m512 b = + _mm512_fnmadd_ps(n, _mm512_set1_ps(0x1.7f7d1cp-20f), + _mm512_fnmadd_ps(n, _mm512_set1_ps(0x1.62e4p-1f), x)); + const __mmask16 d = + _mm512_cmp_ps_mask(_mm512_abs_ps(n), _mm512_set1_ps(192), _CMP_GT_OQ); + const __m512 u = _mm512_mul_ps(b, b); + const __m512 j = _mm512_fmadd_ps( + _mm512_fmadd_ps(_mm512_fmadd_ps(_mm512_set1_ps(0x1.0e4020p-7f), b, + _mm512_set1_ps(0x1.573e2ep-5f)), + u, + _mm512_fmadd_ps(_mm512_set1_ps(0x1.555e66p-3f), b, + _mm512_set1_ps(0x1.fffdb6p-2f))), + u, + _mm512_fmadd_ps(_mm512_set1_ps(0x1.ffffecp-1f), b, _mm512_set1_ps(1.0F))); + const __m512 
res = _mm512_scalef_ps(j, n); + if (_mm512_kortestz(d, d)) + return res; + const __m512 zero = _mm512_setzero_ps(); + const __m512 alt = _mm512_mask_blend_ps( + _mm512_cmp_ps_mask(n, zero, _CMP_LE_OQ), _mm512_set1_ps(INFINITY), zero); + return _mm512_mask_blend_ps(d, res, alt); +} + +// computes silu x/(1+exp(-x)) in single precision vector +inline static __m512 ggml_v_silu(__m512 x) { + const __m512 one = _mm512_set1_ps(1); + const __m512 zero = _mm512_setzero_ps(); + const __m512 neg_x = _mm512_sub_ps(zero, x); + const __m512 exp_neg_x = ggml_v_expf(neg_x); + const __m512 one_plus_exp_neg_x = _mm512_add_ps(one, exp_neg_x); + return _mm512_div_ps(x, one_plus_exp_neg_x); +} + +#elif defined(__AVX2__) && defined(__FMA__) + +// adapted from arm limited optimized routine +// the maximum error is 1.45358 plus 0.5 ulps +// numbers above 88.38 will flush to infinity +// numbers beneath -103.97 will flush to zero +inline static __m256 ggml_v_expf(__m256 x) { + const __m256 r = _mm256_set1_ps(0x1.8p23f); + const __m256 z = _mm256_fmadd_ps(x, _mm256_set1_ps(0x1.715476p+0f), r); + const __m256 n = _mm256_sub_ps(z, r); + const __m256 b = _mm256_fnmadd_ps(n, _mm256_set1_ps(0x1.7f7d1cp-20f), + _mm256_fnmadd_ps(n, _mm256_set1_ps(0x1.62e4p-1f), x)); + const __m256i e = _mm256_slli_epi32(_mm256_castps_si256(z), 23); + const __m256 k = _mm256_castsi256_ps( + _mm256_add_epi32(e, _mm256_castps_si256(_mm256_set1_ps(1)))); + const __m256i c = _mm256_castps_si256( + _mm256_cmp_ps(_mm256_andnot_ps(_mm256_set1_ps(-0.f), n), + _mm256_set1_ps(126), _CMP_GT_OQ)); + const __m256 u = _mm256_mul_ps(b, b); + const __m256 j = _mm256_fmadd_ps(_mm256_fmadd_ps(_mm256_fmadd_ps(_mm256_set1_ps(0x1.0e4020p-7f), b, + _mm256_set1_ps(0x1.573e2ep-5f)), u, + _mm256_fmadd_ps(_mm256_set1_ps(0x1.555e66p-3f), b, + _mm256_set1_ps(0x1.fffdb6p-2f))), + u, _mm256_mul_ps(_mm256_set1_ps(0x1.ffffecp-1f), b)); + if (!_mm256_movemask_ps(_mm256_castsi256_ps(c))) + return _mm256_fmadd_ps(j, k, k); + const __m256i g = 
_mm256_and_si256( + _mm256_castps_si256(_mm256_cmp_ps(n, _mm256_setzero_ps(), _CMP_LE_OQ)), + _mm256_set1_epi32(0x82000000u)); + const __m256 s1 = + _mm256_castsi256_ps(_mm256_add_epi32(g, _mm256_set1_epi32(0x7f000000u))); + const __m256 s2 = _mm256_castsi256_ps(_mm256_sub_epi32(e, g)); + const __m256i d = _mm256_castps_si256( + _mm256_cmp_ps(_mm256_andnot_ps(_mm256_set1_ps(-0.f), n), + _mm256_set1_ps(192), _CMP_GT_OQ)); + return _mm256_or_ps( + _mm256_and_ps(_mm256_castsi256_ps(d), _mm256_mul_ps(s1, s1)), + _mm256_andnot_ps( + _mm256_castsi256_ps(d), + _mm256_or_ps( + _mm256_and_ps(_mm256_castsi256_ps(c), + _mm256_mul_ps(_mm256_fmadd_ps(s2, j, s2), s1)), + _mm256_andnot_ps(_mm256_castsi256_ps(c), _mm256_fmadd_ps(k, j, k))))); +} + +// computes silu x/(1+exp(-x)) in single precision vector +inline static __m256 ggml_v_silu(__m256 x) { + const __m256 one = _mm256_set1_ps(1); + const __m256 zero = _mm256_setzero_ps(); + const __m256 neg_x = _mm256_sub_ps(zero, x); + const __m256 exp_neg_x = ggml_v_expf(neg_x); + const __m256 one_plus_exp_neg_x = _mm256_add_ps(one, exp_neg_x); + return _mm256_div_ps(x, one_plus_exp_neg_x); +} + +#elif defined(__SSE2__) // __AVX2__ / __ARM_NEON + +#if defined(__FMA__) +#define MADD128(x, y, z) _mm_fmadd_ps(x, y, z) +#define NMADD128(x, y, z) _mm_fnmadd_ps(x, y, z) +#else +#define MADD128(x, y, z) _mm_add_ps(_mm_mul_ps(x, y), z) +#define NMADD128(x, y, z) _mm_sub_ps(z, _mm_mul_ps(x, y)) +#endif + +// adapted from arm limited optimized routine +// the maximum error is 1.45358 plus 0.5 ulps +// numbers above 88.38 will flush to infinity +// numbers beneath -103.97 will flush to zero +inline static __m128 ggml_v_expf(__m128 x) { + const __m128 r = _mm_set1_ps(0x1.8p23f); + const __m128 z = MADD128(x, _mm_set1_ps(0x1.715476p+0f), r); + const __m128 n = _mm_sub_ps(z, r); + const __m128 b = + NMADD128(n, _mm_set1_ps(0x1.7f7d1cp-20f), NMADD128(n, _mm_set1_ps(0x1.62e4p-1f), x)); + const __m128i e = _mm_slli_epi32(_mm_castps_si128(z), 23); + 
const __m128 k = _mm_castsi128_ps(_mm_add_epi32(e, _mm_castps_si128(_mm_set1_ps(1)))); + const __m128i c = + _mm_castps_si128(_mm_cmpgt_ps(_mm_andnot_ps(_mm_set1_ps(-0.f), n), _mm_set1_ps(126))); + const __m128 u = _mm_mul_ps(b, b); + const __m128 j = + MADD128(MADD128(MADD128(_mm_set1_ps(0x1.0e4020p-7f), b, _mm_set1_ps(0x1.573e2ep-5f)), u, + MADD128(_mm_set1_ps(0x1.555e66p-3f), b, _mm_set1_ps(0x1.fffdb6p-2f))), + u, _mm_mul_ps(_mm_set1_ps(0x1.ffffecp-1f), b)); + if (!_mm_movemask_epi8(c)) + return MADD128(j, k, k); + const __m128i g = _mm_and_si128(_mm_castps_si128(_mm_cmple_ps(n, _mm_setzero_ps())), + _mm_set1_epi32(0x82000000u)); + const __m128 s1 = _mm_castsi128_ps(_mm_add_epi32(g, _mm_set1_epi32(0x7f000000u))); + const __m128 s2 = _mm_castsi128_ps(_mm_sub_epi32(e, g)); + const __m128i d = + _mm_castps_si128(_mm_cmpgt_ps(_mm_andnot_ps(_mm_set1_ps(-0.f), n), _mm_set1_ps(192))); + return _mm_or_ps( + _mm_and_ps(_mm_castsi128_ps(d), _mm_mul_ps(s1, s1)), + _mm_andnot_ps(_mm_castsi128_ps(d), + _mm_or_ps(_mm_and_ps(_mm_castsi128_ps(c), _mm_mul_ps(MADD128(s2, j, s2), s1)), + _mm_andnot_ps(_mm_castsi128_ps(c), MADD128(k, j, k))))); +} + +// computes silu x/(1+exp(-x)) in single precision vector +inline static __m128 ggml_v_silu(__m128 x) { + const __m128 one = _mm_set1_ps(1); + const __m128 zero = _mm_setzero_ps(); + const __m128 neg_x = _mm_sub_ps(zero, x); + const __m128 exp_neg_x = ggml_v_expf(neg_x); + const __m128 one_plus_exp_neg_x = _mm_add_ps(one, exp_neg_x); + return _mm_div_ps(x, one_plus_exp_neg_x); +} + +#endif // __ARM_NEON / __AVX2__ / __SSE2__ + +static void ggml_vec_silu_f32(const int n, float * y, const float * x) { + int i = 0; +#if defined(__AVX512F__) && defined(__AVX512DQ__) + for (; i + 15 < n; i += 16) { + _mm512_storeu_ps(y + i, ggml_v_silu(_mm512_loadu_ps(x + i))); + } +#elif defined(__AVX2__) && defined(__FMA__) + for (; i + 7 < n; i += 8) { + _mm256_storeu_ps(y + i, ggml_v_silu(_mm256_loadu_ps(x + i))); + } +#elif defined(__SSE2__) + for (; i 
+ 3 < n; i += 4) { + _mm_storeu_ps(y + i, ggml_v_silu(_mm_loadu_ps(x + i))); + } +#elif defined(__ARM_NEON) && defined(__aarch64__) + for (; i + 3 < n; i += 4) { + vst1q_f32(y + i, ggml_v_silu(vld1q_f32(x + i))); + } +#endif + for (; i < n; ++i) { + y[i] = ggml_silu_f32(x[i]); + } +} + +static ggml_float ggml_vec_soft_max_f32(const int n, float * y, const float * x, float max) { + int i = 0; + ggml_float sum = 0; +#if defined(__AVX512F__) && defined(__AVX512DQ__) + for (; i + 15 < n; i += 16) { + __m512 val = ggml_v_expf(_mm512_sub_ps(_mm512_loadu_ps(x + i), + _mm512_set1_ps(max))); + _mm512_storeu_ps(y + i, val); + sum += (ggml_float)_mm512_reduce_add_ps(val); + } +#elif defined(__AVX2__) && defined(__FMA__) + for (; i + 7 < n; i += 8) { + __m256 val = ggml_v_expf(_mm256_sub_ps(_mm256_loadu_ps(x + i), + _mm256_set1_ps(max))); + _mm256_storeu_ps(y + i, val); + __m128 val2 = _mm_add_ps(_mm256_extractf128_ps(val, 1), + _mm256_castps256_ps128(val)); + val2 = _mm_add_ps(val2, _mm_movehl_ps(val2, val2)); + val2 = _mm_add_ss(val2, _mm_movehdup_ps(val2)); + sum += (ggml_float)_mm_cvtss_f32(val2); + } +#elif defined(__SSE2__) + for (; i + 3 < n; i += 4) { + __m128 val = ggml_v_expf(_mm_sub_ps(_mm_loadu_ps(x + i), + _mm_set1_ps(max))); + _mm_storeu_ps(y + i, val); +#if defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) + val = _mm_add_ps(val, _mm_movehl_ps(val, val)); + val = _mm_add_ss(val, _mm_movehdup_ps(val)); +#else + __m128 tmp = _mm_shuffle_ps(val, val, _MM_SHUFFLE(2, 3, 0, 1)); + val = _mm_add_ps(val, tmp); + tmp = _mm_movehl_ps(tmp, val); + val = _mm_add_ss(val, tmp); +#endif + sum += (ggml_float)_mm_cvtss_f32(val); + } +#elif defined(__ARM_NEON) && defined(__aarch64__) + for (; i + 3 < n; i += 4) { + float32x4_t val = ggml_v_expf(vsubq_f32(vld1q_f32(x + i), + vdupq_n_f32(max))); + vst1q_f32(y + i, val); + sum += (ggml_float)vaddvq_f32(val); + } +#endif + for (; i < n; ++i) { + float val = expf(x[i] - max); + sum += (ggml_float)val; + y[i] = val; + } + 
return sum; +} + +static ggml_float ggml_vec_log_soft_max_f32(const int n, float * y, const float * x, float max) { + // log(soft_max) = log(soft_max_i / soft_max_sum) = log(soft_max_i) - log(soft_max_sum) = (logit_i - max) - log(soft_max_i) + + int i = 0; + ggml_float sum = 0; + for (; i < n; ++i) { + float val = x[i] - max; + y[i] = val; + sum += (ggml_float)expf(val); + } + return sum = (ggml_float)logf(sum); +} + +inline static float ggml_silu_backward_f32(float x, float dy) { + const float s = 1.0f/(1.0f + expf(-x)); + return dy*s*(1.0f + x*(1.0f - s)); +} + +inline static void ggml_vec_silu_backward_f32(const int n, float * dx, const float * x, const float * dy) { + for (int i = 0; i < n; ++i) { + dx[i] = ggml_silu_backward_f32(x[i], dy[i]); + } +} + +inline static void ggml_vec_sum_f32(const int n, float * s, const float * x) { +#ifndef GGML_USE_ACCELERATE + ggml_float sum = 0.0; + for (int i = 0; i < n; ++i) { + sum += (ggml_float)x[i]; + } + *s = sum; +#else + vDSP_sve(x, 1, s, n); +#endif +} + +inline static void ggml_vec_sum_f32_ggf(const int n, ggml_float * s, const float * x) { + ggml_float sum = 0.0; + for (int i = 0; i < n; ++i) { + sum += (ggml_float)x[i]; + } + *s = sum; +} + +inline static void ggml_vec_sum_f16_ggf(const int n, float * s, const ggml_fp16_t * x) { + float sum = 0.0f; + for (int i = 0; i < n; ++i) { + sum += GGML_FP16_TO_FP32(x[i]); + } + *s = sum; +} + +inline static void ggml_vec_sum_bf16_ggf(const int n, float * s, const ggml_bf16_t * x) { + float sum = 0.0f; + for (int i = 0; i < n; ++i) { + sum += GGML_BF16_TO_FP32(x[i]); + } + *s = sum; +} + +inline static void ggml_vec_max_f32(const int n, float * s, const float * x) { +#ifndef GGML_USE_ACCELERATE + float max = -INFINITY; + for (int i = 0; i < n; ++i) { + max = MAX(max, x[i]); + } + *s = max; +#else + vDSP_maxv(x, 1, s, n); +#endif +} + +inline static void ggml_vec_norm_inv_f32(const int n, float * s, const float * x) { + ggml_vec_norm_f32(n, s, x); + *s = 1.f/(*s); +} + 
+inline static void ggml_vec_argmax_f32(const int n, int * s, const float * x) { + float max = -INFINITY; + int idx = 0; + for (int i = 0; i < n; ++i) { + max = MAX(max, x[i]); + if (max == x[i]) { idx = i; } + } + *s = idx; +} + +// +// data types +// + +static const char * GGML_OP_NAME[GGML_OP_COUNT] = { + "NONE", + + "DUP", + "ADD", + "ADD1", + "ACC", + "SUB", + "MUL", + "DIV", + "SQR", + "SQRT", + "LOG", + "SIN", + "COS", + "SUM", + "SUM_ROWS", + "MEAN", + "ARGMAX", + "REPEAT", + "REPEAT_BACK", + "CONCAT", + "SILU_BACK", + "NORM", + "RMS_NORM", + "RMS_NORM_BACK", + "GROUP_NORM", + + "MUL_MAT", + "MUL_MAT_ID", + "OUT_PROD", + + "SCALE", + "SET", + "CPY", + "CONT", + "RESHAPE", + "VIEW", + "PERMUTE", + "TRANSPOSE", + "GET_ROWS", + "GET_ROWS_BACK", + "DIAG", + "DIAG_MASK_INF", + "DIAG_MASK_ZERO", + "SOFT_MAX", + "SOFT_MAX_BACK", + "ROPE", + "ROPE_BACK", + "CLAMP", + "CONV_TRANSPOSE_1D", + "IM2COL", + "IM2COL_BACK", + "CONV_TRANSPOSE_2D", + "POOL_1D", + "POOL_2D", + "POOL_2D_BACK", + "UPSCALE", + "PAD", + "UNPAD", + "ARANGE", + "TIMESTEP_EMBEDDING", + "ARGSORT", + "LEAKY_RELU", + + "FLASH_ATTN_EXT", + "FLASH_ATTN_BACK", + "SSM_CONV", + "SSM_SCAN", + "WIN_PART", + "WIN_UNPART", + "GET_REL_POS", + "ADD_REL_POS", + "RWKV_WKV", + + "UNARY", + + "MAP_UNARY", + "MAP_BINARY", + + "MAP_CUSTOM1_F32", + "MAP_CUSTOM2_F32", + "MAP_CUSTOM3_F32", + + "MAP_CUSTOM1", + "MAP_CUSTOM2", + "MAP_CUSTOM3", + + "CROSS_ENTROPY_LOSS", + "CROSS_ENTROPY_LOSS_BACK", + "OPT_STEP_ADAMW", +}; + +static_assert(GGML_OP_COUNT == 81, "GGML_OP_COUNT != 81"); + +static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { + "none", + + "x", + "x+y", + "x+y", + "view(x,nb,offset)+=y->x", + "x-y", + "x*y", + "x/y", + "x^2", + "√x", + "log(x)", + "sin(x)", + "cos(x)", + "Σx", + "Σx_k", + "Σx/n", + "argmax(x)", + "repeat(x)", + "repeat_back(x)", + "concat(x, y)", + "silu_back(x)", + "norm(x)", + "rms_norm(x)", + "rms_norm_back(x)", + "group_norm(x)", + + "X*Y", + "X[i]*Y", + "X*Y", + + "x*v", + "y-\\>view(x)", + 
"x-\\>y", + "cont(x)", + "reshape(x)", + "view(x)", + "permute(x)", + "transpose(x)", + "get_rows(x)", + "get_rows_back(x)", + "diag(x)", + "diag_mask_inf(x)", + "diag_mask_zero(x)", + "soft_max(x)", + "soft_max_back(x)", + "rope(x)", + "rope_back(x)", + "clamp(x)", + "conv_transpose_1d(x)", + "im2col(x)", + "im2col_back(x)", + "conv_transpose_2d(x)", + "pool_1d(x)", + "pool_2d(x)", + "pool_2d_back(x)", + "upscale(x)", + "pad(x)", + "unpad(x)", + "arange(start, stop, step)", + "timestep_embedding(timesteps, dim, max_period)", + "argsort(x)", + "leaky_relu(x)", + + "flash_attn_ext(x)", + "flash_attn_back(x)", + "ssm_conv(x)", + "ssm_scan(x)", + "win_part(x)", + "win_unpart(x)", + "get_rel_pos(x)", + "add_rel_pos(x)", + "rwkv_wkv(k, v, r, tf, td, s)", + + "unary(x)", + + "f(x)", + "f(x,y)", + + "custom_f32(x)", + "custom_f32(x,y)", + "custom_f32(x,y,z)", + + "custom(x)", + "custom(x,y)", + "custom(x,y,z)", + + "cross_entropy_loss(x,y)", + "cross_entropy_loss_back(x,y)", + "adamw(x)", +}; + +static_assert(GGML_OP_COUNT == 81, "GGML_OP_COUNT != 81"); + +static_assert(GGML_OP_POOL_COUNT == 2, "GGML_OP_POOL_COUNT != 2"); + + +static const char * GGML_UNARY_OP_NAME[GGML_UNARY_OP_COUNT] = { + "ABS", + "SGN", + "NEG", + "STEP", + "TANH", + "ELU", + "RELU", + "SIGMOID", + "GELU", + "GELU_QUICK", + "SILU", + "HARDSWISH", + "HARDSIGMOID", + "EXP", +}; + +static_assert(GGML_UNARY_OP_COUNT == 14, "GGML_UNARY_OP_COUNT != 14"); + + +static_assert(sizeof(struct ggml_object)%GGML_MEM_ALIGN == 0, "ggml_object size must be a multiple of GGML_MEM_ALIGN"); +static_assert(sizeof(struct ggml_tensor)%GGML_MEM_ALIGN == 0, "ggml_tensor size must be a multiple of GGML_MEM_ALIGN"); + +// Helpers for polling loops +#if defined(__aarch64__) && ( defined(__clang__) || defined(__GNUC__) ) +static inline void ggml_thread_cpu_relax(void) { + __asm__ volatile("yield" ::: "memory"); +} +#elif defined(__x86_64__) +static inline void ggml_thread_cpu_relax(void) { + _mm_pause(); +} +#else +static inline 
void ggml_thread_cpu_relax(void) {;} +#endif + +// +// NUMA support +// + +#define GGML_NUMA_MAX_NODES 8 +#define GGML_NUMA_MAX_CPUS 512 + +struct ggml_numa_node { + uint32_t cpus[GGML_NUMA_MAX_CPUS]; // hardware threads on this node + uint32_t n_cpus; +}; + +struct ggml_numa_nodes { + enum ggml_numa_strategy numa_strategy; + struct ggml_numa_node nodes[GGML_NUMA_MAX_NODES]; + uint32_t n_nodes; + uint32_t total_cpus; // hardware threads on system + uint32_t current_node; // node on which main process is execting +#if defined(__gnu_linux__) + cpu_set_t cpuset; // cpuset from numactl +#else + uint32_t cpuset; // no NUMA support outside of Linux at this time. Use a portable datatype +#endif +}; + +// +// ggml state +// + +struct ggml_state { + struct ggml_context_container contexts[GGML_MAX_CONTEXTS]; + struct ggml_numa_nodes numa; +}; + +// global state +static struct ggml_state g_state; +static atomic_flag g_state_critical = ATOMIC_FLAG_INIT; + +// critical section via spin lock +inline static void ggml_critical_section_start(void) { + while (atomic_flag_test_and_set(&g_state_critical)) { + // spin + sched_yield(); + } +} + +static void ggml_barrier(struct ggml_threadpool * tp) { + int n_threads = atomic_load_explicit(&tp->n_threads_cur, memory_order_relaxed); + if (n_threads == 1) { + return; + } + +#ifdef GGML_USE_OPENMP + #pragma omp barrier +#else + int n_passed = atomic_load_explicit(&tp->n_barrier_passed, memory_order_relaxed); + + // enter barrier (full seq-cst fence) + int n_barrier = atomic_fetch_add_explicit(&tp->n_barrier, 1, memory_order_seq_cst); + + if (n_barrier == (n_threads - 1)) { + // last thread + atomic_store_explicit(&tp->n_barrier, 0, memory_order_relaxed); + + // exit barrier (fill seq-cst fence) + atomic_fetch_add_explicit(&tp->n_barrier_passed, 1, memory_order_seq_cst); + return; + } + + // wait for other threads + while (atomic_load_explicit(&tp->n_barrier_passed, memory_order_relaxed) == n_passed) { + ggml_thread_cpu_relax(); + } + + // 
exit barrier (full seq-cst fence) + // TSAN doesn't support standalone fence yet, we use a dummy read-modify-write instead + #ifdef GGML_TSAN_ENABLED + atomic_fetch_add_explicit(&tp->n_barrier_passed, 0, memory_order_seq_cst); + #else + atomic_thread_fence(memory_order_seq_cst); + #endif +#endif +} + +// TODO: make this somehow automatically executed +// some sort of "sentry" mechanism +inline static void ggml_critical_section_end(void) { + atomic_flag_clear(&g_state_critical); +} + +#if defined(__gnu_linux__) +static cpu_set_t ggml_get_numa_affinity(void) { + cpu_set_t cpuset; + pthread_t thread; + thread = pthread_self(); + CPU_ZERO(&cpuset); + pthread_getaffinity_np(thread, sizeof(cpu_set_t), &cpuset); + return cpuset; +} +#else +static uint32_t ggml_get_numa_affinity(void) { + return 0; // no NUMA support +} +#endif + +void ggml_numa_init(enum ggml_numa_strategy numa_flag) { + if (g_state.numa.n_nodes > 0) { + fprintf(stderr, "ggml_numa_init: NUMA already initialized\n"); + + return; + } + +#if defined(__gnu_linux__) + struct stat st; + char path[256]; + int rv; + + // set numa scheme + g_state.numa.numa_strategy = numa_flag; + + GGML_PRINT_DEBUG("numa strategy %u\n",g_state.numa.numa_strategy); + + g_state.numa.cpuset = ggml_get_numa_affinity(); + + // enumerate nodes + while (g_state.numa.n_nodes < GGML_NUMA_MAX_NODES) { + rv = snprintf(path, sizeof(path), "/sys/devices/system/node/node%u", g_state.numa.n_nodes); + GGML_ASSERT(rv > 0 && (unsigned)rv < sizeof(path)); + if (stat(path, &st) != 0) { break; } + ++g_state.numa.n_nodes; + } + + // enumerate CPUs + while (g_state.numa.total_cpus < GGML_NUMA_MAX_CPUS) { + rv = snprintf(path, sizeof(path), "/sys/devices/system/cpu/cpu%u", g_state.numa.total_cpus); + GGML_ASSERT(rv > 0 && (unsigned)rv < sizeof(path)); + if (stat(path, &st) != 0) { break; } + ++g_state.numa.total_cpus; + } + + GGML_PRINT_DEBUG("found %u numa nodes, %u CPUs\n", g_state.numa.n_nodes, g_state.numa.total_cpus); + + // figure out which node 
we're on + uint current_cpu; + int getcpu_ret = 0; +#if __GLIBC__ > 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ > 28) || defined(__COSMOPOLITAN__) + getcpu_ret = getcpu(¤t_cpu, &g_state.numa.current_node); +#else + // old glibc doesn't have a wrapper for this call. Fall back on direct syscall +# if !defined(SYS_getcpu) && defined(SYS_get_cpu) +# define SYS_getcpu SYS_get_cpu // some older glibc versions use this name +# endif + getcpu_ret = syscall(SYS_getcpu, ¤t_cpu, &g_state.numa.current_node); +#endif + + if (g_state.numa.n_nodes < 1 || g_state.numa.total_cpus < 1 || getcpu_ret != 0) { + g_state.numa.n_nodes = 0; + return; + } + + GGML_PRINT_DEBUG("found our process on numa node %u, CPU %u\n", g_state.numa.current_node, current_cpu); + + for (uint32_t n = 0; n < g_state.numa.n_nodes; ++n) { + struct ggml_numa_node * node = &g_state.numa.nodes[n]; + GGML_PRINT_DEBUG("CPUs on node %u:", n); + node->n_cpus = 0; + for (uint32_t c = 0; c < g_state.numa.total_cpus; ++c) { + rv = snprintf(path, sizeof(path), "/sys/devices/system/node/node%u/cpu%u", n, c); + GGML_ASSERT(rv > 0 && (unsigned)rv < sizeof(path)); + if (stat(path, &st) == 0) { + node->cpus[node->n_cpus++] = c; + GGML_PRINT_DEBUG(" %u", c); + } + } + GGML_PRINT_DEBUG("\n"); + } + + if (ggml_is_numa()) { + FILE *fptr = fopen("/proc/sys/kernel/numa_balancing", "r"); + if (fptr != NULL) { + char buf[42]; + if (fgets(buf, sizeof(buf), fptr) && strncmp(buf, "0\n", sizeof(buf)) != 0) { + GGML_PRINT("WARNING: /proc/sys/kernel/numa_balancing is enabled, this has been observed to impair performance\n"); + } + fclose(fptr); + } + } +#else + UNUSED(numa_flag); + // TODO +#endif +} + +bool ggml_is_numa(void) { + return g_state.numa.n_nodes > 1; +} + +//////////////////////////////////////////////////////////////////////////////// + +void ggml_print_object(const struct ggml_object * obj) { + GGML_PRINT(" - ggml_object: type = %d, offset = %zu, size = %zu, next = %p\n", + obj->type, obj->offs, obj->size, (const void *) 
obj->next); +} + +void ggml_print_objects(const struct ggml_context * ctx) { + struct ggml_object * obj = ctx->objects_begin; + + GGML_PRINT("%s: objects in context %p:\n", __func__, (const void *) ctx); + + while (obj != NULL) { + ggml_print_object(obj); + obj = obj->next; + } + + GGML_PRINT("%s: --- end ---\n", __func__); +} + +GGML_CALL int64_t ggml_nelements(const struct ggml_tensor * tensor) { + static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); + + return tensor->ne[0]*tensor->ne[1]*tensor->ne[2]*tensor->ne[3]; +} + +GGML_CALL int64_t ggml_nrows(const struct ggml_tensor * tensor) { + static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); + + return tensor->ne[1]*tensor->ne[2]*tensor->ne[3]; +} + +GGML_CALL size_t ggml_nbytes(const struct ggml_tensor * tensor) { + size_t nbytes; + size_t blck_size = ggml_blck_size(tensor->type); + if (blck_size == 1) { + nbytes = ggml_type_size(tensor->type); + for (int i = 0; i < GGML_MAX_DIMS; ++i) { + nbytes += (tensor->ne[i] - 1)*tensor->nb[i]; + } + } + else { + nbytes = tensor->ne[0]*tensor->nb[0]/blck_size; + for (int i = 1; i < GGML_MAX_DIMS; ++i) { + nbytes += (tensor->ne[i] - 1)*tensor->nb[i]; + } + } + + return nbytes; +} + +size_t ggml_nbytes_pad(const struct ggml_tensor * tensor) { + return GGML_PAD(ggml_nbytes(tensor), GGML_MEM_ALIGN); +} + +GGML_CALL int64_t ggml_blck_size(enum ggml_type type) { + return type_traits[type].blck_size; +} + +GGML_CALL size_t ggml_type_size(enum ggml_type type) { + return type_traits[type].type_size; +} + +GGML_CALL size_t ggml_row_size(enum ggml_type type, int64_t ne) { + assert(ne % ggml_blck_size(type) == 0); + return ggml_type_size(type)*ne/ggml_blck_size(type); +} + +double ggml_type_sizef(enum ggml_type type) { + return ((double)(type_traits[type].type_size))/type_traits[type].blck_size; +} + +GGML_CALL const char * ggml_type_name(enum ggml_type type) { + return type < GGML_TYPE_COUNT ? 
type_traits[type].type_name : "NONE"; +} + +GGML_CALL bool ggml_is_quantized(enum ggml_type type) { + return type_traits[type].is_quantized; +} + +GGML_CALL const char * ggml_op_name(enum ggml_op op) { + return GGML_OP_NAME[op]; +} + +const char * ggml_op_symbol(enum ggml_op op) { + return GGML_OP_SYMBOL[op]; +} + +const char * ggml_unary_op_name(enum ggml_unary_op op) { + return GGML_UNARY_OP_NAME[op]; +} + +GGML_CALL const char * ggml_op_desc(const struct ggml_tensor * t) { + if (t->op == GGML_OP_UNARY) { + enum ggml_unary_op uop = ggml_get_unary_op(t); + return ggml_unary_op_name(uop); + } + return ggml_op_name(t->op); +} + +GGML_CALL size_t ggml_element_size(const struct ggml_tensor * tensor) { + return ggml_type_size(tensor->type); +} + +bool ggml_is_scalar(const struct ggml_tensor * tensor) { + static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); + + return tensor->ne[0] == 1 && tensor->ne[1] == 1 && tensor->ne[2] == 1 && tensor->ne[3] == 1; +} + +bool ggml_is_vector(const struct ggml_tensor * tensor) { + static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); + + return tensor->ne[1] == 1 && tensor->ne[2] == 1 && tensor->ne[3] == 1; +} + +bool ggml_is_matrix(const struct ggml_tensor * tensor) { + static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); + + return tensor->ne[2] == 1 && tensor->ne[3] == 1; +} + +bool ggml_is_3d(const struct ggml_tensor * tensor) { + return tensor->ne[3] == 1; +} + +int ggml_n_dims(const struct ggml_tensor * tensor) { + for (int i = GGML_MAX_DIMS - 1; i >= 1; --i) { + if (tensor->ne[i] > 1) { + return i + 1; + } + } + return 1; +} + +static inline bool ggml_can_mul_mat(const struct ggml_tensor * t0, const struct ggml_tensor * t1) { + static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); + + return (t0->ne[0] == t1->ne[0]) && + (t1->ne[2]%t0->ne[2] == 0) && // verify t0 is broadcastable + (t1->ne[3]%t0->ne[3] == 
0); +} + +static inline bool ggml_can_out_prod(const struct ggml_tensor * t0, const struct ggml_tensor * t1) { + static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); + + return (t0->ne[1] == t1->ne[1]) && + (t1->ne[2]%t0->ne[2] == 0) && // verify t0 is broadcastable + (t1->ne[3]%t0->ne[3] == 0); +} + +enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype) { + enum ggml_type wtype = GGML_TYPE_COUNT; + + switch (ftype) { + case GGML_FTYPE_ALL_F32: wtype = GGML_TYPE_F32; break; + case GGML_FTYPE_MOSTLY_F16: wtype = GGML_TYPE_F16; break; + case GGML_FTYPE_MOSTLY_BF16: wtype = GGML_TYPE_BF16; break; + case GGML_FTYPE_MOSTLY_Q4_0: wtype = GGML_TYPE_Q4_0; break; + case GGML_FTYPE_MOSTLY_Q4_1: wtype = GGML_TYPE_Q4_1; break; + case GGML_FTYPE_MOSTLY_Q5_0: wtype = GGML_TYPE_Q5_0; break; + case GGML_FTYPE_MOSTLY_Q5_1: wtype = GGML_TYPE_Q5_1; break; + case GGML_FTYPE_MOSTLY_Q8_0: wtype = GGML_TYPE_Q8_0; break; + case GGML_FTYPE_MOSTLY_Q2_K: wtype = GGML_TYPE_Q2_K; break; + case GGML_FTYPE_MOSTLY_Q3_K: wtype = GGML_TYPE_Q3_K; break; + case GGML_FTYPE_MOSTLY_Q4_K: wtype = GGML_TYPE_Q4_K; break; + case GGML_FTYPE_MOSTLY_Q5_K: wtype = GGML_TYPE_Q5_K; break; + case GGML_FTYPE_MOSTLY_Q6_K: wtype = GGML_TYPE_Q6_K; break; + case GGML_FTYPE_MOSTLY_IQ2_XXS: wtype = GGML_TYPE_IQ2_XXS; break; + case GGML_FTYPE_MOSTLY_IQ2_XS: wtype = GGML_TYPE_IQ2_XS; break; + case GGML_FTYPE_MOSTLY_IQ3_XXS: wtype = GGML_TYPE_IQ3_XXS; break; + case GGML_FTYPE_MOSTLY_IQ1_S: wtype = GGML_TYPE_IQ1_S; break; + case GGML_FTYPE_MOSTLY_IQ1_M: wtype = GGML_TYPE_IQ1_M; break; + case GGML_FTYPE_MOSTLY_IQ4_NL: wtype = GGML_TYPE_IQ4_NL; break; + case GGML_FTYPE_MOSTLY_IQ4_XS: wtype = GGML_TYPE_IQ4_XS; break; + case GGML_FTYPE_MOSTLY_IQ3_S: wtype = GGML_TYPE_IQ3_S; break; + case GGML_FTYPE_MOSTLY_IQ2_S: wtype = GGML_TYPE_IQ2_S; break; + case GGML_FTYPE_MOSTLY_Q4_0_4_4: wtype = GGML_TYPE_Q4_0_4_4; break; + case GGML_FTYPE_MOSTLY_Q4_0_4_8: wtype = GGML_TYPE_Q4_0_4_8; break; + case 
GGML_FTYPE_MOSTLY_Q4_0_8_8: wtype = GGML_TYPE_Q4_0_8_8; break; + case GGML_FTYPE_UNKNOWN: wtype = GGML_TYPE_COUNT; break; + case GGML_FTYPE_MOSTLY_Q4_1_SOME_F16: wtype = GGML_TYPE_COUNT; break; + } + + GGML_ASSERT(wtype != GGML_TYPE_COUNT); + + return wtype; +} + +size_t ggml_tensor_overhead(void) { + return GGML_OBJECT_SIZE + GGML_TENSOR_SIZE; +} + +GGML_CALL bool ggml_is_transposed(const struct ggml_tensor * tensor) { + return tensor->nb[0] > tensor->nb[1]; +} + +static bool ggml_is_contiguous_n(const struct ggml_tensor * tensor, int n) { + size_t next_nb = ggml_type_size(tensor->type); + if (tensor->ne[0] != ggml_blck_size(tensor->type) && tensor->nb[0] != next_nb) { + return false; + } + next_nb *= tensor->ne[0]/ggml_blck_size(tensor->type); + for (int i = 1; i < GGML_MAX_DIMS; i++) { + if (tensor->ne[i] != 1) { + if (i > n) { + if (tensor->nb[i] != next_nb) { + return false; + } + next_nb *= tensor->ne[i]; + } else { + // this dimension does not need to be contiguous + next_nb = tensor->ne[i]*tensor->nb[i]; + } + } + } + return true; +} + +GGML_CALL bool ggml_is_contiguous(const struct ggml_tensor * tensor) { + return ggml_is_contiguous_0(tensor); +} + +GGML_CALL bool ggml_is_contiguous_0(const struct ggml_tensor * tensor) { + return ggml_is_contiguous_n(tensor, 0); +} + +GGML_CALL bool ggml_is_contiguous_1(const struct ggml_tensor * tensor) { + return ggml_is_contiguous_n(tensor, 1); +} + +GGML_CALL bool ggml_is_contiguous_2(const struct ggml_tensor * tensor) { + return ggml_is_contiguous_n(tensor, 2); +} + +GGML_CALL bool ggml_is_permuted(const struct ggml_tensor * tensor) { + static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); + + return tensor->nb[0] > tensor->nb[1] || tensor->nb[1] > tensor->nb[2] || tensor->nb[2] > tensor->nb[3]; +} + +static inline bool ggml_is_padded_1d(const struct ggml_tensor * tensor) { + static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); + + return + tensor->nb[0] == 
ggml_type_size(tensor->type) && + tensor->nb[2] == tensor->nb[1]*tensor->ne[1] && + tensor->nb[3] == tensor->nb[2]*tensor->ne[2]; +} + +GGML_CALL bool ggml_is_empty(const struct ggml_tensor * tensor) { + for (int i = 0; i < GGML_MAX_DIMS; ++i) { + if (tensor->ne[i] == 0) { + // empty if any dimension has no elements + return true; + } + } + return false; +} + +bool ggml_are_same_shape(const struct ggml_tensor * t0, const struct ggml_tensor * t1) { + static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); + + return + (t0->ne[0] == t1->ne[0]) && + (t0->ne[1] == t1->ne[1]) && + (t0->ne[2] == t1->ne[2]) && + (t0->ne[3] == t1->ne[3]); +} + +bool ggml_are_same_stride(const struct ggml_tensor * t0, const struct ggml_tensor * t1) { + static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); + + return + (t0->nb[0] == t1->nb[0]) && + (t0->nb[1] == t1->nb[1]) && + (t0->nb[2] == t1->nb[2]) && + (t0->nb[3] == t1->nb[3]); +} + +// check if t1 can be represented as a repeatition of t0 +bool ggml_can_repeat(const struct ggml_tensor * t0, const struct ggml_tensor * t1) { + static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); + + return ggml_is_empty(t0) ? 
ggml_is_empty(t1) : + (t1->ne[0]%t0->ne[0] == 0) && + (t1->ne[1]%t0->ne[1] == 0) && + (t1->ne[2]%t0->ne[2] == 0) && + (t1->ne[3]%t0->ne[3] == 0); +} + +static inline bool ggml_can_repeat_rows(const struct ggml_tensor * t0, const struct ggml_tensor * t1) { + static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); + + return (t0->ne[0] == t1->ne[0]) && ggml_can_repeat(t0, t1); +} + +static inline int ggml_up32(int n) { + return (n + 31) & ~31; +} + +//static inline int ggml_up64(int n) { +// return (n + 63) & ~63; +//} + +static inline int ggml_up(int n, int m) { + // assert m is a power of 2 + GGML_ASSERT((m & (m - 1)) == 0); + return (n + m - 1) & ~(m - 1); +} + +// assert that pointer is aligned to GGML_MEM_ALIGN +#define GGML_ASSERT_ALIGNED(ptr) \ + GGML_ASSERT(((uintptr_t) (ptr))%GGML_MEM_ALIGN == 0) + +//////////////////////////////////////////////////////////////////////////////// + +#if defined(__ARM_ARCH) + +#if defined(__linux__) && defined(__aarch64__) +#include +#elif defined(__APPLE__) +#include +#endif + +#if !defined(HWCAP2_I8MM) +#define HWCAP2_I8MM 0 +#endif + +static void ggml_init_arm_arch_features(void) { +#if defined(__linux__) && defined(__aarch64__) + uint32_t hwcap = getauxval(AT_HWCAP); + uint32_t hwcap2 = getauxval(AT_HWCAP2); + + ggml_arm_arch_features.has_neon = !!(hwcap & HWCAP_ASIMD); + ggml_arm_arch_features.has_i8mm = !!(hwcap2 & HWCAP2_I8MM); + ggml_arm_arch_features.has_sve = !!(hwcap & HWCAP_SVE); + +#if defined(__ARM_FEATURE_SVE) + ggml_arm_arch_features.sve_cnt = PR_SVE_VL_LEN_MASK & prctl(PR_SVE_GET_VL); +#endif +#elif defined(__APPLE__) + int oldp = 0; + size_t size = sizeof(oldp); + if (sysctlbyname("hw.optional.AdvSIMD", &oldp, &size, NULL, 0) != 0) { + oldp = 0; + } + ggml_arm_arch_features.has_neon = oldp; + + if (sysctlbyname("hw.optional.arm.FEAT_I8MM", &oldp, &size, NULL, 0) != 0) { + oldp = 0; + } + ggml_arm_arch_features.has_i8mm = oldp; + + ggml_arm_arch_features.has_sve = 0; + 
ggml_arm_arch_features.sve_cnt = 0; +#else +// Run-time CPU feature detection not implemented for this platform, fallback to compile time +#if defined(__ARM_NEON) + ggml_arm_arch_features.has_neon = 1; +#else + ggml_arm_arch_features.has_neon = 0; +#endif + +#if defined(__ARM_FEATURE_MATMUL_INT8) + ggml_arm_arch_features.has_i8mm = 1; +#else + ggml_arm_arch_features.has_i8mm = 0; +#endif + +#if defined(__ARM_FEATURE_SVE) + ggml_arm_arch_features.has_sve = 1; + ggml_arm_arch_features.sve_cnt = 16; +#else + ggml_arm_arch_features.has_sve = 0; + ggml_arm_arch_features.sve_cnt = 0; +#endif +#endif +} +#endif + +struct ggml_context * ggml_init(struct ggml_init_params params) { + // make this function thread safe + ggml_critical_section_start(); + + static bool is_first_call = true; + + if (is_first_call) { + // initialize time system (required on Windows) + ggml_time_init(); + + // initialize GELU, Quick GELU, SILU and EXP F32 tables + { + const uint64_t t_start = ggml_time_us(); UNUSED(t_start); + + for (int i = 0; i < (1 << 16); ++i) { + union { + uint16_t u16; + ggml_fp16_t fp16; + } u = {i}; + float f = ggml_table_f32_f16[i] = GGML_COMPUTE_FP16_TO_FP32(u.fp16); + ggml_table_gelu_f16[i] = GGML_FP32_TO_FP16(ggml_gelu_f32(f)); + ggml_table_gelu_quick_f16[i] = GGML_FP32_TO_FP16(ggml_gelu_quick_f32(f)); + } + + const uint64_t t_end = ggml_time_us(); UNUSED(t_end); + + GGML_PRINT_DEBUG("%s: GELU, Quick GELU, SILU and EXP tables initialized in %f ms\n", __func__, (t_end - t_start)/1000.0f); + } + + // initialize g_state + { + const uint64_t t_start = ggml_time_us(); UNUSED(t_start); + + g_state = (struct ggml_state) { + /*.contexts =*/ { { 0 } }, + /*.numa =*/ { + .n_nodes = 0, + .total_cpus = 0, + }, + }; + + for (int i = 0; i < GGML_MAX_CONTEXTS; ++i) { + g_state.contexts[i].used = false; + } + + const uint64_t t_end = ggml_time_us(); UNUSED(t_end); + + GGML_PRINT_DEBUG("%s: g_state initialized in %f ms\n", __func__, (t_end - t_start)/1000.0f); + } + +#if 
defined(__ARM_ARCH) + ggml_init_arm_arch_features(); +#endif + + is_first_call = false; + } + + // find non-used context in g_state + struct ggml_context * ctx = NULL; + + for (int i = 0; i < GGML_MAX_CONTEXTS; i++) { + if (!g_state.contexts[i].used) { + g_state.contexts[i].used = true; + ctx = &g_state.contexts[i].context; + + GGML_PRINT_DEBUG("%s: found unused context %d\n", __func__, i); + break; + } + } + + if (ctx == NULL) { + GGML_PRINT_DEBUG("%s: no unused context found\n", __func__); + + ggml_critical_section_end(); + + return NULL; + } + + // allow to call ggml_init with 0 size + if (params.mem_size == 0) { + params.mem_size = GGML_MEM_ALIGN; + } + + const size_t mem_size = params.mem_buffer ? params.mem_size : GGML_PAD(params.mem_size, GGML_MEM_ALIGN); + + *ctx = (struct ggml_context) { + /*.mem_size =*/ mem_size, + /*.mem_buffer =*/ params.mem_buffer ? params.mem_buffer : GGML_ALIGNED_MALLOC(mem_size), + /*.mem_buffer_owned =*/ params.mem_buffer ? false : true, + /*.no_alloc =*/ params.no_alloc, + /*.no_alloc_save =*/ params.no_alloc, + /*.n_objects =*/ 0, + /*.objects_begin =*/ NULL, + /*.objects_end =*/ NULL, + /*.scratch =*/ { 0, 0, NULL, }, + /*.scratch_save =*/ { 0, 0, NULL, }, + }; + + GGML_ASSERT(ctx->mem_buffer != NULL); + + GGML_ASSERT_ALIGNED(ctx->mem_buffer); + + GGML_PRINT_DEBUG("%s: context initialized\n", __func__); + + ggml_critical_section_end(); + + return ctx; +} + +void ggml_free(struct ggml_context * ctx) { + if (ctx == NULL) { + return; + } + + // make this function thread safe + ggml_critical_section_start(); + + bool found = false; + + for (int i = 0; i < GGML_MAX_CONTEXTS; i++) { + if (&g_state.contexts[i].context == ctx) { + g_state.contexts[i].used = false; + + GGML_PRINT_DEBUG("%s: context %d has been freed. 
memory used = %zu\n", + __func__, i, ggml_used_mem(ctx)); + + if (ctx->mem_buffer_owned) { + GGML_ALIGNED_FREE(ctx->mem_buffer); + } + + found = true; + break; + } + } + + if (!found) { + GGML_PRINT_DEBUG("%s: context not found\n", __func__); + } + + ggml_critical_section_end(); +} + +size_t ggml_used_mem(const struct ggml_context * ctx) { + return ctx->objects_end == NULL ? 0 : ctx->objects_end->offs + ctx->objects_end->size; +} + +size_t ggml_set_scratch(struct ggml_context * ctx, struct ggml_scratch scratch) { + const size_t result = ctx->scratch.data ? ctx->scratch.offs : 0; + + ctx->scratch = scratch; + + return result; +} + +bool ggml_get_no_alloc(struct ggml_context * ctx) { + return ctx->no_alloc; +} + +void ggml_set_no_alloc(struct ggml_context * ctx, bool no_alloc) { + ctx->no_alloc = no_alloc; +} + +void * ggml_get_mem_buffer(const struct ggml_context * ctx) { + return ctx->mem_buffer; +} + +size_t ggml_get_mem_size(const struct ggml_context * ctx) { + return ctx->mem_size; +} + +size_t ggml_get_max_tensor_size(const struct ggml_context * ctx) { + size_t max_size = 0; + + for (struct ggml_tensor * tensor = ggml_get_first_tensor(ctx); tensor != NULL; tensor = ggml_get_next_tensor(ctx, tensor)) { + size_t bytes = ggml_nbytes(tensor); + max_size = MAX(max_size, bytes); + } + + return max_size; +} + +// IMPORTANT: +// when creating "opt" tensors, always save and load the scratch buffer +// this is an error prone process, but it is necessary to support inplace +// operators when using scratch buffers +// TODO: implement a better way +static void ggml_scratch_save(struct ggml_context * ctx) { + // this is needed to allow opt tensors to store their data + // TODO: again, need to find a better way + ctx->no_alloc_save = ctx->no_alloc; + ctx->no_alloc = false; + + ctx->scratch_save = ctx->scratch; + ctx->scratch.data = NULL; +} + +static void ggml_scratch_load(struct ggml_context * ctx) { + ctx->no_alloc = ctx->no_alloc_save; + + ctx->scratch = ctx->scratch_save; 
+} + +//////////////////////////////////////////////////////////////////////////////// + +static struct ggml_object * ggml_new_object(struct ggml_context * ctx, enum ggml_object_type type, size_t size) { + // always insert objects at the end of the context's memory pool + struct ggml_object * obj_cur = ctx->objects_end; + + const size_t cur_offs = obj_cur == NULL ? 0 : obj_cur->offs; + const size_t cur_size = obj_cur == NULL ? 0 : obj_cur->size; + const size_t cur_end = cur_offs + cur_size; + + // align to GGML_MEM_ALIGN + size_t size_needed = GGML_PAD(size, GGML_MEM_ALIGN); + + char * const mem_buffer = ctx->mem_buffer; + struct ggml_object * const obj_new = (struct ggml_object *)(mem_buffer + cur_end); + + if (cur_end + size_needed + GGML_OBJECT_SIZE > ctx->mem_size) { + GGML_PRINT("%s: not enough space in the context's memory pool (needed %zu, available %zu)\n", + __func__, cur_end + size_needed + GGML_OBJECT_SIZE, ctx->mem_size); + assert(false); + return NULL; + } + + *obj_new = (struct ggml_object) { + .offs = cur_end + GGML_OBJECT_SIZE, + .size = size_needed, + .next = NULL, + .type = type, + }; + + GGML_ASSERT_ALIGNED(mem_buffer + obj_new->offs); + + if (obj_cur != NULL) { + obj_cur->next = obj_new; + } else { + // this is the first object in this context + ctx->objects_begin = obj_new; + } + + ctx->objects_end = obj_new; + + //printf("%s: inserted new object at %zu, size = %zu\n", __func__, cur_end, obj_new->size); + + return obj_new; +} + +static struct ggml_tensor * ggml_new_tensor_impl( + struct ggml_context * ctx, + enum ggml_type type, + int n_dims, + const int64_t * ne, + struct ggml_tensor * view_src, + size_t view_offs) { + + GGML_ASSERT(type >= 0 && type < GGML_TYPE_COUNT); + GGML_ASSERT(n_dims >= 1 && n_dims <= GGML_MAX_DIMS); + + // find the base tensor and absolute offset + if (view_src != NULL && view_src->view_src != NULL) { + view_offs += view_src->view_offs; + view_src = view_src->view_src; + } + + size_t data_size = ggml_row_size(type, 
ne[0]); + for (int i = 1; i < n_dims; i++) { + data_size *= ne[i]; + } + + GGML_ASSERT(view_src == NULL || data_size == 0 || data_size + view_offs <= ggml_nbytes(view_src)); + + void * data = view_src != NULL ? view_src->data : NULL; + if (data != NULL) { + data = (char *) data + view_offs; + } + + size_t obj_alloc_size = 0; + + if (view_src == NULL && !ctx->no_alloc) { + if (ctx->scratch.data != NULL) { + // allocate tensor data in the scratch buffer + if (ctx->scratch.offs + data_size > ctx->scratch.size) { + GGML_PRINT("%s: not enough space in the scratch memory pool (needed %zu, available %zu)\n", + __func__, ctx->scratch.offs + data_size, ctx->scratch.size); + assert(false); + return NULL; + } + + data = (char * const) ctx->scratch.data + ctx->scratch.offs; + + ctx->scratch.offs += data_size; + } else { + // allocate tensor data in the context's memory pool + obj_alloc_size = data_size; + } + } + + struct ggml_object * const obj_new = ggml_new_object(ctx, GGML_OBJECT_TYPE_TENSOR, GGML_TENSOR_SIZE + obj_alloc_size); + GGML_ASSERT(obj_new); + + // TODO: for recoverable errors, we would need to free the data allocated from the scratch buffer here + + struct ggml_tensor * const result = (struct ggml_tensor *)((char *)ctx->mem_buffer + obj_new->offs); + +#ifdef __clang__ + // temporary until ggml_tensor::backend is removed + #pragma clang diagnostic push + #pragma clang diagnostic ignored "-Wdeprecated-declarations" +#endif + + *result = (struct ggml_tensor) { + /*.type =*/ type, + /*.backend =*/ GGML_BACKEND_TYPE_CPU, + /*.buffer =*/ NULL, + /*.ne =*/ { 1, 1, 1, 1 }, + /*.nb =*/ { 0, 0, 0, 0 }, + /*.op =*/ GGML_OP_NONE, + /*.op_params =*/ { 0 }, + /*.flags =*/ 0, + /*.grad =*/ NULL, + /*.src =*/ { NULL }, + /*.view_src =*/ view_src, + /*.view_offs =*/ view_offs, + /*.data =*/ obj_alloc_size > 0 ? 
(void *)(result + 1) : data, + /*.name =*/ { 0 }, + /*.extra =*/ NULL, + ///*.padding =*/ { 0 }, + }; + +#ifdef __clang__ + #pragma clang diagnostic pop +#endif + + // TODO: this should not be needed as long as we don't rely on aligned SIMD loads + //GGML_ASSERT_ALIGNED(result->data); + + for (int i = 0; i < n_dims; i++) { + result->ne[i] = ne[i]; + } + + result->nb[0] = ggml_type_size(type); + result->nb[1] = result->nb[0]*(result->ne[0]/ggml_blck_size(type)); + for (int i = 2; i < GGML_MAX_DIMS; i++) { + result->nb[i] = result->nb[i - 1]*result->ne[i - 1]; + } + + ctx->n_objects++; + + return result; +} + +struct ggml_tensor * ggml_new_tensor( + struct ggml_context * ctx, + enum ggml_type type, + int n_dims, + const int64_t * ne) { + return ggml_new_tensor_impl(ctx, type, n_dims, ne, NULL, 0); +} + +struct ggml_tensor * ggml_new_tensor_1d( + struct ggml_context * ctx, + enum ggml_type type, + int64_t ne0) { + return ggml_new_tensor(ctx, type, 1, &ne0); +} + +struct ggml_tensor * ggml_new_tensor_2d( + struct ggml_context * ctx, + enum ggml_type type, + int64_t ne0, + int64_t ne1) { + const int64_t ne[2] = { ne0, ne1 }; + return ggml_new_tensor(ctx, type, 2, ne); +} + +struct ggml_tensor * ggml_new_tensor_3d( + struct ggml_context * ctx, + enum ggml_type type, + int64_t ne0, + int64_t ne1, + int64_t ne2) { + const int64_t ne[3] = { ne0, ne1, ne2 }; + return ggml_new_tensor(ctx, type, 3, ne); +} + +struct ggml_tensor * ggml_new_tensor_4d( + struct ggml_context * ctx, + enum ggml_type type, + int64_t ne0, + int64_t ne1, + int64_t ne2, + int64_t ne3) { + const int64_t ne[4] = { ne0, ne1, ne2, ne3 }; + return ggml_new_tensor(ctx, type, 4, ne); +} + +struct ggml_tensor * ggml_new_i32(struct ggml_context * ctx, int32_t value) { + ggml_scratch_save(ctx); + + struct ggml_tensor * result = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, 1); + + ggml_scratch_load(ctx); + + ggml_set_i32(result, value); + + return result; +} + +struct ggml_tensor * ggml_new_f32(struct ggml_context * 
ctx, float value) { + ggml_scratch_save(ctx); + + struct ggml_tensor * result = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1); + + ggml_scratch_load(ctx); + + ggml_set_f32(result, value); + + return result; +} + +struct ggml_tensor * ggml_dup_tensor(struct ggml_context * ctx, const struct ggml_tensor * src) { + return ggml_new_tensor(ctx, src->type, GGML_MAX_DIMS, src->ne); +} + +static void ggml_set_op_params(struct ggml_tensor * tensor, const void * params, size_t params_size) { + GGML_ASSERT(tensor != NULL); // silence -Warray-bounds warnings + assert(params_size <= GGML_MAX_OP_PARAMS); + memcpy(tensor->op_params, params, params_size); +} + +static int32_t ggml_get_op_params_i32(const struct ggml_tensor * tensor, uint32_t i) { + assert(i < GGML_MAX_OP_PARAMS / sizeof(int32_t)); + return ((const int32_t *)(tensor->op_params))[i]; +} + +static float ggml_get_op_params_f32(const struct ggml_tensor * tensor, uint32_t i) { + assert(i < GGML_MAX_OP_PARAMS / sizeof(float)); + return ((const float *)(tensor->op_params))[i]; +} + +static void ggml_set_op_params_i32(struct ggml_tensor * tensor, uint32_t i, int32_t value) { + assert(i < GGML_MAX_OP_PARAMS / sizeof(int32_t)); + ((int32_t *)(tensor->op_params))[i] = value; +} + +static void ggml_set_op_params_f32(struct ggml_tensor * tensor, uint32_t i, float value) { + assert(i < GGML_MAX_OP_PARAMS / sizeof(float)); + ((float *)(tensor->op_params))[i] = value; +} + +struct ggml_tensor * ggml_set_zero(struct ggml_tensor * tensor) { + if (tensor->buffer) { + ggml_backend_tensor_memset(tensor, 0, 0, ggml_nbytes(tensor)); + } else { + memset(tensor->data, 0, ggml_nbytes(tensor)); + } + return tensor; +} + +struct ggml_tensor * ggml_set_i32 (struct ggml_tensor * tensor, int32_t value) { + const int n = ggml_nrows(tensor); + const int nc = tensor->ne[0]; + const size_t n1 = tensor->nb[1]; + + char * const data = tensor->data; + + switch (tensor->type) { + case GGML_TYPE_I8: + { + assert(tensor->nb[0] == sizeof(int8_t)); + for (int i 
= 0; i < n; i++) { + ggml_vec_set_i8(nc, (int8_t *)(data + i*n1), value); + } + } break; + case GGML_TYPE_I16: + { + assert(tensor->nb[0] == sizeof(int16_t)); + for (int i = 0; i < n; i++) { + ggml_vec_set_i16(nc, (int16_t *)(data + i*n1), value); + } + } break; + case GGML_TYPE_I32: + { + assert(tensor->nb[0] == sizeof(int32_t)); + for (int i = 0; i < n; i++) { + ggml_vec_set_i32(nc, (int32_t *)(data + i*n1), value); + } + } break; + case GGML_TYPE_F16: + { + assert(tensor->nb[0] == sizeof(ggml_fp16_t)); + for (int i = 0; i < n; i++) { + ggml_vec_set_f16(nc, (ggml_fp16_t *)(data + i*n1), GGML_FP32_TO_FP16(value)); + } + } break; + case GGML_TYPE_BF16: + { + assert(tensor->nb[0] == sizeof(ggml_fp16_t)); + for (int i = 0; i < n; i++) { + ggml_vec_set_bf16(nc, (ggml_bf16_t *)(data + i*n1), GGML_FP32_TO_BF16(value)); + } + } break; + case GGML_TYPE_F32: + { + assert(tensor->nb[0] == sizeof(float)); + for (int i = 0; i < n; i++) { + ggml_vec_set_f32(nc, (float *)(data + i*n1), value); + } + } break; + default: + { + GGML_ABORT("fatal error"); + } + } + + return tensor; +} + +struct ggml_tensor * ggml_set_f32(struct ggml_tensor * tensor, float value) { + const int n = ggml_nrows(tensor); + const int nc = tensor->ne[0]; + const size_t n1 = tensor->nb[1]; + + char * const data = tensor->data; + + switch (tensor->type) { + case GGML_TYPE_I8: + { + assert(tensor->nb[0] == sizeof(int8_t)); + for (int i = 0; i < n; i++) { + ggml_vec_set_i8(nc, (int8_t *)(data + i*n1), value); + } + } break; + case GGML_TYPE_I16: + { + assert(tensor->nb[0] == sizeof(int16_t)); + for (int i = 0; i < n; i++) { + ggml_vec_set_i16(nc, (int16_t *)(data + i*n1), value); + } + } break; + case GGML_TYPE_I32: + { + assert(tensor->nb[0] == sizeof(int32_t)); + for (int i = 0; i < n; i++) { + ggml_vec_set_i32(nc, (int32_t *)(data + i*n1), value); + } + } break; + case GGML_TYPE_F16: + { + assert(tensor->nb[0] == sizeof(ggml_fp16_t)); + for (int i = 0; i < n; i++) { + ggml_vec_set_f16(nc, (ggml_fp16_t 
*)(data + i*n1), GGML_FP32_TO_FP16(value)); + } + } break; + case GGML_TYPE_BF16: + { + assert(tensor->nb[0] == sizeof(ggml_bf16_t)); + for (int i = 0; i < n; i++) { + ggml_vec_set_bf16(nc, (ggml_bf16_t *)(data + i*n1), GGML_FP32_TO_BF16(value)); + } + } break; + case GGML_TYPE_F32: + { + assert(tensor->nb[0] == sizeof(float)); + for (int i = 0; i < n; i++) { + ggml_vec_set_f32(nc, (float *)(data + i*n1), value); + } + } break; + default: + { + GGML_ABORT("fatal error"); + } + } + + return tensor; +} + +void ggml_unravel_index(const struct ggml_tensor * tensor, int64_t i, int64_t * i0, int64_t * i1, int64_t * i2, int64_t * i3) { + const int64_t ne2 = tensor->ne[2]; + const int64_t ne1 = tensor->ne[1]; + const int64_t ne0 = tensor->ne[0]; + + const int64_t i3_ = (i/(ne2*ne1*ne0)); + const int64_t i2_ = (i - i3_*ne2*ne1*ne0)/(ne1*ne0); + const int64_t i1_ = (i - i3_*ne2*ne1*ne0 - i2_*ne1*ne0)/ne0; + const int64_t i0_ = (i - i3_*ne2*ne1*ne0 - i2_*ne1*ne0 - i1_*ne0); + + if (i0) { + * i0 = i0_; + } + if (i1) { + * i1 = i1_; + } + if (i2) { + * i2 = i2_; + } + if (i3) { + * i3 = i3_; + } +} + +int32_t ggml_get_i32_1d(const struct ggml_tensor * tensor, int i) { + if (!ggml_is_contiguous(tensor)) { + int64_t id[4] = { 0, 0, 0, 0 }; + ggml_unravel_index(tensor, i, &id[0], &id[1], &id[2], &id[3]); + return ggml_get_i32_nd(tensor, id[0], id[1], id[2], id[3]); + } + switch (tensor->type) { + case GGML_TYPE_I8: + { + GGML_ASSERT(tensor->nb[0] == sizeof(int8_t)); + return ((int8_t *)(tensor->data))[i]; + } + case GGML_TYPE_I16: + { + GGML_ASSERT(tensor->nb[0] == sizeof(int16_t)); + return ((int16_t *)(tensor->data))[i]; + } + case GGML_TYPE_I32: + { + GGML_ASSERT(tensor->nb[0] == sizeof(int32_t)); + return ((int32_t *)(tensor->data))[i]; + } + case GGML_TYPE_F16: + { + GGML_ASSERT(tensor->nb[0] == sizeof(ggml_fp16_t)); + return GGML_FP16_TO_FP32(((ggml_fp16_t *)(tensor->data))[i]); + } + case GGML_TYPE_BF16: + { + GGML_ASSERT(tensor->nb[0] == sizeof(ggml_bf16_t)); + return 
GGML_BF16_TO_FP32(((ggml_bf16_t *)(tensor->data))[i]); + } + case GGML_TYPE_F32: + { + GGML_ASSERT(tensor->nb[0] == sizeof(float)); + return ((float *)(tensor->data))[i]; + } + default: + { + GGML_ABORT("fatal error"); + } + } +} + +void ggml_set_i32_1d(const struct ggml_tensor * tensor, int i, int32_t value) { + if (!ggml_is_contiguous(tensor)) { + int64_t id[4] = { 0, 0, 0, 0 }; + ggml_unravel_index(tensor, i, &id[0], &id[1], &id[2], &id[3]); + ggml_set_i32_nd(tensor, id[0], id[1], id[2], id[3], value); + return; + } + switch (tensor->type) { + case GGML_TYPE_I8: + { + GGML_ASSERT(tensor->nb[0] == sizeof(int8_t)); + ((int8_t *)(tensor->data))[i] = value; + } break; + case GGML_TYPE_I16: + { + GGML_ASSERT(tensor->nb[0] == sizeof(int16_t)); + ((int16_t *)(tensor->data))[i] = value; + } break; + case GGML_TYPE_I32: + { + GGML_ASSERT(tensor->nb[0] == sizeof(int32_t)); + ((int32_t *)(tensor->data))[i] = value; + } break; + case GGML_TYPE_F16: + { + GGML_ASSERT(tensor->nb[0] == sizeof(ggml_fp16_t)); + ((ggml_fp16_t *)(tensor->data))[i] = GGML_FP32_TO_FP16(value); + } break; + case GGML_TYPE_BF16: + { + GGML_ASSERT(tensor->nb[0] == sizeof(ggml_bf16_t)); + ((ggml_bf16_t *)(tensor->data))[i] = GGML_FP32_TO_BF16(value); + } break; + case GGML_TYPE_F32: + { + GGML_ASSERT(tensor->nb[0] == sizeof(float)); + ((float *)(tensor->data))[i] = value; + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +int32_t ggml_get_i32_nd(const struct ggml_tensor * tensor, int i0, int i1, int i2, int i3) { + void * data = (char *) tensor->data + i0*tensor->nb[0] + i1*tensor->nb[1] + i2*tensor->nb[2] + i3*tensor->nb[3]; + switch (tensor->type) { + case GGML_TYPE_I8: + return ((int8_t *) data)[0]; + case GGML_TYPE_I16: + return ((int16_t *) data)[0]; + case GGML_TYPE_I32: + return ((int32_t *) data)[0]; + case GGML_TYPE_F16: + return GGML_FP16_TO_FP32(((ggml_fp16_t *) data)[0]); + case GGML_TYPE_BF16: + return GGML_BF16_TO_FP32(((ggml_bf16_t *) data)[0]); + case GGML_TYPE_F32: + 
return ((float *) data)[0]; + default: + GGML_ABORT("fatal error"); + } +} + +void ggml_set_i32_nd(const struct ggml_tensor * tensor, int i0, int i1, int i2, int i3, int32_t value) { + void * data = (char *) tensor->data + i0*tensor->nb[0] + i1*tensor->nb[1] + i2*tensor->nb[2] + i3*tensor->nb[3]; + switch (tensor->type) { + case GGML_TYPE_I8: + { + ((int8_t *)(data))[0] = value; + } break; + case GGML_TYPE_I16: + { + ((int16_t *)(data))[0] = value; + } break; + case GGML_TYPE_I32: + { + ((int32_t *)(data))[0] = value; + } break; + case GGML_TYPE_F16: + { + ((ggml_fp16_t *)(data))[0] = GGML_FP32_TO_FP16(value); + } break; + case GGML_TYPE_BF16: + { + ((ggml_bf16_t *)(data))[0] = GGML_FP32_TO_BF16(value); + } break; + case GGML_TYPE_F32: + { + ((float *)(data))[0] = value; + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +float ggml_get_f32_1d(const struct ggml_tensor * tensor, int i) { + if (!ggml_is_contiguous(tensor)) { + int64_t id[4] = { 0, 0, 0, 0 }; + ggml_unravel_index(tensor, i, &id[0], &id[1], &id[2], &id[3]); + return ggml_get_f32_nd(tensor, id[0], id[1], id[2], id[3]); + } + switch (tensor->type) { + case GGML_TYPE_I8: + { + return ((int8_t *)(tensor->data))[i]; + } + case GGML_TYPE_I16: + { + return ((int16_t *)(tensor->data))[i]; + } + case GGML_TYPE_I32: + { + return ((int32_t *)(tensor->data))[i]; + } + case GGML_TYPE_F16: + { + return GGML_FP16_TO_FP32(((ggml_fp16_t *)(tensor->data))[i]); + } + case GGML_TYPE_BF16: + { + return GGML_BF16_TO_FP32(((ggml_bf16_t *)(tensor->data))[i]); + } + case GGML_TYPE_F32: + { + return ((float *)(tensor->data))[i]; + } + default: + { + GGML_ABORT("fatal error"); + } + } +} + +void ggml_set_f32_1d(const struct ggml_tensor * tensor, int i, float value) { + if (!ggml_is_contiguous(tensor)) { + int64_t id[4] = { 0, 0, 0, 0 }; + ggml_unravel_index(tensor, i, &id[0], &id[1], &id[2], &id[3]); + ggml_set_f32_nd(tensor, id[0], id[1], id[2], id[3], value); + return; + } + switch (tensor->type) { + case 
GGML_TYPE_I8: + { + ((int8_t *)(tensor->data))[i] = value; + } break; + case GGML_TYPE_I16: + { + ((int16_t *)(tensor->data))[i] = value; + } break; + case GGML_TYPE_I32: + { + ((int32_t *)(tensor->data))[i] = value; + } break; + case GGML_TYPE_F16: + { + ((ggml_fp16_t *)(tensor->data))[i] = GGML_FP32_TO_FP16(value); + } break; + case GGML_TYPE_BF16: + { + ((ggml_bf16_t *)(tensor->data))[i] = GGML_FP32_TO_BF16(value); + } break; + case GGML_TYPE_F32: + { + ((float *)(tensor->data))[i] = value; + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +float ggml_get_f32_nd(const struct ggml_tensor * tensor, int i0, int i1, int i2, int i3) { + void * data = (char *) tensor->data + i0*tensor->nb[0] + i1*tensor->nb[1] + i2*tensor->nb[2] + i3*tensor->nb[3]; + switch (tensor->type) { + case GGML_TYPE_I8: + return ((int8_t *) data)[0]; + case GGML_TYPE_I16: + return ((int16_t *) data)[0]; + case GGML_TYPE_I32: + return ((int32_t *) data)[0]; + case GGML_TYPE_F16: + return GGML_FP16_TO_FP32(((ggml_fp16_t *) data)[0]); + case GGML_TYPE_BF16: + return GGML_BF16_TO_FP32(((ggml_bf16_t *) data)[0]); + case GGML_TYPE_F32: + return ((float *) data)[0]; + default: + GGML_ABORT("fatal error"); + } +} + +void ggml_set_f32_nd(const struct ggml_tensor * tensor, int i0, int i1, int i2, int i3, float value) { + void * data = (char *) tensor->data + i0*tensor->nb[0] + i1*tensor->nb[1] + i2*tensor->nb[2] + i3*tensor->nb[3]; + switch (tensor->type) { + case GGML_TYPE_I8: + { + ((int8_t *)(data))[0] = value; + } break; + case GGML_TYPE_I16: + { + ((int16_t *)(data))[0] = value; + } break; + case GGML_TYPE_I32: + { + ((int32_t *)(data))[0] = value; + } break; + case GGML_TYPE_F16: + { + ((ggml_fp16_t *)(data))[0] = GGML_FP32_TO_FP16(value); + } break; + case GGML_TYPE_BF16: + { + ((ggml_bf16_t *)(data))[0] = GGML_FP32_TO_BF16(value); + } break; + case GGML_TYPE_F32: + { + ((float *)(data))[0] = value; + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +void * 
ggml_get_data(const struct ggml_tensor * tensor) { + return tensor->data; +} + +float * ggml_get_data_f32(const struct ggml_tensor * tensor) { + assert(tensor->type == GGML_TYPE_F32); + return (float *)(tensor->data); +} + +GGML_CALL enum ggml_unary_op ggml_get_unary_op(const struct ggml_tensor * tensor) { + GGML_ASSERT(tensor->op == GGML_OP_UNARY); + return (enum ggml_unary_op) ggml_get_op_params_i32(tensor, 0); +} + +const char * ggml_get_name(const struct ggml_tensor * tensor) { + return tensor->name; +} + +struct ggml_tensor * ggml_set_name(struct ggml_tensor * tensor, const char * name) { + size_t i; + for (i = 0; i < sizeof(tensor->name) - 1 && name[i] != '\0'; i++) { + tensor->name[i] = name[i]; + } + tensor->name[i] = '\0'; + return tensor; +} + +struct ggml_tensor * ggml_format_name(struct ggml_tensor * tensor, const char * fmt, ...) { + va_list args; + va_start(args, fmt); + vsnprintf(tensor->name, sizeof(tensor->name), fmt, args); + va_end(args); + return tensor; +} + +struct ggml_tensor * ggml_view_tensor( + struct ggml_context * ctx, + struct ggml_tensor * src) { + struct ggml_tensor * result = ggml_new_tensor_impl(ctx, src->type, GGML_MAX_DIMS, src->ne, src, 0); + ggml_format_name(result, "%s (view)", src->name); + + for (int i = 0; i < GGML_MAX_DIMS; i++) { + result->nb[i] = src->nb[i]; + } + + return result; +} + +struct ggml_tensor * ggml_get_first_tensor(const struct ggml_context * ctx) { + struct ggml_object * obj = ctx->objects_begin; + + char * const mem_buffer = ctx->mem_buffer; + + while (obj != NULL) { + if (obj->type == GGML_OBJECT_TYPE_TENSOR) { + return (struct ggml_tensor *)(mem_buffer + obj->offs); + } + + obj = obj->next; + } + + return NULL; +} + +struct ggml_tensor * ggml_get_next_tensor(const struct ggml_context * ctx, struct ggml_tensor * tensor) { + struct ggml_object * obj = (struct ggml_object *) ((char *)tensor - GGML_OBJECT_SIZE); + obj = obj->next; + + char * const mem_buffer = ctx->mem_buffer; + + while (obj != NULL) { + if 
(obj->type == GGML_OBJECT_TYPE_TENSOR) { + return (struct ggml_tensor *)(mem_buffer + obj->offs); + } + + obj = obj->next; + } + + return NULL; +} + +struct ggml_tensor * ggml_get_tensor(struct ggml_context * ctx, const char * name) { + struct ggml_object * obj = ctx->objects_begin; + + char * const mem_buffer = ctx->mem_buffer; + + while (obj != NULL) { + if (obj->type == GGML_OBJECT_TYPE_TENSOR) { + struct ggml_tensor * cur = (struct ggml_tensor *)(mem_buffer + obj->offs); + if (strcmp(cur->name, name) == 0) { + return cur; + } + } + + obj = obj->next; + } + + return NULL; +} + +//////////////////////////////////////////////////////////////////////////////// + +// ggml_dup + +static struct ggml_tensor * ggml_dup_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + bool inplace) { + struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + result->op = GGML_OP_DUP; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_dup( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_dup_impl(ctx, a, false); +} + +struct ggml_tensor * ggml_dup_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_dup_impl(ctx, a, true); +} + +// ggml_add + +static struct ggml_tensor * ggml_add_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + bool inplace) { + GGML_ASSERT(ggml_can_repeat(b, a)); + + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + result->op = GGML_OP_ADD; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +struct ggml_tensor * ggml_add( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + return ggml_add_impl(ctx, a, b, false); +} + +struct ggml_tensor * ggml_add_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + return ggml_add_impl(ctx, a, b, true); +} + +// ggml_add_cast + +static struct ggml_tensor * ggml_add_cast_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + enum ggml_type type) { + // TODO: support less-strict constraint + // GGML_ASSERT(ggml_can_repeat(b, a)); + GGML_ASSERT(ggml_can_repeat_rows(b, a)); + + // currently only supported for quantized input and f16 + GGML_ASSERT(ggml_is_quantized(a->type) || + a->type == GGML_TYPE_F16 || + a->type == GGML_TYPE_BF16); + + struct ggml_tensor * result = ggml_new_tensor(ctx, type, GGML_MAX_DIMS, a->ne); + + result->op = GGML_OP_ADD; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +struct ggml_tensor * ggml_add_cast( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + enum ggml_type type) { + return ggml_add_cast_impl(ctx, a, b, type); +} + +// ggml_add1 + +static struct ggml_tensor * ggml_add1_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + bool inplace) { + GGML_ASSERT(ggml_is_scalar(b)); + GGML_ASSERT(ggml_is_padded_1d(a)); + + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + result->op = GGML_OP_ADD1; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +struct ggml_tensor * ggml_add1( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + return ggml_add1_impl(ctx, a, b, false); +} + +struct ggml_tensor * ggml_add1_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + return ggml_add1_impl(ctx, a, b, true); +} + +// ggml_acc + +static struct ggml_tensor * ggml_acc_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t nb1, + size_t nb2, + size_t nb3, + size_t offset, + bool inplace) { + GGML_ASSERT(ggml_nelements(b) <= ggml_nelements(a)); + GGML_ASSERT(ggml_is_contiguous(a)); + GGML_ASSERT(a->type == GGML_TYPE_F32); + GGML_ASSERT(b->type == GGML_TYPE_F32); + + struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + int32_t params[] = { nb1, nb2, nb3, offset, inplace ? 1 : 0 }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_ACC; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +struct ggml_tensor * ggml_acc( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t nb1, + size_t nb2, + size_t nb3, + size_t offset) { + return ggml_acc_impl(ctx, a, b, nb1, nb2, nb3, offset, false); +} + +struct ggml_tensor * ggml_acc_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t nb1, + size_t nb2, + size_t nb3, + size_t offset) { + return ggml_acc_impl(ctx, a, b, nb1, nb2, nb3, offset, true); +} + +// ggml_sub + +static struct ggml_tensor * ggml_sub_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + bool inplace) { + GGML_ASSERT(ggml_can_repeat(b, a)); + + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + result->op = GGML_OP_SUB; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +struct ggml_tensor * ggml_sub( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + return ggml_sub_impl(ctx, a, b, false); +} + +struct ggml_tensor * ggml_sub_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + return ggml_sub_impl(ctx, a, b, true); +} + +// ggml_mul + +static struct ggml_tensor * ggml_mul_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + bool inplace) { + GGML_ASSERT(ggml_can_repeat(b, a)); + + struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + result->op = GGML_OP_MUL; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +struct ggml_tensor * ggml_mul( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + return ggml_mul_impl(ctx, a, b, false); +} + +struct ggml_tensor * ggml_mul_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + return ggml_mul_impl(ctx, a, b, true); +} + +// ggml_div + +static struct ggml_tensor * ggml_div_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + bool inplace) { + GGML_ASSERT(ggml_can_repeat(b, a)); + + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + result->op = GGML_OP_DIV; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +struct ggml_tensor * ggml_div( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + return ggml_div_impl(ctx, a, b, false); +} + +struct ggml_tensor * ggml_div_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + return ggml_div_impl(ctx, a, b, true); +} + +// ggml_sqr + +static struct ggml_tensor * ggml_sqr_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + bool inplace) { + struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + result->op = GGML_OP_SQR; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_sqr( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_sqr_impl(ctx, a, false); +} + +struct ggml_tensor * ggml_sqr_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_sqr_impl(ctx, a, true); +} + +// ggml_sqrt + +static struct ggml_tensor * ggml_sqrt_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + bool inplace) { + struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + result->op = GGML_OP_SQRT; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_sqrt( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_sqrt_impl(ctx, a, false); +} + +struct ggml_tensor * ggml_sqrt_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_sqrt_impl(ctx, a, true); +} + +// ggml_log + +static struct ggml_tensor * ggml_log_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + bool inplace) { + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + result->op = GGML_OP_LOG; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_log( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_log_impl(ctx, a, false); +} + +struct ggml_tensor * ggml_log_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_log_impl(ctx, a, true); +} + +// ggml_sin + +static struct ggml_tensor * ggml_sin_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + bool inplace) { + struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + result->op = GGML_OP_SIN; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_sin( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_sin_impl(ctx, a, false); +} + +struct ggml_tensor * ggml_sin_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_sin_impl(ctx, a, true); +} + +// ggml_cos + +static struct ggml_tensor * ggml_cos_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + bool inplace) { + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + result->op = GGML_OP_COS; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_cos( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_cos_impl(ctx, a, false); +} + +struct ggml_tensor * ggml_cos_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_cos_impl(ctx, a, true); +} + +// ggml_sum + +struct ggml_tensor * ggml_sum( + struct ggml_context * ctx, + struct ggml_tensor * a) { + struct ggml_tensor * result = ggml_new_tensor_1d(ctx, a->type, 1); + + result->op = GGML_OP_SUM; + result->src[0] = a; + + return result; +} + +// ggml_sum_rows + +struct ggml_tensor * ggml_sum_rows( + struct ggml_context * ctx, + struct ggml_tensor * a) { + int64_t ne[GGML_MAX_DIMS] = { 1 }; + for (int i = 1; i < GGML_MAX_DIMS; ++i) { + ne[i] = a->ne[i]; + } + + struct ggml_tensor * result = ggml_new_tensor(ctx, a->type, GGML_MAX_DIMS, ne); + + result->op = GGML_OP_SUM_ROWS; + result->src[0] = a; + + return result; +} + +// ggml_mean + +struct ggml_tensor * ggml_mean( + struct ggml_context * ctx, + struct ggml_tensor * a) { + int64_t ne[4] = { 1, a->ne[1], a->ne[2], a->ne[3] }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); + + result->op = GGML_OP_MEAN; + result->src[0] = a; + + return result; +} + +// ggml_argmax + +struct ggml_tensor * ggml_argmax( + struct ggml_context * ctx, + struct ggml_tensor * a) { + GGML_ASSERT(ggml_is_matrix(a)); + + struct ggml_tensor * result = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, a->ne[1]); + + result->op = GGML_OP_ARGMAX; + result->src[0] = a; + + return result; +} + +// ggml_repeat + +struct ggml_tensor * ggml_repeat( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + GGML_ASSERT(ggml_can_repeat(a, b)); + + struct ggml_tensor * result = ggml_new_tensor(ctx, a->type, GGML_MAX_DIMS, b->ne); + + result->op = GGML_OP_REPEAT; + result->src[0] = a; + + return result; +} + +// 
ggml_repeat_back + +struct ggml_tensor * ggml_repeat_back( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + GGML_ASSERT(ggml_can_repeat(b, a)); + + struct ggml_tensor * result = ggml_new_tensor(ctx, a->type, GGML_MAX_DIMS, b->ne); + + result->op = GGML_OP_REPEAT_BACK; + result->src[0] = a; + + return result; +} + +// ggml_concat + +struct ggml_tensor * ggml_concat( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int dim) { + GGML_ASSERT(dim >= 0 && dim < GGML_MAX_DIMS); + + int64_t ne[GGML_MAX_DIMS]; + for (int d = 0; d < GGML_MAX_DIMS; ++d) { + if (d == dim) { + ne[d] = a->ne[d] + b->ne[d]; + continue; + } + GGML_ASSERT(a->ne[d] == b->ne[d]); + ne[d] = a->ne[d]; + } + + struct ggml_tensor * result = ggml_new_tensor(ctx, a->type, GGML_MAX_DIMS, ne); + + ggml_set_op_params_i32(result, 0, dim); + + result->op = GGML_OP_CONCAT; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +// ggml_abs + +struct ggml_tensor * ggml_abs( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary(ctx, a, GGML_UNARY_OP_ABS); +} + +struct ggml_tensor * ggml_abs_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary_inplace(ctx, a, GGML_UNARY_OP_ABS); +} + +// ggml_sgn + +struct ggml_tensor * ggml_sgn( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary(ctx, a, GGML_UNARY_OP_SGN); +} + +struct ggml_tensor * ggml_sgn_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary_inplace(ctx, a, GGML_UNARY_OP_SGN); +} + +// ggml_neg + +struct ggml_tensor * ggml_neg( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary(ctx, a, GGML_UNARY_OP_NEG); +} + +struct ggml_tensor * ggml_neg_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary_inplace(ctx, a, GGML_UNARY_OP_NEG); +} + +// ggml_step + +struct ggml_tensor * ggml_step( + struct ggml_context * ctx, 
+ struct ggml_tensor * a) { + return ggml_unary(ctx, a, GGML_UNARY_OP_STEP); +} + +struct ggml_tensor * ggml_step_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary_inplace(ctx, a, GGML_UNARY_OP_STEP); +} + +// ggml_tanh + +struct ggml_tensor * ggml_tanh( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary(ctx, a, GGML_UNARY_OP_TANH); +} + +struct ggml_tensor * ggml_tanh_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary_inplace(ctx, a, GGML_UNARY_OP_TANH); +} + +// ggml_elu + +struct ggml_tensor * ggml_elu( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary(ctx, a, GGML_UNARY_OP_ELU); +} + +struct ggml_tensor * ggml_elu_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary_inplace(ctx, a, GGML_UNARY_OP_ELU); +} + +// ggml_relu + +struct ggml_tensor * ggml_relu( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary(ctx, a, GGML_UNARY_OP_RELU); +} + +struct ggml_tensor * ggml_relu_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary_inplace(ctx, a, GGML_UNARY_OP_RELU); +} + +// ggml_leaky_relu + +struct ggml_tensor * ggml_leaky_relu( + struct ggml_context * ctx, + struct ggml_tensor * a, + float negative_slope, + bool inplace) { + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + ggml_set_op_params(result, &negative_slope, sizeof(negative_slope)); + + result->op = GGML_OP_LEAKY_RELU; + result->src[0] = a; + + return result; +} + +// ggml_sigmoid + +struct ggml_tensor * ggml_sigmoid( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary(ctx, a, GGML_UNARY_OP_SIGMOID); +} + +struct ggml_tensor * ggml_sigmoid_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary_inplace(ctx, a, GGML_UNARY_OP_SIGMOID); +} + +// ggml_gelu + +struct ggml_tensor * ggml_gelu( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary(ctx, a, GGML_UNARY_OP_GELU); +} + +struct ggml_tensor * ggml_gelu_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary_inplace(ctx, a, GGML_UNARY_OP_GELU); +} + +// ggml_gelu_quick + +struct ggml_tensor * ggml_gelu_quick( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary(ctx, a, GGML_UNARY_OP_GELU_QUICK); +} + +struct ggml_tensor * ggml_gelu_quick_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary_inplace(ctx, a, GGML_UNARY_OP_GELU_QUICK); +} + +// ggml_silu + +struct ggml_tensor * ggml_silu( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary(ctx, a, GGML_UNARY_OP_SILU); +} + +struct ggml_tensor * ggml_silu_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary_inplace(ctx, a, GGML_UNARY_OP_SILU); +} + +// ggml_silu_back + +struct ggml_tensor * ggml_silu_back( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + struct ggml_tensor * result = ggml_dup_tensor(ctx, a); + + result->op = GGML_OP_SILU_BACK; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +// ggml hardswish + +struct ggml_tensor * ggml_hardswish( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary(ctx, a, 
GGML_UNARY_OP_HARDSWISH); +} + +// ggml hardsigmoid + +struct ggml_tensor * ggml_hardsigmoid( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary(ctx, a, GGML_UNARY_OP_HARDSIGMOID); +} + +// ggml exp + +struct ggml_tensor * ggml_exp( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary(ctx, a, GGML_UNARY_OP_EXP); +} + +struct ggml_tensor * ggml_exp_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary_inplace(ctx, a, GGML_UNARY_OP_EXP); +} + +// ggml_norm + +static struct ggml_tensor * ggml_norm_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + float eps, + bool inplace) { + struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + ggml_set_op_params(result, &eps, sizeof(eps)); + + result->op = GGML_OP_NORM; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_norm( + struct ggml_context * ctx, + struct ggml_tensor * a, + float eps) { + return ggml_norm_impl(ctx, a, eps, false); +} + +struct ggml_tensor * ggml_norm_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + float eps) { + return ggml_norm_impl(ctx, a, eps, true); +} + +// ggml_rms_norm + +static struct ggml_tensor * ggml_rms_norm_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + float eps, + bool inplace) { + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + ggml_set_op_params(result, &eps, sizeof(eps)); + + result->op = GGML_OP_RMS_NORM; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_rms_norm( + struct ggml_context * ctx, + struct ggml_tensor * a, + float eps) { + return ggml_rms_norm_impl(ctx, a, eps, false); +} + +struct ggml_tensor * ggml_rms_norm_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + float eps) { + return ggml_rms_norm_impl(ctx, a, eps, true); +} + +// ggml_rms_norm_back + +struct ggml_tensor * ggml_rms_norm_back( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + float eps) { + struct ggml_tensor * result = ggml_dup_tensor(ctx, a); + + ggml_set_op_params(result, &eps, sizeof(eps)); + + result->op = GGML_OP_RMS_NORM_BACK; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +// ggml_group_norm + +static struct ggml_tensor * ggml_group_norm_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_groups, + float eps, + bool inplace) { + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + ggml_set_op_params_i32(result, 0, n_groups); + ggml_set_op_params_f32(result, 1, eps); + + result->op = GGML_OP_GROUP_NORM; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_group_norm( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_groups, + float eps) { + return ggml_group_norm_impl(ctx, a, n_groups, eps, false); +} + +struct ggml_tensor * ggml_group_norm_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_groups, + float eps) { + return ggml_group_norm_impl(ctx, a, n_groups, eps, true); +} + +// ggml_mul_mat + +struct ggml_tensor * ggml_mul_mat( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + GGML_ASSERT(ggml_can_mul_mat(a, b)); + GGML_ASSERT(!ggml_is_transposed(a)); + + const int64_t ne[4] = { a->ne[1], b->ne[1], b->ne[2], b->ne[3] }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); + + result->op = GGML_OP_MUL_MAT; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +void ggml_mul_mat_set_prec( + struct ggml_tensor * a, + enum ggml_prec prec) { + GGML_ASSERT(a->op == GGML_OP_MUL_MAT); + + const int32_t prec_i32 = (int32_t) prec; + + ggml_set_op_params_i32(a, 0, prec_i32); +} + +// ggml_mul_mat_id + +/* + c = ggml_mul_mat_id(ctx, as, b, ids); + + as -> [cols, rows, n_expert] + ids -> [n_experts_used, n_tokens] (i32) + b -> [cols, n_expert_used, n_tokens] + c -> [rows, n_expert_used, n_tokens] + + in b, n_experts_used can be broadcasted to match the n_expert_used of ids + + c ~= as[:,:,i] @ b[:,i%r,t], i = ids[e,t] for all e,t in ids +*/ +struct ggml_tensor * ggml_mul_mat_id( + struct ggml_context * ctx, + struct ggml_tensor * as, + struct ggml_tensor * b, + struct ggml_tensor * ids) { + GGML_ASSERT(!ggml_is_transposed(as)); + GGML_ASSERT(ids->type == GGML_TYPE_I32); + + GGML_ASSERT(as->ne[3] == 1); // as is 3d (one matrix per expert) + GGML_ASSERT(b->ne[3] == 1); // b 
is 3d + GGML_ASSERT(ids->ne[2] == 1 && ids->ne[3] == 1); // ids is 2d + GGML_ASSERT(ids->ne[1] == b->ne[2]); // must have an expert list per b row + GGML_ASSERT(as->ne[0] == b->ne[0]); // can_mul_mat + GGML_ASSERT(ids->ne[0] % b->ne[1] == 0); // can broadcast + + const int64_t ne[4] = { as->ne[1], ids->ne[0], b->ne[2], 1 }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); + + result->op = GGML_OP_MUL_MAT_ID; + result->src[0] = as; + result->src[1] = b; + result->src[2] = ids; + + return result; +} + +// ggml_out_prod + +struct ggml_tensor * ggml_out_prod( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + GGML_ASSERT(ggml_can_out_prod(a, b)); + GGML_ASSERT(!ggml_is_transposed(a)); + + // a is broadcastable to b for ne[2] and ne[3] -> use b->ne[2] and b->ne[3] + const int64_t ne[4] = { a->ne[0], b->ne[0], b->ne[2], b->ne[3] }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); + + result->op = GGML_OP_OUT_PROD; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +// ggml_scale + +static struct ggml_tensor * ggml_scale_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + float s, + bool inplace) { + GGML_ASSERT(ggml_is_padded_1d(a)); + + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + ggml_set_op_params(result, &s, sizeof(s)); + + result->op = GGML_OP_SCALE; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_scale( + struct ggml_context * ctx, + struct ggml_tensor * a, + float s) { + return ggml_scale_impl(ctx, a, s, false); +} + +struct ggml_tensor * ggml_scale_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + float s) { + return ggml_scale_impl(ctx, a, s, true); +} + +// ggml_set + +static struct ggml_tensor * ggml_set_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t nb1, + size_t nb2, + size_t nb3, + size_t offset, + bool inplace) { + GGML_ASSERT(ggml_nelements(a) >= ggml_nelements(b)); + + // make a view of the destination + struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + GGML_ASSERT(offset < (size_t)(1 << 30)); + int32_t params[] = { nb1, nb2, nb3, offset, inplace ? 1 : 0 }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_SET; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +struct ggml_tensor * ggml_set( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t nb1, + size_t nb2, + size_t nb3, + size_t offset) { + return ggml_set_impl(ctx, a, b, nb1, nb2, nb3, offset, false); +} + +struct ggml_tensor * ggml_set_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t nb1, + size_t nb2, + size_t nb3, + size_t offset) { + return ggml_set_impl(ctx, a, b, nb1, nb2, nb3, offset, true); +} + +struct ggml_tensor * ggml_set_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t offset) { + return ggml_set_impl(ctx, a, b, a->nb[1], a->nb[2], a->nb[3], offset, false); +} + +struct ggml_tensor * ggml_set_1d_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t 
offset) { + return ggml_set_impl(ctx, a, b, a->nb[1], a->nb[2], a->nb[3], offset, true); +} + +struct ggml_tensor * ggml_set_2d( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t nb1, + size_t offset) { + return ggml_set_impl(ctx, a, b, nb1, a->nb[2], a->nb[3], offset, false); +} + +struct ggml_tensor * ggml_set_2d_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t nb1, + size_t offset) { + return ggml_set_impl(ctx, a, b, nb1, a->nb[2], a->nb[3], offset, true); +} + +// ggml_cpy + +static struct ggml_tensor * ggml_cpy_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + GGML_ASSERT(ggml_nelements(a) == ggml_nelements(b)); + + // make a view of the destination + struct ggml_tensor * result = ggml_view_tensor(ctx, b); + if (strlen(b->name) > 0) { + ggml_format_name(result, "%s (copy of %s)", b->name, a->name); + } else { + ggml_format_name(result, "%s (copy)", a->name); + } + + result->op = GGML_OP_CPY; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +struct ggml_tensor * ggml_cpy( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + return ggml_cpy_impl(ctx, a, b); +} + +struct ggml_tensor * ggml_cast( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_type type) { + struct ggml_tensor * result = ggml_new_tensor(ctx, type, GGML_MAX_DIMS, a->ne); + ggml_format_name(result, "%s (copy)", a->name); + + result->op = GGML_OP_CPY; + result->src[0] = a; + result->src[1] = result; + + return result; +} + +// ggml_cont + +static struct ggml_tensor * ggml_cont_impl( + struct ggml_context * ctx, + struct ggml_tensor * a) { + struct ggml_tensor * result = ggml_dup_tensor(ctx, a); + ggml_format_name(result, "%s (cont)", a->name); + + result->op = GGML_OP_CONT; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_cont( + struct ggml_context * ctx, + struct ggml_tensor * 
a) { + return ggml_cont_impl(ctx, a); +} + +// make contiguous, with new shape +GGML_API struct ggml_tensor * ggml_cont_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0) { + return ggml_cont_4d(ctx, a, ne0, 1, 1, 1); +} + +GGML_API struct ggml_tensor * ggml_cont_2d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1) { + return ggml_cont_4d(ctx, a, ne0, ne1, 1, 1); +} + +GGML_API struct ggml_tensor * ggml_cont_3d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1, + int64_t ne2) { + return ggml_cont_4d(ctx, a, ne0, ne1, ne2, 1); +} + +struct ggml_tensor * ggml_cont_4d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1, + int64_t ne2, + int64_t ne3) { + GGML_ASSERT(ggml_nelements(a) == (ne0*ne1*ne2*ne3)); + + struct ggml_tensor * result = ggml_new_tensor_4d(ctx, a->type, ne0, ne1, ne2, ne3); + ggml_format_name(result, "%s (cont)", a->name); + + result->op = GGML_OP_CONT; + result->src[0] = a; + + return result; +} + +// ggml_reshape + +struct ggml_tensor * ggml_reshape( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + GGML_ASSERT(ggml_is_contiguous(a)); + // as only the shape of b is relevant, and not its memory layout, b is allowed to be non contiguous. 
+ GGML_ASSERT(ggml_nelements(a) == ggml_nelements(b)); + + struct ggml_tensor * result = ggml_new_tensor_impl(ctx, a->type, GGML_MAX_DIMS, b->ne, a, 0); + ggml_format_name(result, "%s (reshaped)", a->name); + + result->op = GGML_OP_RESHAPE; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_reshape_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0) { + GGML_ASSERT(ggml_is_contiguous(a)); + GGML_ASSERT(ggml_nelements(a) == ne0); + + const int64_t ne[1] = { ne0 }; + struct ggml_tensor * result = ggml_new_tensor_impl(ctx, a->type, 1, ne, a, 0); + ggml_format_name(result, "%s (reshaped)", a->name); + + result->op = GGML_OP_RESHAPE; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_reshape_2d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1) { + GGML_ASSERT(ggml_is_contiguous(a)); + GGML_ASSERT(ggml_nelements(a) == ne0*ne1); + + const int64_t ne[2] = { ne0, ne1 }; + struct ggml_tensor * result = ggml_new_tensor_impl(ctx, a->type, 2, ne, a, 0); + ggml_format_name(result, "%s (reshaped)", a->name); + + result->op = GGML_OP_RESHAPE; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_reshape_3d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1, + int64_t ne2) { + GGML_ASSERT(ggml_is_contiguous(a)); + GGML_ASSERT(ggml_nelements(a) == ne0*ne1*ne2); + + const int64_t ne[3] = { ne0, ne1, ne2 }; + struct ggml_tensor * result = ggml_new_tensor_impl(ctx, a->type, 3, ne, a, 0); + ggml_format_name(result, "%s (reshaped)", a->name); + + result->op = GGML_OP_RESHAPE; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_reshape_4d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1, + int64_t ne2, + int64_t ne3) { + GGML_ASSERT(ggml_is_contiguous(a)); + GGML_ASSERT(ggml_nelements(a) == ne0*ne1*ne2*ne3); + + const int64_t ne[4] = { ne0, ne1, ne2, ne3 }; + struct ggml_tensor * 
result = ggml_new_tensor_impl(ctx, a->type, 4, ne, a, 0); + ggml_format_name(result, "%s (reshaped)", a->name); + + result->op = GGML_OP_RESHAPE; + result->src[0] = a; + + return result; +} + +static struct ggml_tensor * ggml_view_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_dims, + const int64_t * ne, + size_t offset) { + struct ggml_tensor * result = ggml_new_tensor_impl(ctx, a->type, n_dims, ne, a, offset); + ggml_format_name(result, "%s (view)", a->name); + + ggml_set_op_params(result, &offset, sizeof(offset)); + + result->op = GGML_OP_VIEW; + result->src[0] = a; + + return result; +} + +// ggml_view_1d + +struct ggml_tensor * ggml_view_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + size_t offset) { + struct ggml_tensor * result = ggml_view_impl(ctx, a, 1, &ne0, offset); + + return result; +} + +// ggml_view_2d + +struct ggml_tensor * ggml_view_2d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1, + size_t nb1, + size_t offset) { + const int64_t ne[2] = { ne0, ne1 }; + + struct ggml_tensor * result = ggml_view_impl(ctx, a, 2, ne, offset); + + result->nb[1] = nb1; + result->nb[2] = result->nb[1]*ne1; + result->nb[3] = result->nb[2]; + + return result; +} + +// ggml_view_3d + +struct ggml_tensor * ggml_view_3d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1, + int64_t ne2, + size_t nb1, + size_t nb2, + size_t offset) { + const int64_t ne[3] = { ne0, ne1, ne2 }; + + struct ggml_tensor * result = ggml_view_impl(ctx, a, 3, ne, offset); + + result->nb[1] = nb1; + result->nb[2] = nb2; + result->nb[3] = result->nb[2]*ne2; + + return result; +} + +// ggml_view_4d + +struct ggml_tensor * ggml_view_4d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1, + int64_t ne2, + int64_t ne3, + size_t nb1, + size_t nb2, + size_t nb3, + size_t offset) { + const int64_t ne[4] = { ne0, ne1, ne2, ne3 }; + + struct ggml_tensor * 
result = ggml_view_impl(ctx, a, 4, ne, offset); + + result->nb[1] = nb1; + result->nb[2] = nb2; + result->nb[3] = nb3; + + return result; +} + +// ggml_permute + +struct ggml_tensor * ggml_permute( + struct ggml_context * ctx, + struct ggml_tensor * a, + int axis0, + int axis1, + int axis2, + int axis3) { + GGML_ASSERT(axis0 >= 0 && axis0 < GGML_MAX_DIMS); + GGML_ASSERT(axis1 >= 0 && axis1 < GGML_MAX_DIMS); + GGML_ASSERT(axis2 >= 0 && axis2 < GGML_MAX_DIMS); + GGML_ASSERT(axis3 >= 0 && axis3 < GGML_MAX_DIMS); + + GGML_ASSERT(axis0 != axis1); + GGML_ASSERT(axis0 != axis2); + GGML_ASSERT(axis0 != axis3); + GGML_ASSERT(axis1 != axis2); + GGML_ASSERT(axis1 != axis3); + GGML_ASSERT(axis2 != axis3); + + struct ggml_tensor * result = ggml_view_tensor(ctx, a); + ggml_format_name(result, "%s (permuted)", a->name); + + int ne[GGML_MAX_DIMS]; + int nb[GGML_MAX_DIMS]; + + ne[axis0] = a->ne[0]; + ne[axis1] = a->ne[1]; + ne[axis2] = a->ne[2]; + ne[axis3] = a->ne[3]; + + nb[axis0] = a->nb[0]; + nb[axis1] = a->nb[1]; + nb[axis2] = a->nb[2]; + nb[axis3] = a->nb[3]; + + result->ne[0] = ne[0]; + result->ne[1] = ne[1]; + result->ne[2] = ne[2]; + result->ne[3] = ne[3]; + + result->nb[0] = nb[0]; + result->nb[1] = nb[1]; + result->nb[2] = nb[2]; + result->nb[3] = nb[3]; + + result->op = GGML_OP_PERMUTE; + result->src[0] = a; + + int32_t params[] = { axis0, axis1, axis2, axis3 }; + ggml_set_op_params(result, params, sizeof(params)); + + return result; +} + +// ggml_transpose + +struct ggml_tensor * ggml_transpose( + struct ggml_context * ctx, + struct ggml_tensor * a) { + struct ggml_tensor * result = ggml_view_tensor(ctx, a); + ggml_format_name(result, "%s (transposed)", a->name); + + result->ne[0] = a->ne[1]; + result->ne[1] = a->ne[0]; + + result->nb[0] = a->nb[1]; + result->nb[1] = a->nb[0]; + + result->op = GGML_OP_TRANSPOSE; + result->src[0] = a; + + return result; +} + +// ggml_get_rows + +struct ggml_tensor * ggml_get_rows( + struct ggml_context * ctx, + struct ggml_tensor * a, + 
struct ggml_tensor * b) { + GGML_ASSERT(a->ne[2] == b->ne[1]); + GGML_ASSERT(b->ne[3] == 1); + GGML_ASSERT(b->type == GGML_TYPE_I32); + + // TODO: implement non F32 return + enum ggml_type type = GGML_TYPE_F32; + if (a->type == GGML_TYPE_I32) { + type = a->type; + } + struct ggml_tensor * result = ggml_new_tensor_4d(ctx, type, a->ne[0], b->ne[0], b->ne[1], b->ne[2]); + + result->op = GGML_OP_GET_ROWS; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +// ggml_get_rows_back + +struct ggml_tensor * ggml_get_rows_back( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c) { + GGML_ASSERT(ggml_is_matrix(a) && ggml_is_vector(b) && b->type == GGML_TYPE_I32); + GGML_ASSERT(ggml_is_matrix(c) && (a->ne[0] == c->ne[0])); + + // TODO: implement non F32 return + //struct ggml_tensor * result = ggml_new_tensor_2d(ctx, a->type, a->ne[0], b->ne[0]); + struct ggml_tensor * result = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, c->ne[0], c->ne[1]); + + result->op = GGML_OP_GET_ROWS_BACK; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +// ggml_diag + +struct ggml_tensor * ggml_diag( + struct ggml_context * ctx, + struct ggml_tensor * a) { + GGML_ASSERT(a->ne[1] == 1); + + const int64_t ne[4] = { a->ne[0], a->ne[0], a->ne[2], a->ne[3] }; + struct ggml_tensor * result = ggml_new_tensor(ctx, a->type, 4, ne); + + result->op = GGML_OP_DIAG; + result->src[0] = a; + + return result; +} + +// ggml_diag_mask_inf + +static struct ggml_tensor * ggml_diag_mask_inf_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_past, + bool inplace) { + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + int32_t params[] = { n_past }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_DIAG_MASK_INF; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_diag_mask_inf( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_past) { + return ggml_diag_mask_inf_impl(ctx, a, n_past, false); +} + +struct ggml_tensor * ggml_diag_mask_inf_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_past) { + return ggml_diag_mask_inf_impl(ctx, a, n_past, true); +} + +// ggml_diag_mask_zero + +static struct ggml_tensor * ggml_diag_mask_zero_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_past, + bool inplace) { + struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + int32_t params[] = { n_past }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_DIAG_MASK_ZERO; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_diag_mask_zero( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_past) { + return ggml_diag_mask_zero_impl(ctx, a, n_past, false); +} + +struct ggml_tensor * ggml_diag_mask_zero_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_past) { + return ggml_diag_mask_zero_impl(ctx, a, n_past, true); +} + +// ggml_soft_max + +static struct ggml_tensor * ggml_soft_max_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * mask, + float scale, + float max_bias, + bool inplace) { + GGML_ASSERT(ggml_is_contiguous(a)); + + if (mask) { + GGML_ASSERT(mask->type == GGML_TYPE_F16 || mask->type == GGML_TYPE_F32); + GGML_ASSERT(ggml_is_contiguous(mask)); + GGML_ASSERT(ggml_is_matrix(mask)); + GGML_ASSERT(mask->ne[0] == a->ne[0]); + GGML_ASSERT(mask->ne[1] >= a->ne[1]); + } + + if (max_bias > 0.0f) { + GGML_ASSERT(mask); + } + + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + float params[] = { scale, max_bias }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_SOFT_MAX; + result->src[0] = a; + result->src[1] = mask; + + return result; +} + +struct ggml_tensor * ggml_soft_max( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_soft_max_impl(ctx, a, NULL, 1.0f, 0.0f, false); +} + +struct ggml_tensor * ggml_soft_max_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_soft_max_impl(ctx, a, NULL, 1.0f, 0.0f, true); +} + +struct ggml_tensor * ggml_soft_max_ext( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * mask, + float scale, + float max_bias) { + return ggml_soft_max_impl(ctx, a, mask, scale, max_bias, false); +} + +// ggml_soft_max_back + +static struct ggml_tensor * ggml_soft_max_back_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + bool inplace) { + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + result->op = GGML_OP_SOFT_MAX_BACK; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +struct ggml_tensor * ggml_soft_max_back( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + return ggml_soft_max_back_impl(ctx, a, b, false); +} + +struct ggml_tensor * ggml_soft_max_back_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + return ggml_soft_max_back_impl(ctx, a, b, true); +} + +// ggml_rope + +static struct ggml_tensor * ggml_rope_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + int n_dims, + int mode, + int n_ctx_orig, + float freq_base, + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow, + bool inplace) { + GGML_ASSERT((mode & 1) == 0 && "mode & 1 == 1 is no longer supported"); + + GGML_ASSERT(ggml_is_vector(b)); + GGML_ASSERT(b->type == GGML_TYPE_I32); + GGML_ASSERT(a->ne[2] == b->ne[0]); + + if (c) { + GGML_ASSERT(c->type == GGML_TYPE_F32); + GGML_ASSERT(c->ne[0] >= n_dims / 2); + } + + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + int32_t params[11] = { /*n_past*/ 0, n_dims, mode, /*n_ctx*/ 0, n_ctx_orig }; + memcpy(params + 5, &freq_base, sizeof(float)); + memcpy(params + 6, &freq_scale, sizeof(float)); + memcpy(params + 7, &ext_factor, sizeof(float)); + memcpy(params + 8, &attn_factor, sizeof(float)); + memcpy(params + 9, &beta_fast, sizeof(float)); + memcpy(params + 10, &beta_slow, sizeof(float)); + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_ROPE; + result->src[0] = a; + result->src[1] = b; + result->src[2] = c; + + return result; +} + +struct ggml_tensor * ggml_rope( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int n_dims, + int mode) { + return ggml_rope_impl( + ctx, a, b, NULL, n_dims, mode, 0, 10000.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, false + ); +} + +struct ggml_tensor * ggml_rope_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int n_dims, + int mode) { + return ggml_rope_impl( + ctx, a, b, NULL, n_dims, mode, 0, 10000.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, true + ); +} + +struct ggml_tensor * ggml_rope_ext( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + int n_dims, + int mode, + int n_ctx_orig, + float freq_base, + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow) { + return ggml_rope_impl( + ctx, a, b, c, n_dims, mode, n_ctx_orig, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow, false + ); +} + +struct ggml_tensor * ggml_rope_ext_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + int n_dims, + int mode, + int n_ctx_orig, + float freq_base, + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow) { + return ggml_rope_impl( + ctx, a, b, c, n_dims, mode, n_ctx_orig, freq_base, 
freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow, true + ); +} + +struct ggml_tensor * ggml_rope_custom( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int n_dims, + int mode, + int n_ctx_orig, + float freq_base, + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow) { + return ggml_rope_impl( + ctx, a, b, NULL, n_dims, mode, n_ctx_orig, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow, false + ); +} + +struct ggml_tensor * ggml_rope_custom_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int n_dims, + int mode, + int n_ctx_orig, + float freq_base, + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow) { + return ggml_rope_impl( + ctx, a, b, NULL, n_dims, mode, n_ctx_orig, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow, true + ); +} + +// ggml_rope_back + +struct ggml_tensor * ggml_rope_back( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + int n_dims, + int mode, + int n_ctx_orig, + float freq_base, + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow) { + GGML_ASSERT(ggml_is_vector(b)); + GGML_ASSERT(b->type == GGML_TYPE_I32); + GGML_ASSERT(a->ne[2] == b->ne[0]); + + struct ggml_tensor * result = ggml_dup_tensor(ctx, a); + + int32_t params[11] = { /*n_past*/ 0, n_dims, mode, /*n_ctx*/ 0, n_ctx_orig }; + memcpy(params + 5, &freq_base, sizeof(float)); + memcpy(params + 6, &freq_scale, sizeof(float)); + memcpy(params + 7, &ext_factor, sizeof(float)); + memcpy(params + 8, &attn_factor, sizeof(float)); + memcpy(params + 9, &beta_fast, sizeof(float)); + memcpy(params + 10, &beta_slow, sizeof(float)); + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_ROPE_BACK; + result->src[0] = a; + result->src[1] = b; + 
result->src[2] = c; + + return result; +} + +// ggml_clamp + +struct ggml_tensor * ggml_clamp( + struct ggml_context * ctx, + struct ggml_tensor * a, + float min, + float max) { + // TODO: when implement backward, fix this: + struct ggml_tensor * result = ggml_view_tensor(ctx, a); + + float params[] = { min, max }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_CLAMP; + result->src[0] = a; + + return result; +} + +// ggml_conv_1d + +static int64_t ggml_calc_conv_output_size(int64_t ins, int64_t ks, int s, int p, int d) { + return (ins + 2 * p - d * (ks - 1) - 1) / s + 1; +} + +GGML_API struct ggml_tensor * ggml_conv_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int s0, + int p0, + int d0) { + struct ggml_tensor * im2col = ggml_im2col(ctx, a, b, s0, 0, p0, 0, d0, 0, false, GGML_TYPE_F16); // [N, OL, IC * K] + + struct ggml_tensor * result = + ggml_mul_mat(ctx, + ggml_reshape_2d(ctx, im2col, im2col->ne[0], (im2col->ne[2] * im2col->ne[1])), // [N, OL, IC * K] => [N*OL, IC * K] + ggml_reshape_2d(ctx, a, (a->ne[0] * a->ne[1]), a->ne[2])); // [OC,IC, K] => [OC, IC * K] + + result = ggml_reshape_3d(ctx, result, im2col->ne[1], a->ne[2], im2col->ne[2]); // [N, OC, OL] + + return result; +} + +// ggml_conv_1d_ph + +struct ggml_tensor* ggml_conv_1d_ph( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int s, + int d) { + return ggml_conv_1d(ctx, a, b, s, a->ne[0] / 2, d); +} + +// ggml_conv_transpose_1d + +static int64_t ggml_calc_conv_transpose_1d_output_size(int64_t ins, int64_t ks, int s, int p, int d) { + return (ins - 1) * s - 2 * p + d * (ks - 1) + 1; +} + +GGML_API struct ggml_tensor * ggml_conv_transpose_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int s0, + int p0, + int d0) { + GGML_ASSERT(ggml_is_matrix(b)); + GGML_ASSERT(a->ne[2] == b->ne[1]); + GGML_ASSERT(a->ne[3] == 1); + + GGML_ASSERT(p0 == 0); + GGML_ASSERT(d0 == 
1); + + const int64_t ne[4] = { + ggml_calc_conv_transpose_1d_output_size(b->ne[0], a->ne[0], s0, 0 /*p0*/, 1 /*d0*/), + a->ne[1], b->ne[2], 1, + }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); + + int32_t params[] = { s0, p0, d0 }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_CONV_TRANSPOSE_1D; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +// ggml_conv_depthwise + +struct ggml_tensor * ggml_conv_depthwise_2d( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int s0, + int s1, + int p0, + int p1, + int d0, + int d1) { + struct ggml_tensor * new_a = ggml_reshape_4d(ctx, a, a->ne[0], a->ne[1], 1, a->ne[2] * a->ne[3]); + struct ggml_tensor * im2col = ggml_im2col(ctx, new_a, + ggml_reshape_4d(ctx, b, b->ne[0], b->ne[1], 1, b->ne[2] * b->ne[3]), + s0, s1, p0, p1, d0, d1, true, GGML_TYPE_F16); // [N * IC, OH, OW, KH * KW] + struct ggml_tensor * new_b = ggml_reshape_4d(ctx, im2col, im2col->ne[0], im2col->ne[2] * im2col->ne[1], b->ne[2], b->ne[3]); // [N * IC, OH, OW, KH * KW] => [N, IC, OH * OW, KH * KW] + + new_a = ggml_reshape_4d(ctx, new_a, (new_a->ne[0] * new_a->ne[1]), new_a->ne[2], new_a->ne[3], 1); // [OC,1, KH, KW] => [1, OC, 1, KH * KW] + struct ggml_tensor * result = ggml_mul_mat(ctx, new_a, new_b); + result = ggml_reshape_4d(ctx, result, im2col->ne[1], im2col->ne[2], b->ne[2], b->ne[3]); // [N, OC, OH, OW] + + return result; +} +// ggml_conv_2d + +// im2col: [N, IC, IH, IW] => [N, OH, OW, IC*KH*KW] +// a: [OC,IC, KH, KW] +// b: [N, IC, IH, IW] +// result: [N, OH, OW, IC*KH*KW] +struct ggml_tensor * ggml_im2col( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int s0, + int s1, + int p0, + int p1, + int d0, + int d1, + bool is_2D, + enum ggml_type dst_type) { + if(is_2D) { + GGML_ASSERT(a->ne[2] == b->ne[2]); + } else { + GGML_ASSERT(a->ne[1] == b->ne[1]); + GGML_ASSERT(b->ne[3] == 1); + } + + const int64_t OH 
= is_2D ? ggml_calc_conv_output_size(b->ne[1], a->ne[1], s1, p1, d1) : 0; + const int64_t OW = ggml_calc_conv_output_size(b->ne[0], a->ne[0], s0, p0, d0); + + GGML_ASSERT((!is_2D || OH > 0) && "b too small compared to a"); + GGML_ASSERT((OW > 0) && "b too small compared to a"); + + const int64_t ne[4] = { + is_2D ? (a->ne[2] * a->ne[1] * a->ne[0]) : a->ne[1] * a->ne[0], + OW, + is_2D ? OH : b->ne[2], + is_2D ? b->ne[3] : 1, + }; + + struct ggml_tensor * result = ggml_new_tensor(ctx, dst_type, 4, ne); + int32_t params[] = { s0, s1, p0, p1, d0, d1, (is_2D ? 1 : 0) }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_IM2COL; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +struct ggml_tensor * ggml_im2col_back( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int64_t * ne, + int s0, + int s1, + int p0, + int p1, + int d0, + int d1, + bool is_2D) { + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); + int32_t params[] = { s0, s1, p0, p1, d0, d1, (is_2D ? 
1 : 0) }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_IM2COL_BACK; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +// a: [OC,IC, KH, KW] +// b: [N, IC, IH, IW] +// result: [N, OC, OH, OW] +struct ggml_tensor * ggml_conv_2d( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int s0, + int s1, + int p0, + int p1, + int d0, + int d1) { + struct ggml_tensor * im2col = ggml_im2col(ctx, a, b, s0, s1, p0, p1, d0, d1, true, a->type); // [N, OH, OW, IC * KH * KW] + + struct ggml_tensor * result = + ggml_mul_mat(ctx, + ggml_reshape_2d(ctx, im2col, im2col->ne[0], im2col->ne[3] * im2col->ne[2] * im2col->ne[1]), // [N, OH, OW, IC * KH * KW] => [N*OH*OW, IC * KH * KW] + ggml_reshape_2d(ctx, a, (a->ne[0] * a->ne[1] * a->ne[2]), a->ne[3])); // [OC,IC, KH, KW] => [OC, IC * KH * KW] + + result = ggml_reshape_4d(ctx, result, im2col->ne[1], im2col->ne[2], im2col->ne[3], a->ne[3]); // [OC, N, OH, OW] + result = ggml_cont(ctx, ggml_permute(ctx, result, 0, 1, 3, 2)); // [N, OC, OH, OW] + + + return result; +} + +// ggml_conv_2d_sk_p0 + +struct ggml_tensor * ggml_conv_2d_sk_p0( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + return ggml_conv_2d(ctx, a, b, a->ne[0], a->ne[1], 0, 0, 1, 1); +} + +// ggml_conv_2d_s1_ph + +struct ggml_tensor * ggml_conv_2d_s1_ph( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + return ggml_conv_2d(ctx, a, b, 1, 1, a->ne[0] / 2, a->ne[1] / 2, 1, 1); +} + +// ggml_conv_transpose_2d_p0 + +static int64_t ggml_calc_conv_transpose_output_size(int64_t ins, int64_t ks, int s, int p) { + return (ins - 1) * s - 2 * p + ks; +} + +struct ggml_tensor * ggml_conv_transpose_2d_p0( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int stride) { + GGML_ASSERT(a->ne[3] == b->ne[2]); + + const int64_t ne[4] = { + ggml_calc_conv_transpose_output_size(b->ne[0], a->ne[0], stride, 0 /*p0*/), 
+ ggml_calc_conv_transpose_output_size(b->ne[1], a->ne[1], stride, 0 /*p1*/), + a->ne[2], b->ne[3], + }; + + struct ggml_tensor* result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); + + ggml_set_op_params_i32(result, 0, stride); + + result->op = GGML_OP_CONV_TRANSPOSE_2D; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +// ggml_pool_* + +static int64_t ggml_calc_pool_output_size(int64_t ins, int ks, int s, float p) { + return (ins + 2 * p - ks) / s + 1; +} + +// ggml_pool_1d + +struct ggml_tensor * ggml_pool_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_op_pool op, + int k0, + int s0, + int p0) { + const int64_t ne[4] = { + ggml_calc_pool_output_size(a->ne[0], k0, s0, p0), + a->ne[1], + a->ne[2], + a->ne[3], + }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); + + int32_t params[] = { op, k0, s0, p0 }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_POOL_1D; + result->src[0] = a; + + return result; +} + +// ggml_pool_2d + +struct ggml_tensor * ggml_pool_2d( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_op_pool op, + int k0, + int k1, + int s0, + int s1, + float p0, + float p1) { + struct ggml_tensor * result; + const int64_t ne[4] = { + ggml_calc_pool_output_size(a->ne[0], k0, s0, p0), + ggml_calc_pool_output_size(a->ne[1], k1, s1, p1), + a->ne[2], + a->ne[3], + }; + result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); + + int32_t params[] = { op, k0, k1, s0, s1, p0, p1 }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_POOL_2D; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_pool_2d_back( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * af, + enum ggml_op_pool op, + int k0, + int k1, + int s0, + int s1, + float p0, + float p1) { + struct ggml_tensor * result; + result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, af->ne); + + int32_t params[] = { op, k0, k1, s0, 
s1, p0, p1 }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_POOL_2D_BACK; + result->src[0] = a; + result->src[1] = af; + + return result; +} + +// ggml_upscale + +static struct ggml_tensor * ggml_upscale_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + int ne0, + int ne1, + int ne2, + int ne3) { + GGML_ASSERT(a->ne[0] <= ne0); + GGML_ASSERT(a->ne[1] <= ne1); + GGML_ASSERT(a->ne[2] <= ne2); + GGML_ASSERT(a->ne[3] <= ne3); + + struct ggml_tensor * result = ggml_new_tensor_4d(ctx, a->type, ne0, ne1, ne2, ne3); + + result->op = GGML_OP_UPSCALE; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_upscale( + struct ggml_context * ctx, + struct ggml_tensor * a, + int scale_factor) { + return ggml_upscale_impl(ctx, a, a->ne[0] * scale_factor, a->ne[1] * scale_factor, a->ne[2], a->ne[3]); +} + +struct ggml_tensor * ggml_upscale_ext( + struct ggml_context * ctx, + struct ggml_tensor * a, + int ne0, + int ne1, + int ne2, + int ne3) { + return ggml_upscale_impl(ctx, a, ne0, ne1, ne2, ne3); +} + +// ggml_pad + +struct ggml_tensor * ggml_pad( + struct ggml_context * ctx, + struct ggml_tensor * a, + int p0, + int p1, + int p2, + int p3) { + struct ggml_tensor * result = ggml_new_tensor_4d(ctx, a->type, + a->ne[0] + p0, + a->ne[1] + p1, + a->ne[2] + p2, + a->ne[3] + p3); + + result->op = GGML_OP_PAD; + result->src[0] = a; + + return result; +} + +// ggml_unpad + +struct ggml_tensor * ggml_unpad( + struct ggml_context * ctx, + struct ggml_tensor * a, + int p0, int p1, int p2, int p3) { + bool is_node = false; + + if (a->grad) { + GGML_ABORT("fatal error"); // TODO: implement backward + is_node = true; + } + + struct ggml_tensor * result = ggml_new_tensor_4d(ctx, a->type, + a->ne[0] - p0, + a->ne[1] - p1, + a->ne[2] - p2, + a->ne[3] - p3); + + result->op = GGML_OP_UNPAD; + result->grad = is_node ? 
ggml_dup_tensor(ctx, result) : NULL; + result->src[0] = a; + + return result; +} + +// ggml_arange + +struct ggml_tensor * ggml_arange( + struct ggml_context * ctx, + float start, + float stop, + float step) { + GGML_ASSERT(stop > start); + + const int64_t steps = (int64_t) ceilf((stop - start) / step); + + struct ggml_tensor * result = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, steps); + + ggml_set_op_params_f32(result, 0, start); + ggml_set_op_params_f32(result, 1, stop); + ggml_set_op_params_f32(result, 2, step); + + result->op = GGML_OP_ARANGE; + + return result; +} + +// ggml_timestep_embedding + +struct ggml_tensor * ggml_timestep_embedding( + struct ggml_context * ctx, + struct ggml_tensor * timesteps, + int dim, + int max_period) { + int actual_dim = dim; + if (dim % 2 != 0) { + actual_dim = dim + 1; + } + + struct ggml_tensor * result = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, actual_dim, timesteps->ne[0]); + + ggml_set_op_params_i32(result, 0, dim); + ggml_set_op_params_i32(result, 1, max_period); + + result->op = GGML_OP_TIMESTEP_EMBEDDING; + result->src[0] = timesteps; + + return result; +} + +// ggml_argsort + +struct ggml_tensor * ggml_argsort( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_sort_order order) { + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_I32, GGML_MAX_DIMS, a->ne); + + ggml_set_op_params_i32(result, 0, (int32_t) order); + + result->op = GGML_OP_ARGSORT; + result->src[0] = a; + + return result; +} + +// ggml_top_k + +struct ggml_tensor * ggml_top_k( + struct ggml_context * ctx, + struct ggml_tensor * a, + int k) { + GGML_ASSERT(a->ne[0] >= k); + + struct ggml_tensor * result = ggml_argsort(ctx, a, GGML_SORT_ORDER_DESC); + + result = ggml_view_4d(ctx, result, + k, result->ne[1], result->ne[2], result->ne[3], + result->nb[1], result->nb[2], result->nb[3], + 0); + + return result; +} + +// ggml_flash_attn_ext + +struct ggml_tensor * ggml_flash_attn_ext( + struct ggml_context * ctx, + struct ggml_tensor * q, 
+ struct ggml_tensor * k, + struct ggml_tensor * v, + struct ggml_tensor * mask, + float scale, + float max_bias, + float logit_softcap) { + GGML_ASSERT(ggml_can_mul_mat(k, q)); + // TODO: check if vT can be multiplied by (k*qT) + + if (mask) { + GGML_ASSERT(ggml_is_contiguous(mask)); + GGML_ASSERT(mask->ne[2] == 1); + GGML_ASSERT(mask->ne[3] == 1); + GGML_ASSERT(mask->ne[1] >= GGML_PAD(q->ne[1], GGML_KQ_MASK_PAD) && + "the Flash-Attention kernel requires the mask to be padded to GGML_KQ_MASK_PAD and at least n_queries big"); + //GGML_ASSERT(ggml_can_repeat_rows(mask, qk)); + } + + if (max_bias > 0.0f) { + GGML_ASSERT(mask); + } + + bool is_node = false; + + // permute(0, 2, 1, 3) + int64_t ne[4] = { q->ne[0], q->ne[2], q->ne[1], q->ne[3] }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); + + float params[] = { scale, max_bias, logit_softcap }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_FLASH_ATTN_EXT; + result->grad = is_node ? 
ggml_dup_tensor(ctx, result) : NULL; + result->src[0] = q; + result->src[1] = k; + result->src[2] = v; + result->src[3] = mask; + + return result; +} + +void ggml_flash_attn_ext_set_prec( + struct ggml_tensor * a, + enum ggml_prec prec) { + GGML_ASSERT(a->op == GGML_OP_FLASH_ATTN_EXT); + + const int32_t prec_i32 = (int32_t) prec; + + ggml_set_op_params_i32(a, 3, prec_i32); // scale is on first pos, max_bias on second +} + +// ggml_flash_attn_back + +struct ggml_tensor * ggml_flash_attn_back( + struct ggml_context * ctx, + struct ggml_tensor * q, + struct ggml_tensor * k, + struct ggml_tensor * v, + struct ggml_tensor * d, + bool masked) { + GGML_ABORT("TODO: adapt to ggml_flash_attn_ext() changes"); + + GGML_ASSERT(ggml_can_mul_mat(k, q)); + // TODO: check if vT can be multiplied by (k*qT) + + // d shape [D,N,ne2,ne3] + // q shape [D,N,ne2,ne3] + // k shape [D,M,kvne2,ne3] + // v shape [M,D,kvne2,ne3] + + const int64_t D = q->ne[0]; + const int64_t N = q->ne[1]; + const int64_t M = k->ne[1]; + const int64_t ne2 = q->ne[2]; + const int64_t ne3 = q->ne[3]; + const int64_t kvne2 = k->ne[2]; + + GGML_ASSERT(k->ne[0] == D); + GGML_ASSERT(v->ne[0] == M); + GGML_ASSERT(v->ne[1] == D); + GGML_ASSERT(d->ne[0] == D); + GGML_ASSERT(d->ne[1] == N); + GGML_ASSERT(k->ne[2] == kvne2); + GGML_ASSERT(k->ne[3] == ne3); + GGML_ASSERT(v->ne[2] == kvne2); + GGML_ASSERT(v->ne[3] == ne3); + GGML_ASSERT(d->ne[2] == ne2); + GGML_ASSERT(d->ne[3] == ne3); + + GGML_ASSERT(ne2 % kvne2 == 0); + + bool is_node = false; + + if (q->grad || k->grad || v->grad) { + // when using this operation (in backwards pass) these grads are set. + // we don't want to create (big) grad of our result, so is_node is false. + is_node = false; + } + + // store gradients of q, k and v as continuous tensors concatenated in result. + // note: v and gradv are actually transposed, i.e. v->ne[0] != D. 
+ const int64_t elem_q = ggml_nelements(q); + const int64_t elem_k = ggml_nelements(k); + const int64_t elem_v = ggml_nelements(v); + + enum ggml_type result_type = GGML_TYPE_F32; + GGML_ASSERT(ggml_blck_size(result_type) == 1); + const size_t tsize = ggml_type_size(result_type); + + const size_t offs_q = 0; + const size_t offs_k = offs_q + GGML_PAD(elem_q * tsize, GGML_MEM_ALIGN); + const size_t offs_v = offs_k + GGML_PAD(elem_k * tsize, GGML_MEM_ALIGN); + const size_t end = offs_v + GGML_PAD(elem_v * tsize, GGML_MEM_ALIGN); + + const size_t nelements = (end + tsize - 1)/tsize; + + struct ggml_tensor * result = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, nelements); + + int32_t masked_i = masked ? 1 : 0; + ggml_set_op_params(result, &masked_i, sizeof(masked_i)); + + result->op = GGML_OP_FLASH_ATTN_BACK; + result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; + result->src[0] = q; + result->src[1] = k; + result->src[2] = v; + result->src[3] = d; + + return result; +} + +// ggml_ssm_conv + +struct ggml_tensor * ggml_ssm_conv( + struct ggml_context * ctx, + struct ggml_tensor * sx, + struct ggml_tensor * c) { + GGML_ASSERT(ggml_is_3d(sx)); + GGML_ASSERT(ggml_is_matrix(c)); + + const int64_t d_conv = c->ne[0]; + const int64_t d_inner = c->ne[1]; + const int64_t n_t = sx->ne[0] - d_conv + 1; // tokens per sequence + const int64_t n_s = sx->ne[2]; + + // TODO: maybe support other strides than 1? 
+ GGML_ASSERT(sx->ne[0] == d_conv - 1 + n_t); + GGML_ASSERT(sx->ne[1] == d_inner); + GGML_ASSERT(n_t >= 0); + + struct ggml_tensor * result = ggml_new_tensor_3d(ctx, GGML_TYPE_F32, d_inner, n_t, n_s); + + result->op = GGML_OP_SSM_CONV; + result->src[0] = sx; + result->src[1] = c; + + return result; +} + +// ggml_ssm_scan + +struct ggml_tensor * ggml_ssm_scan( + struct ggml_context * ctx, + struct ggml_tensor * s, + struct ggml_tensor * x, + struct ggml_tensor * dt, + struct ggml_tensor * A, + struct ggml_tensor * B, + struct ggml_tensor * C) { + GGML_ASSERT(ggml_is_contiguous(s)); + GGML_ASSERT(ggml_is_contiguous(x)); + GGML_ASSERT(ggml_is_contiguous(dt)); + GGML_ASSERT(ggml_is_contiguous(A)); + GGML_ASSERT(ggml_is_matrix(A)); + GGML_ASSERT(ggml_is_3d(B)); + GGML_ASSERT(ggml_is_3d(s)); + GGML_ASSERT(B->nb[0] == ggml_type_size(B->type)); + GGML_ASSERT(C->nb[0] == ggml_type_size(C->type)); + GGML_ASSERT(ggml_are_same_shape(x, dt)); + GGML_ASSERT(ggml_are_same_shape(B, C)); + + { + const int64_t d_state = s->ne[0]; + const int64_t d_inner = s->ne[1]; + const int64_t n_seq_tokens = x->ne[1]; + const int64_t n_seqs = x->ne[2]; + + GGML_ASSERT(s->ne[2] == n_seqs); + GGML_ASSERT(x->ne[0] == d_inner); + GGML_ASSERT(A->ne[0] == d_state); + GGML_ASSERT(A->ne[1] == d_inner); + GGML_ASSERT(B->ne[0] == d_state); + GGML_ASSERT(B->ne[1] == n_seq_tokens); + GGML_ASSERT(B->ne[2] == n_seqs); + } + + // concatenated y + ssm_states + struct ggml_tensor * result = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, ggml_nelements(x) + ggml_nelements(s)); + + result->op = GGML_OP_SSM_SCAN; + result->src[0] = s; + result->src[1] = x; + result->src[2] = dt; + result->src[3] = A; + result->src[4] = B; + result->src[5] = C; + + return result; +} + +// ggml_win_part + +struct ggml_tensor * ggml_win_part( + struct ggml_context * ctx, + struct ggml_tensor * a, + int w) { + GGML_ASSERT(a->ne[3] == 1); + GGML_ASSERT(a->type == GGML_TYPE_F32); + + // padding + const int px = (w - a->ne[1]%w)%w; + const int py 
= (w - a->ne[2]%w)%w; + + const int npx = (px + a->ne[1])/w; + const int npy = (py + a->ne[2])/w; + const int np = npx*npy; + + const int64_t ne[4] = { a->ne[0], w, w, np, }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); + + int32_t params[] = { npx, npy, w }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_WIN_PART; + result->src[0] = a; + + return result; +} + +// ggml_win_unpart + +struct ggml_tensor * ggml_win_unpart( + struct ggml_context * ctx, + struct ggml_tensor * a, + int w0, + int h0, + int w) { + GGML_ASSERT(a->type == GGML_TYPE_F32); + + const int64_t ne[4] = { a->ne[0], w0, h0, 1, }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 3, ne); + + int32_t params[] = { w }; + ggml_set_op_params(result, params, sizeof(params)); + + result->op = GGML_OP_WIN_UNPART; + result->src[0] = a; + + return result; +} + +// ggml_get_rel_pos + +struct ggml_tensor * ggml_get_rel_pos( + struct ggml_context * ctx, + struct ggml_tensor * a, + int qh, + int kh) { + GGML_ASSERT(qh == kh); + GGML_ASSERT(2*MAX(qh, kh) - 1 == a->ne[1]); + + const int64_t ne[4] = { a->ne[0], kh, qh, 1, }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F16, 3, ne); + + result->op = GGML_OP_GET_REL_POS; + result->src[0] = a; + + return result; +} + +// ggml_add_rel_pos + +static struct ggml_tensor * ggml_add_rel_pos_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * pw, + struct ggml_tensor * ph, + bool inplace) { + GGML_ASSERT(ggml_are_same_shape(pw, ph)); + GGML_ASSERT(ggml_is_contiguous(a)); + GGML_ASSERT(ggml_is_contiguous(pw)); + GGML_ASSERT(ggml_is_contiguous(ph)); + GGML_ASSERT(ph->type == GGML_TYPE_F32); + GGML_ASSERT(pw->type == GGML_TYPE_F32); + GGML_ASSERT(pw->ne[3] == a->ne[2]); + GGML_ASSERT(pw->ne[0]*pw->ne[0] == a->ne[0]); + GGML_ASSERT(pw->ne[1]*pw->ne[2] == a->ne[1]); + + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + ggml_set_op_params_i32(result, 0, inplace ? 1 : 0); + + result->op = GGML_OP_ADD_REL_POS; + result->src[0] = a; + result->src[1] = pw; + result->src[2] = ph; + + return result; +} + +struct ggml_tensor * ggml_add_rel_pos( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * pw, + struct ggml_tensor * ph) { + return ggml_add_rel_pos_impl(ctx, a, pw, ph, false); +} + +struct ggml_tensor * ggml_add_rel_pos_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * pw, + struct ggml_tensor * ph) { + return ggml_add_rel_pos_impl(ctx, a, pw, ph, true); +} + +// ggml_rwkv_wkv + +struct ggml_tensor * ggml_rwkv_wkv( + struct ggml_context * ctx, + struct ggml_tensor * k, + struct ggml_tensor * v, + struct ggml_tensor * r, + struct ggml_tensor * tf, + struct ggml_tensor * td, + struct ggml_tensor * state) { + GGML_ASSERT(ggml_is_contiguous(k)); + GGML_ASSERT(ggml_is_contiguous(v)); + GGML_ASSERT(ggml_is_contiguous(r)); + GGML_ASSERT(ggml_is_contiguous(tf)); + GGML_ASSERT(ggml_is_contiguous(td)); + GGML_ASSERT(ggml_is_contiguous(state)); + + const int64_t S = k->ne[0]; + const int64_t H = k->ne[2]; + const int64_t n_tokens = k->ne[3]; + const int64_t n_seqs = state->ne[1]; + { + GGML_ASSERT(k->ne[1] == 1); + GGML_ASSERT(v->ne[0] == 1 && v->ne[1] == S && v->ne[2] == H && v->ne[3] == n_tokens); + GGML_ASSERT(r->ne[0] == 1 && r->ne[1] == S && r->ne[2] == H && r->ne[3] == n_tokens); + // TODO: RWKV v4 and v5 + GGML_ASSERT(td->ne[0] == 1 && td->ne[1] == S && td->ne[2] == H && td->ne[3] == n_tokens); + GGML_ASSERT(ggml_nelements(state) == S * S * H * n_seqs); + } + + // concat output and new_state + const int64_t ne[4] = { S * H, n_tokens + S * n_seqs, 1, 1 }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); + + result->op = GGML_OP_RWKV_WKV; + result->src[0] = k; + result->src[1] = v; + result->src[2] = r; + result->src[3] = tf; + result->src[4] = 
td; + result->src[5] = state; + + return result; +} + +// ggml_unary + +static struct ggml_tensor * ggml_unary_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_unary_op op, + bool inplace) { + GGML_ASSERT(ggml_is_contiguous_1(a)); + + struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + ggml_set_op_params_i32(result, 0, (int32_t) op); + + result->op = GGML_OP_UNARY; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_unary( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_unary_op op) { + return ggml_unary_impl(ctx, a, op, false); +} + +struct ggml_tensor * ggml_unary_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_unary_op op) { + return ggml_unary_impl(ctx, a, op, true); +} + +// ggml_map_unary + +static struct ggml_tensor * ggml_map_unary_impl_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + const ggml_unary_op_f32_t fun, + bool inplace) { + struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + ggml_set_op_params(result, (const void *) &fun, sizeof(fun)); + + result->op = GGML_OP_MAP_UNARY; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_map_unary_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + const ggml_unary_op_f32_t fun) { + return ggml_map_unary_impl_f32(ctx, a, fun, false); +} + +struct ggml_tensor * ggml_map_unary_inplace_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + const ggml_unary_op_f32_t fun) { + return ggml_map_unary_impl_f32(ctx, a, fun, true); +} + +// ggml_map_binary + +static struct ggml_tensor * ggml_map_binary_impl_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + const ggml_binary_op_f32_t fun, + bool inplace) { + GGML_ASSERT(ggml_are_same_shape(a, b)); + + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + ggml_set_op_params(result, (const void *) &fun, sizeof(fun)); + + result->op = GGML_OP_MAP_BINARY; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +struct ggml_tensor * ggml_map_binary_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + const ggml_binary_op_f32_t fun) { + return ggml_map_binary_impl_f32(ctx, a, b, fun, false); +} + +struct ggml_tensor * ggml_map_binary_inplace_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + const ggml_binary_op_f32_t fun) { + return ggml_map_binary_impl_f32(ctx, a, b, fun, true); +} + +// ggml_map_custom1_f32 + +static struct ggml_tensor * ggml_map_custom1_impl_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + const ggml_custom1_op_f32_t fun, + bool inplace) { + struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + ggml_set_op_params(result, (const void *) &fun, sizeof(fun)); + + result->op = GGML_OP_MAP_CUSTOM1_F32; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_map_custom1_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + const ggml_custom1_op_f32_t fun) { + return ggml_map_custom1_impl_f32(ctx, a, fun, false); +} + +struct ggml_tensor * ggml_map_custom1_inplace_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + const ggml_custom1_op_f32_t fun) { + return ggml_map_custom1_impl_f32(ctx, a, fun, true); +} + +// ggml_map_custom2_f32 + +static struct ggml_tensor * ggml_map_custom2_impl_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + const ggml_custom2_op_f32_t fun, + bool inplace) { + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + ggml_set_op_params(result, (const void *) &fun, sizeof(fun)); + + result->op = GGML_OP_MAP_CUSTOM2_F32; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +struct ggml_tensor * ggml_map_custom2_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + const ggml_custom2_op_f32_t fun) { + return ggml_map_custom2_impl_f32(ctx, a, b, fun, false); +} + +struct ggml_tensor * ggml_map_custom2_inplace_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + const ggml_custom2_op_f32_t fun) { + return ggml_map_custom2_impl_f32(ctx, a, b, fun, true); +} + +// ggml_map_custom3_f32 + +static struct ggml_tensor * ggml_map_custom3_impl_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + const ggml_custom3_op_f32_t fun, + bool inplace) { + struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + ggml_set_op_params(result, (const void *) &fun, sizeof(fun)); + + result->op = GGML_OP_MAP_CUSTOM3_F32; + result->src[0] = a; + result->src[1] = b; + result->src[2] = c; + + return result; +} + +struct ggml_tensor * ggml_map_custom3_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + const ggml_custom3_op_f32_t fun) { + return ggml_map_custom3_impl_f32(ctx, a, b, c, fun, false); +} + +struct ggml_tensor * ggml_map_custom3_inplace_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + const ggml_custom3_op_f32_t fun) { + return ggml_map_custom3_impl_f32(ctx, a, b, c, fun, true); +} + +// ggml_map_custom1 +struct ggml_map_custom1_op_params { + ggml_custom1_op_t fun; + int n_tasks; + void * userdata; +}; + +static struct ggml_tensor * ggml_map_custom1_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + const ggml_custom1_op_t fun, 
+ int n_tasks, + void * userdata, + bool inplace) { + GGML_ASSERT(n_tasks == GGML_N_TASKS_MAX || n_tasks > 0); + + struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + struct ggml_map_custom1_op_params params = { + /*.fun =*/ fun, + /*.n_tasks =*/ n_tasks, + /*.userdata =*/ userdata + }; + ggml_set_op_params(result, (const void *) ¶ms, sizeof(params)); + + result->op = GGML_OP_MAP_CUSTOM1; + result->src[0] = a; + + return result; +} + +struct ggml_tensor * ggml_map_custom1( + struct ggml_context * ctx, + struct ggml_tensor * a, + const ggml_custom1_op_t fun, + int n_tasks, + void * userdata) { + return ggml_map_custom1_impl(ctx, a, fun, n_tasks, userdata, false); +} + +struct ggml_tensor * ggml_map_custom1_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + const ggml_custom1_op_t fun, + int n_tasks, + void * userdata) { + return ggml_map_custom1_impl(ctx, a, fun, n_tasks, userdata, true); +} + +// ggml_map_custom2 + +struct ggml_map_custom2_op_params { + ggml_custom2_op_t fun; + int n_tasks; + void * userdata; +}; + +static struct ggml_tensor * ggml_map_custom2_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + const ggml_custom2_op_t fun, + int n_tasks, + void * userdata, + bool inplace) { + GGML_ASSERT(n_tasks == GGML_N_TASKS_MAX || n_tasks > 0); + + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + struct ggml_map_custom2_op_params params = { + /*.fun =*/ fun, + /*.n_tasks =*/ n_tasks, + /*.userdata =*/ userdata + }; + ggml_set_op_params(result, (const void *) ¶ms, sizeof(params)); + + result->op = GGML_OP_MAP_CUSTOM2; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +struct ggml_tensor * ggml_map_custom2( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + const ggml_custom2_op_t fun, + int n_tasks, + void * userdata) { + return ggml_map_custom2_impl(ctx, a, b, fun, n_tasks, userdata, false); +} + +struct ggml_tensor * ggml_map_custom2_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + const ggml_custom2_op_t fun, + int n_tasks, + void * userdata) { + return ggml_map_custom2_impl(ctx, a, b, fun, n_tasks, userdata, true); +} + +// ggml_map_custom3 + +struct ggml_map_custom3_op_params { + ggml_custom3_op_t fun; + int n_tasks; + void * userdata; +}; + +static struct ggml_tensor * ggml_map_custom3_impl( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + const ggml_custom3_op_t fun, + int n_tasks, + void * userdata, + bool inplace) { + GGML_ASSERT(n_tasks == GGML_N_TASKS_MAX || n_tasks > 0); + + struct ggml_tensor * result = inplace ? 
ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + + struct ggml_map_custom3_op_params params = { + /*.fun =*/ fun, + /*.n_tasks =*/ n_tasks, + /*.userdata =*/ userdata + }; + ggml_set_op_params(result, (const void *) ¶ms, sizeof(params)); + + result->op = GGML_OP_MAP_CUSTOM3; + result->src[0] = a; + result->src[1] = b; + result->src[2] = c; + + return result; +} + +struct ggml_tensor * ggml_map_custom3( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + const ggml_custom3_op_t fun, + int n_tasks, + void * userdata) { + return ggml_map_custom3_impl(ctx, a, b, c, fun, n_tasks, userdata, false); +} + +struct ggml_tensor * ggml_map_custom3_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + const ggml_custom3_op_t fun, + int n_tasks, + void * userdata) { + return ggml_map_custom3_impl(ctx, a, b, c, fun, n_tasks, userdata, true); +} + +// ggml_cross_entropy_loss + +struct ggml_tensor * ggml_cross_entropy_loss( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + GGML_ASSERT(ggml_are_same_shape(a, b)); + + struct ggml_tensor * result = ggml_new_tensor_1d(ctx, a->type, 1); + + result->op = GGML_OP_CROSS_ENTROPY_LOSS; + result->src[0] = a; + result->src[1] = b; + + return result; +} + +// ggml_cross_entropy_loss_back + +struct ggml_tensor * ggml_cross_entropy_loss_back( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c) { + GGML_ASSERT(ggml_are_same_shape(a, b)); + GGML_ASSERT(ggml_is_scalar(c)); + + struct ggml_tensor * result = ggml_dup_tensor(ctx, a); + + result->op = GGML_OP_CROSS_ENTROPY_LOSS_BACK; + result->src[0] = a; + result->src[1] = b; + result->src[2] = c; + + return result; +} + +// opt_step_adamw + +struct ggml_tensor * ggml_opt_step_adamw( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * grad, + float alpha, + float 
beta1, + float beta2, + float eps, + float wd) { + GGML_ASSERT(a->flags & GGML_TENSOR_FLAG_PARAM); + GGML_ASSERT(ggml_are_same_shape(a, grad)); + GGML_ASSERT(alpha > 0.0f); + GGML_ASSERT(beta1 >= 0.0f && beta1 <= 1.0f); + GGML_ASSERT(beta2 >= 0.0f && beta2 <= 1.0f); + GGML_ASSERT(eps >= 0.0f); + GGML_ASSERT(wd >= 0.0f && wd <= 1.0f); + + struct ggml_tensor * result = ggml_view_tensor(ctx, a); + + const int64_t iter = 1; + memcpy(&result->op_params[0], &iter, sizeof(int64_t)); + ggml_set_op_params_f32(result, 2, alpha); + ggml_set_op_params_f32(result, 3, beta1); + ggml_set_op_params_f32(result, 4, beta2); + ggml_set_op_params_f32(result, 5, eps); + ggml_set_op_params_f32(result, 6, wd); + + result->op = GGML_OP_OPT_STEP_ADAMW; + result->src[0] = a; + result->src[1] = grad; + result->src[2] = ggml_dup_tensor(ctx, grad); + result->src[3] = ggml_dup_tensor(ctx, grad); + + return result; +} + +//////////////////////////////////////////////////////////////////////////////// + +// ggml_compute_forward_dup + +static void ggml_compute_forward_dup_same_cont( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); + GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0)); + GGML_ASSERT(src0->type == dst->type); + + const size_t nb0 = ggml_type_size(src0->type); + + const int ith = params->ith; // thread index + const int nth = params->nth; // number of threads + + // parallelize by elements + const int ne = ggml_nelements(dst); + const int dr = (ne + nth - 1) / nth; + const int ie0 = dr * ith; + const int ie1 = MIN(ie0 + dr, ne); + + if (ie0 < ie1) { + memcpy( + ((char *) dst->data + ie0*nb0), + ((char *) src0->data + ie0*nb0), + (ie1 - ie0) * nb0); + } +} + +static void ggml_compute_forward_dup_f16( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + 
GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); + + GGML_TENSOR_UNARY_OP_LOCALS + + const int ith = params->ith; // thread index + const int nth = params->nth; // number of threads + + // parallelize by rows + const int nr = ne01; + // number of rows per thread + const int dr = (nr + nth - 1) / nth; + // row range for this thread + const int ir0 = dr * ith; + const int ir1 = MIN(ir0 + dr, nr); + + if (src0->type == dst->type && + ne00 == ne0 && + nb00 == ggml_type_size(src0->type) && nb0 == ggml_type_size(dst->type)) { + // copy by rows + const size_t rs = ne00*nb00; + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + for (int64_t i01 = ir0; i01 < ir1; i01++) { + memcpy( + ((char *) dst->data + i01*nb1 + i02*nb2 + i03*nb3), + ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03), + rs); + } + } + } + return; + } + + // TODO: add more special-case implementations for tensor shapes/strides that can benefit from memcpy + + if (ggml_is_contiguous(dst)) { + if (nb00 == sizeof(ggml_fp16_t)) { + if (dst->type == GGML_TYPE_F16) { + size_t id = 0; + const size_t rs = ne00 * nb00; + char * dst_ptr = (char *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += rs * ir0; + for (int i01 = ir0; i01 < ir1; i01++) { + const char * src0_ptr = (char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03; + memcpy(dst_ptr + id, src0_ptr, rs); + id += rs; + } + id += rs * (ne01 - ir1); + } + } + } else if (dst->type == GGML_TYPE_F32) { + size_t id = 0; + float * dst_ptr = (float *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += ne00 * ir0; + for (int i01 = ir0; i01 < ir1; i01++) { + const ggml_fp16_t * src0_ptr = (ggml_fp16_t *) ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03); + for (int i00 = 0; i00 < ne00; i00++) { + dst_ptr[id] = GGML_FP16_TO_FP32(src0_ptr[i00]); + id++; + } + } + id += ne00 * (ne01 - ir1); + } + } + } else if 
(type_traits[dst->type].from_float) { + ggml_from_float_t const quantize_row_q = type_traits[dst->type].from_float; + float * src0_f32 = (float *) params->wdata + (ne00 + CACHE_LINE_SIZE_F32) * ith; + + size_t id = 0; + size_t rs = nb0 * (ne00 / ggml_blck_size(dst->type)); + char * dst_ptr = (char *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += rs * ir0; + for (int i01 = ir0; i01 < ir1; i01++) { + const ggml_fp16_t * src0_ptr = (ggml_fp16_t *) ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03); + + for (int i00 = 0; i00 < ne00; i00++) { + src0_f32[i00] = GGML_FP16_TO_FP32(src0_ptr[i00]); + } + + quantize_row_q(src0_f32, dst_ptr + id, ne00); + id += rs; + } + id += rs * (ne01 - ir1); + } + } + } else { + GGML_ABORT("fatal error"); // TODO: implement + } + } else { + //printf("%s: this is not optimal - fix me\n", __func__); + + if (dst->type == GGML_TYPE_F32) { + size_t id = 0; + float * dst_ptr = (float *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += ne00 * ir0; + for (int i01 = ir0; i01 < ir1; i01++) { + for (int i00 = 0; i00 < ne00; i00++) { + const ggml_fp16_t * src0_ptr = (ggml_fp16_t *) ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + + dst_ptr[id] = GGML_FP16_TO_FP32(*src0_ptr); + id++; + } + } + id += ne00 * (ne01 - ir1); + } + } + } else if (dst->type == GGML_TYPE_F16) { + size_t id = 0; + ggml_fp16_t * dst_ptr = (ggml_fp16_t *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += ne00 * ir0; + for (int i01 = ir0; i01 < ir1; i01++) { + for (int i00 = 0; i00 < ne00; i00++) { + const ggml_fp16_t * src0_ptr = (ggml_fp16_t *) ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + + dst_ptr[id] = *src0_ptr; + id++; + } + } + id += ne00 * (ne01 - ir1); + } + } + } else { + GGML_ABORT("fatal error"); // TODO: implement + } + } + return; + } + + // dst counters + 
int64_t i10 = 0; + int64_t i11 = 0; + int64_t i12 = 0; + int64_t i13 = 0; + + if (dst->type == GGML_TYPE_F16) { + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + i10 += ne00 * ir0; + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + for (int64_t i01 = ir0; i01 < ir1; i01++) { + for (int64_t i00 = 0; i00 < ne00; i00++) { + const char * src0_ptr = ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + char * dst_ptr = ((char *) dst->data + i10*nb0 + i11*nb1 + i12*nb2 + i13*nb3); + + memcpy(dst_ptr, src0_ptr, sizeof(ggml_fp16_t)); + + if (++i10 == ne00) { + i10 = 0; + if (++i11 == ne01) { + i11 = 0; + if (++i12 == ne02) { + i12 = 0; + if (++i13 == ne03) { + i13 = 0; + } + } + } + } + } + } + i10 += ne00 * (ne01 - ir1); + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + } else if (dst->type == GGML_TYPE_F32) { + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + i10 += ne00 * ir0; + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + for (int64_t i01 = ir0; i01 < ir1; i01++) { + for (int64_t i00 = 0; i00 < ne00; i00++) { + const char * src0_ptr = ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + char * dst_ptr = ((char *) dst->data + i10*nb0 + i11*nb1 + i12*nb2 + i13*nb3); + + *(float *) dst_ptr = GGML_FP16_TO_FP32(*(const ggml_fp16_t *) src0_ptr); + + if (++i10 == ne0) { + i10 = 0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + i10 += ne00 * (ne01 - ir1); + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == 
ne3) { + i13 = 0; + } + } + } + } + } + } + } else { + GGML_ABORT("fatal error"); // TODO: implement + } +} + +static void ggml_compute_forward_dup_bf16( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); + + GGML_TENSOR_UNARY_OP_LOCALS + + const int ith = params->ith; // thread index + const int nth = params->nth; // number of threads + + // parallelize by rows + const int nr = ne01; + // number of rows per thread + const int dr = (nr + nth - 1) / nth; + // row range for this thread + const int ir0 = dr * ith; + const int ir1 = MIN(ir0 + dr, nr); + + if (src0->type == dst->type && + ne00 == ne0 && + nb00 == ggml_type_size(src0->type) && nb0 == ggml_type_size(dst->type)) { + // copy by rows + const size_t rs = ne00*nb00; + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + for (int64_t i01 = ir0; i01 < ir1; i01++) { + memcpy( + ((char *) dst->data + i01*nb1 + i02*nb2 + i03*nb3), + ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03), + rs); + } + } + } + return; + } + + // TODO: add more special-case implementations for tensor shapes/strides that can benefit from memcpy + + if (ggml_is_contiguous(dst)) { + if (nb00 == sizeof(ggml_bf16_t)) { + if (dst->type == GGML_TYPE_BF16) { + size_t id = 0; + const size_t rs = ne00 * nb00; + char * dst_ptr = (char *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += rs * ir0; + for (int i01 = ir0; i01 < ir1; i01++) { + const char * src0_ptr = (char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03; + memcpy(dst_ptr + id, src0_ptr, rs); + id += rs; + } + id += rs * (ne01 - ir1); + } + } + } else if (dst->type == GGML_TYPE_F16) { + size_t id = 0; + ggml_fp16_t * dst_ptr = (ggml_fp16_t *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += ne00 * ir0; + for 
(int i01 = ir0; i01 < ir1; i01++) { + const ggml_bf16_t * src0_ptr = (ggml_bf16_t *) ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03); + for (int i00 = 0; i00 < ne00; i00++) { + dst_ptr[id] = GGML_FP32_TO_FP16(GGML_BF16_TO_FP32(src0_ptr[i00])); + id++; + } + } + id += ne00 * (ne01 - ir1); + } + } + } else if (dst->type == GGML_TYPE_F32) { + size_t id = 0; + float * dst_ptr = (float *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += ne00 * ir0; + for (int i01 = ir0; i01 < ir1; i01++) { + const ggml_bf16_t * src0_ptr = (ggml_bf16_t *) ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03); + for (int i00 = 0; i00 < ne00; i00++) { + dst_ptr[id] = GGML_BF16_TO_FP32(src0_ptr[i00]); + id++; + } + } + id += ne00 * (ne01 - ir1); + } + } + } else if (type_traits[dst->type].from_float) { + ggml_from_float_t const quantize_row_q = type_traits[dst->type].from_float; + float * src0_f32 = (float *) params->wdata + (ne00 + CACHE_LINE_SIZE_F32) * ith; + + size_t id = 0; + size_t rs = nb0 * (ne00 / ggml_blck_size(dst->type)); + char * dst_ptr = (char *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += rs * ir0; + for (int i01 = ir0; i01 < ir1; i01++) { + const ggml_bf16_t * src0_ptr = (ggml_bf16_t *) ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03); + + for (int i00 = 0; i00 < ne00; i00++) { + src0_f32[i00] = GGML_BF16_TO_FP32(src0_ptr[i00]); + } + + quantize_row_q(src0_f32, dst_ptr + id, ne00); + id += rs; + } + id += rs * (ne01 - ir1); + } + } + } else { + GGML_ABORT("fatal error"); // TODO: implement + } + } else { + //printf("%s: this is not optimal - fix me\n", __func__); + + if (dst->type == GGML_TYPE_F32) { + size_t id = 0; + float * dst_ptr = (float *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += ne00 * ir0; + for (int i01 = ir0; i01 < ir1; i01++) { + for (int i00 = 0; i00 < ne00; i00++) { + const 
ggml_bf16_t * src0_ptr = (ggml_bf16_t *) ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + + dst_ptr[id] = GGML_BF16_TO_FP32(*src0_ptr); + id++; + } + } + id += ne00 * (ne01 - ir1); + } + } + } else if (dst->type == GGML_TYPE_BF16) { + size_t id = 0; + ggml_bf16_t * dst_ptr = (ggml_bf16_t *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += ne00 * ir0; + for (int i01 = ir0; i01 < ir1; i01++) { + for (int i00 = 0; i00 < ne00; i00++) { + const ggml_bf16_t * src0_ptr = (ggml_bf16_t *) ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + + dst_ptr[id] = *src0_ptr; + id++; + } + } + id += ne00 * (ne01 - ir1); + } + } + } else if (dst->type == GGML_TYPE_F16) { + size_t id = 0; + ggml_fp16_t * dst_ptr = (ggml_fp16_t *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += ne00 * ir0; + for (int i01 = ir0; i01 < ir1; i01++) { + for (int i00 = 0; i00 < ne00; i00++) { + const ggml_bf16_t * src0_ptr = (ggml_bf16_t *) ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + + dst_ptr[id] = GGML_FP32_TO_FP16(GGML_BF16_TO_FP32(*src0_ptr)); + id++; + } + } + id += ne00 * (ne01 - ir1); + } + } + } else { + GGML_ABORT("fatal error"); // TODO: implement + } + } + return; + } + + // dst counters + int64_t i10 = 0; + int64_t i11 = 0; + int64_t i12 = 0; + int64_t i13 = 0; + + if (dst->type == GGML_TYPE_BF16) { + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + i10 += ne00 * ir0; + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + for (int64_t i01 = ir0; i01 < ir1; i01++) { + for (int64_t i00 = 0; i00 < ne00; i00++) { + const char * src0_ptr = ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + char * dst_ptr = ((char *) dst->data + i10*nb0 + i11*nb1 + i12*nb2 + i13*nb3); + + 
memcpy(dst_ptr, src0_ptr, sizeof(ggml_bf16_t)); + + if (++i10 == ne00) { + i10 = 0; + if (++i11 == ne01) { + i11 = 0; + if (++i12 == ne02) { + i12 = 0; + if (++i13 == ne03) { + i13 = 0; + } + } + } + } + } + } + i10 += ne00 * (ne01 - ir1); + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + } else if (dst->type == GGML_TYPE_F16) { + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + i10 += ne00 * ir0; + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + for (int64_t i01 = ir0; i01 < ir1; i01++) { + for (int64_t i00 = 0; i00 < ne00; i00++) { + const char * src0_ptr = ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + char * dst_ptr = ((char *) dst->data + i10*nb0 + i11*nb1 + i12*nb2 + i13*nb3); + + *(ggml_fp16_t *) dst_ptr = GGML_FP32_TO_FP16(GGML_BF16_TO_FP32(*(const ggml_bf16_t *) src0_ptr)); + + if (++i10 == ne0) { + i10 = 0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + i10 += ne00 * (ne01 - ir1); + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + } else if (dst->type == GGML_TYPE_F32) { + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + i10 += ne00 * ir0; + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + for (int64_t i01 = ir0; i01 < ir1; i01++) { + for (int64_t i00 = 0; i00 < ne00; i00++) { + const char * src0_ptr = ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + char * dst_ptr = ((char *) dst->data + i10*nb0 + i11*nb1 + i12*nb2 + i13*nb3); + + 
*(float *) dst_ptr = GGML_BF16_TO_FP32(*(const ggml_bf16_t *) src0_ptr); + + if (++i10 == ne0) { + i10 = 0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + i10 += ne00 * (ne01 - ir1); + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + } else { + GGML_ABORT("fatal error"); // TODO: implement + } +} + +static void ggml_compute_forward_dup_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); + + GGML_TENSOR_UNARY_OP_LOCALS + + const int ith = params->ith; // thread index + const int nth = params->nth; // number of threads + + // parallelize by rows + const int nr = ne01; + // number of rows per thread + const int dr = (nr + nth - 1) / nth; + // row range for this thread + const int ir0 = dr * ith; + const int ir1 = MIN(ir0 + dr, nr); + + if (src0->type == dst->type && + ne00 == ne0 && + nb00 == ggml_type_size(src0->type) && nb0 == ggml_type_size(dst->type)) { + // copy by rows + const size_t rs = ne00*nb00; + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + for (int64_t i01 = ir0; i01 < ir1; i01++) { + memcpy( + ((char *) dst->data + i01*nb1 + i02*nb2 + i03*nb3), + ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03), + rs); + } + } + } + return; + } + + if (ggml_is_contiguous(dst)) { + // TODO: simplify + if (nb00 == sizeof(float)) { + if (dst->type == GGML_TYPE_F32) { + size_t id = 0; + const size_t rs = ne00 * nb00; + char * dst_ptr = (char *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += rs * ir0; + for (int i01 = ir0; i01 < ir1; i01++) { + const char * src0_ptr = (char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03; + memcpy(dst_ptr + id, 
src0_ptr, rs); + id += rs; + } + id += rs * (ne01 - ir1); + } + } + } else if (type_traits[dst->type].from_float) { + ggml_from_float_t const quantize_row_q = type_traits[dst->type].from_float; + + size_t id = 0; + size_t rs = nb0 * (ne00 / ggml_blck_size(dst->type)); + char * dst_ptr = (char *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += rs * ir0; + for (int i01 = ir0; i01 < ir1; i01++) { + const float * src0_ptr = (float *) ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03); + quantize_row_q(src0_ptr, dst_ptr + id, ne00); + id += rs; + } + id += rs * (ne01 - ir1); + } + } + } else { + GGML_ABORT("fatal error"); // TODO: implement + } + } else { + //printf("%s: this is not optimal - fix me\n", __func__); + + if (dst->type == GGML_TYPE_F32) { + size_t id = 0; + float * dst_ptr = (float *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += ne00 * ir0; + for (int i01 = ir0; i01 < ir1; i01++) { + for (int i00 = 0; i00 < ne00; i00++) { + const float * src0_ptr = (float *) ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + + dst_ptr[id] = *src0_ptr; + id++; + } + } + id += ne00 * (ne01 - ir1); + } + } + } else if (dst->type == GGML_TYPE_F16) { + size_t id = 0; + ggml_fp16_t * dst_ptr = (ggml_fp16_t *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += ne00 * ir0; + for (int i01 = ir0; i01 < ir1; i01++) { + for (int i00 = 0; i00 < ne00; i00++) { + const float * src0_ptr = (float *) ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + + dst_ptr[id] = GGML_FP32_TO_FP16(*src0_ptr); + id++; + } + } + id += ne00 * (ne01 - ir1); + } + } + } else if (dst->type == GGML_TYPE_BF16) { + size_t id = 0; + ggml_bf16_t * dst_ptr = (ggml_bf16_t *) dst->data; + + for (int i03 = 0; i03 < ne03; i03++) { + for (int i02 = 0; i02 < ne02; i02++) { + id += ne00 * ir0; + for (int i01 = ir0; i01 < 
ir1; i01++) { + for (int i00 = 0; i00 < ne00; i00++) { + const float * src0_ptr = (float *) ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + + dst_ptr[id] = GGML_FP32_TO_BF16(*src0_ptr); + id++; + } + } + id += ne00 * (ne01 - ir1); + } + } + } else { + GGML_ABORT("fatal error"); // TODO: implement + } + } + + return; + } + + // dst counters + + int64_t i10 = 0; + int64_t i11 = 0; + int64_t i12 = 0; + int64_t i13 = 0; + + if (dst->type == GGML_TYPE_F32) { + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + i10 += ne00 * ir0; + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + for (int64_t i01 = ir0; i01 < ir1; i01++) { + for (int64_t i00 = 0; i00 < ne00; i00++) { + const char * src0_ptr = ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + char * dst_ptr = ((char *) dst->data + i10*nb0 + i11*nb1 + i12*nb2 + i13*nb3); + + memcpy(dst_ptr, src0_ptr, sizeof(float)); + + if (++i10 == ne0) { + i10 = 0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + i10 += ne00 * (ne01 - ir1); + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + } else if (dst->type == GGML_TYPE_F16) { + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + i10 += ne00 * ir0; + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + for (int64_t i01 = ir0; i01 < ir1; i01++) { + for (int64_t i00 = 0; i00 < ne00; i00++) { + const char * src0_ptr = ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + char * dst_ptr = ((char *) dst->data + i10*nb0 + i11*nb1 + i12*nb2 + i13*nb3); + + *(ggml_fp16_t *) 
dst_ptr = GGML_FP32_TO_FP16(*(const float *) src0_ptr); + + if (++i10 == ne0) { + i10 = 0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + i10 += ne00 * (ne01 - ir1); + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + } else if (dst->type == GGML_TYPE_BF16) { + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + i10 += ne00 * ir0; + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + for (int64_t i01 = ir0; i01 < ir1; i01++) { + for (int64_t i00 = 0; i00 < ne00; i00++) { + const char * src0_ptr = ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + char * dst_ptr = ((char *) dst->data + i10*nb0 + i11*nb1 + i12*nb2 + i13*nb3); + + *(ggml_bf16_t *) dst_ptr = GGML_FP32_TO_BF16(*(const float *) src0_ptr); + + if (++i10 == ne0) { + i10 = 0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + i10 += ne00 * (ne01 - ir1); + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + } else { + GGML_ABORT("fatal error"); // TODO: implement + } +} + +// A simplified version of ggml_compute_forward_dup that doesn't do float upcasting, and just plain old memcpy. 
+static void ggml_compute_forward_dup_bytes( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); + GGML_ASSERT(src0->type == dst->type); + + GGML_TENSOR_UNARY_OP_LOCALS; + + if (ggml_is_contiguous(src0) && ggml_is_contiguous(dst)) { + ggml_compute_forward_dup_same_cont(params, dst); + return; + } + + const size_t type_size = ggml_type_size(src0->type); + const int ith = params->ith; // thread index + const int nth = params->nth; // number of threads + + + // parallelize by rows + const int nr = ne01; + // number of rows per thread + const int dr = (nr + nth - 1) / nth; + // row range for this thread + const int ir0 = dr * ith; + const int ir1 = MIN(ir0 + dr, nr); + + if (src0->type == dst->type && + ne00 == ne0 && + nb00 == type_size && nb0 == type_size) { + // copy by rows + const size_t rs = ne00 * type_size; + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + for (int64_t i01 = ir0; i01 < ir1; i01++) { + memcpy( + ((char *) dst->data + i01*nb1 + i02*nb2 + i03*nb3), + ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03), + rs); + } + } + } + return; + } + + if (ggml_is_contiguous(dst)) { + size_t id = 0; + char * dst_ptr = (char *) dst->data; + const size_t rs = ne00 * type_size; + + if (nb00 == type_size) { + // src0 is contiguous on first dimension, copy by rows + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + id += rs * ir0; + for (int64_t i01 = ir0; i01 < ir1; i01++) { + const char * src0_ptr = (char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03; + memcpy(dst_ptr + id, src0_ptr, rs); + id += rs; + } + id += rs * (ne01 - ir1); + } + } + } else { + //printf("%s: this is not optimal - fix me\n", __func__); + + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + id += rs * ir0; + for (int64_t i01 = ir0; i01 <
ir1; i01++) { + for (int64_t i00 = 0; i00 < ne00; i00++) { + const char * src0_ptr = (char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03; + memcpy(dst_ptr + id, src0_ptr, type_size); + + id += type_size; + } + } + id += rs * (ne01 - ir1); + } + } + } + + return; + } + + // dst counters + + int64_t i10 = 0; + int64_t i11 = 0; + int64_t i12 = 0; + int64_t i13 = 0; + + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + i10 += ne00 * ir0; + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + for (int64_t i01 = ir0; i01 < ir1; i01++) { + for (int64_t i00 = 0; i00 < ne00; i00++) { + const char * src0_ptr = ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + char * dst_ptr = ((char *) dst->data + i10*nb0 + i11*nb1 + i12*nb2 + i13*nb3); + + memcpy(dst_ptr, src0_ptr, type_size); + + if (++i10 == ne0) { + i10 = 0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + i10 += ne00 * (ne01 - ir1); + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } +} + +static void ggml_compute_forward_dup( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + if (src0->type == dst->type) { + ggml_compute_forward_dup_bytes(params, dst); + return; + } + + switch (src0->type) { + case GGML_TYPE_F16: + { + ggml_compute_forward_dup_f16(params, dst); + } break; + case GGML_TYPE_BF16: + { + ggml_compute_forward_dup_bf16(params, dst); + } break; + case GGML_TYPE_F32: + { + ggml_compute_forward_dup_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_add + +static void ggml_compute_forward_add_f32( + const struct 
ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src0); + + GGML_TENSOR_BINARY_OP_LOCALS + + GGML_ASSERT( nb0 == sizeof(float)); + GGML_ASSERT(nb00 == sizeof(float)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + if (nb10 == sizeof(float)) { + for (int ir = ir0; ir < ir1; ++ir) { + // src1 is broadcastable across src0 and dst in i1, i2, i3 + const int64_t i03 = ir/(ne02*ne01); + const int64_t i02 = (ir - i03*ne02*ne01)/ne01; + const int64_t i01 = (ir - i03*ne02*ne01 - i02*ne01); + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + const int64_t nr0 = ne00 / ne10; + + float * dst_ptr = (float *) ((char *) dst->data + i03*nb3 + i02*nb2 + i01*nb1 ); + float * src0_ptr = (float *) ((char *) src0->data + i03*nb03 + i02*nb02 + i01*nb01); + float * src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11); + + for (int64_t r = 0; r < nr0; ++r) { +#ifdef GGML_USE_ACCELERATE + vDSP_vadd(src0_ptr + r*ne10, 1, src1_ptr, 1, dst_ptr + r*ne10, 1, ne10); +#else + ggml_vec_add_f32(ne10, dst_ptr + r*ne10, src0_ptr + r*ne10, src1_ptr); +#endif + } + } + } else { + // src1 is not contiguous + for (int ir = ir0; ir < ir1; ++ir) { + // src1 is broadcastable across src0 and dst in i1, i2, i3 + const int64_t i03 = ir/(ne02*ne01); + const int64_t i02 = (ir - i03*ne02*ne01)/ne01; + const int64_t i01 = (ir - i03*ne02*ne01 - i02*ne01); + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + + float * dst_ptr = (float *) ((char *) dst->data + i03*nb3 + i02*nb2 + i01*nb1 ); + float * 
src0_ptr = (float *) ((char *) src0->data + i03*nb03 + i02*nb02 + i01*nb01); + + for (int64_t i0 = 0; i0 < ne0; ++i0) { + const int64_t i10 = i0 % ne10; + float * src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11 + i10*nb10); + + dst_ptr[i0] = src0_ptr[i0] + *src1_ptr; + } + } + } +} + +static void ggml_compute_forward_add_f16_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src0); + + GGML_TENSOR_BINARY_OP_LOCALS + + GGML_ASSERT(src0->type == GGML_TYPE_F16); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + + if (dst->type == GGML_TYPE_F32) { + GGML_ASSERT( nb0 == sizeof(float)); + } + else { + GGML_ASSERT(dst->type == GGML_TYPE_F16); + GGML_ASSERT( nb0 == sizeof(ggml_fp16_t)); + } + + GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + if (nb10 == sizeof(float)) { + if (dst->type == GGML_TYPE_F16) { + for (int ir = ir0; ir < ir1; ++ir) { + // src0, src1 and dst are same shape => same indices + const int i3 = ir/(ne2*ne1); + const int i2 = (ir - i3*ne2*ne1)/ne1; + const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + + ggml_fp16_t * dst_ptr = (ggml_fp16_t *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1); + ggml_fp16_t * src0_ptr = (ggml_fp16_t *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01); + float * src1_ptr = (float *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11); + + for (int i = 0; i < ne0; i++) { + dst_ptr[i] = GGML_FP32_TO_FP16(GGML_FP16_TO_FP32(src0_ptr[i]) + src1_ptr[i]); + } + } + } else { + for (int ir = ir0; ir < ir1; ++ir) { + // src0, src1 and dst are same shape => same 
indices + const int i3 = ir/(ne2*ne1); + const int i2 = (ir - i3*ne2*ne1)/ne1; + const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + + float * dst_ptr = (float *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1); + ggml_fp16_t * src0_ptr = (ggml_fp16_t *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01); + float * src1_ptr = (float *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11); + + for (int i = 0; i < ne0; i++) { + dst_ptr[i] = GGML_FP16_TO_FP32(src0_ptr[i]) + src1_ptr[i]; + } + } + } + } + else { + // src1 is not contiguous + GGML_ABORT("fatal error"); + } +} + +static void ggml_compute_forward_add_bf16_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src0); + + GGML_TENSOR_BINARY_OP_LOCALS + + GGML_ASSERT(src0->type == GGML_TYPE_BF16); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + + if (dst->type == GGML_TYPE_F32) { + GGML_ASSERT( nb0 == sizeof(float)); + } + else { + GGML_ASSERT(dst->type == GGML_TYPE_BF16); + GGML_ASSERT( nb0 == sizeof(ggml_bf16_t)); + } + + GGML_ASSERT(nb00 == sizeof(ggml_bf16_t)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + if (nb10 == sizeof(float)) { + if (dst->type == GGML_TYPE_BF16) { + for (int ir = ir0; ir < ir1; ++ir) { + // src0, src1 and dst are same shape => same indices + const int i3 = ir/(ne2*ne1); + const int i2 = (ir - i3*ne2*ne1)/ne1; + const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + + ggml_bf16_t * dst_ptr = (ggml_bf16_t *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1); + ggml_bf16_t * src0_ptr = (ggml_bf16_t *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01); + float * src1_ptr = (float *) ((char *) 
src1->data + i3*nb13 + i2*nb12 + i1*nb11); + + for (int i = 0; i < ne0; i++) { + dst_ptr[i] = GGML_FP32_TO_BF16(GGML_BF16_TO_FP32(src0_ptr[i]) + src1_ptr[i]); + } + } + } else { + for (int ir = ir0; ir < ir1; ++ir) { + // src0, src1 and dst are same shape => same indices + const int i3 = ir/(ne2*ne1); + const int i2 = (ir - i3*ne2*ne1)/ne1; + const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + + float * dst_ptr = (float *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1); + ggml_bf16_t * src0_ptr = (ggml_bf16_t *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01); + float * src1_ptr = (float *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11); + + for (int i = 0; i < ne0; i++) { + dst_ptr[i] = GGML_BF16_TO_FP32(src0_ptr[i]) + src1_ptr[i]; + } + } + } + } + else { + // src1 is not contiguous + GGML_ABORT("fatal error"); + } +} + +static void ggml_compute_forward_add_f16_f16( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src0); + + GGML_TENSOR_BINARY_OP_LOCALS + + GGML_ASSERT(src0->type == GGML_TYPE_F16); + GGML_ASSERT(src1->type == GGML_TYPE_F16); + GGML_ASSERT(dst->type == GGML_TYPE_F16); + + GGML_ASSERT( nb0 == sizeof(ggml_fp16_t)); + GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + if (nb10 == sizeof(ggml_fp16_t)) { + for (int ir = ir0; ir < ir1; ++ir) { + // src0, src1 and dst are same shape => same indices + const int i3 = ir/(ne2*ne1); + const int i2 = (ir - i3*ne2*ne1)/ne1; + const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + + ggml_fp16_t * dst_ptr = (ggml_fp16_t *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1); + 
ggml_fp16_t * src0_ptr = (ggml_fp16_t *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01); + ggml_fp16_t * src1_ptr = (ggml_fp16_t *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11); + + for (int i = 0; i < ne0; i++) { + dst_ptr[i] = GGML_FP32_TO_FP16(GGML_FP16_TO_FP32(src0_ptr[i]) + GGML_FP16_TO_FP32(src1_ptr[i])); + } + } + } + else { + // src1 is not contiguous + GGML_ABORT("fatal error"); + } +} + +static void ggml_compute_forward_add_bf16_bf16( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src0); + + GGML_TENSOR_BINARY_OP_LOCALS + + GGML_ASSERT(src0->type == GGML_TYPE_BF16); + GGML_ASSERT(src1->type == GGML_TYPE_BF16); + GGML_ASSERT(dst->type == GGML_TYPE_BF16); + + GGML_ASSERT( nb0 == sizeof(ggml_bf16_t)); + GGML_ASSERT(nb00 == sizeof(ggml_bf16_t)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + if (nb10 == sizeof(ggml_bf16_t)) { + for (int ir = ir0; ir < ir1; ++ir) { + // src0, src1 and dst are same shape => same indices + const int i3 = ir/(ne2*ne1); + const int i2 = (ir - i3*ne2*ne1)/ne1; + const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + + ggml_bf16_t * dst_ptr = (ggml_bf16_t *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1); + ggml_bf16_t * src0_ptr = (ggml_bf16_t *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01); + ggml_bf16_t * src1_ptr = (ggml_bf16_t *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11); + + for (int i = 0; i < ne0; i++) { + dst_ptr[i] = GGML_FP32_TO_BF16(GGML_BF16_TO_FP32(src0_ptr[i]) + GGML_BF16_TO_FP32(src1_ptr[i])); + } + } + } + else { + // src1 is not contiguous + GGML_ABORT("fatal error"); + } +} + +static 
void ggml_compute_forward_add_q_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); + + const int nr = ggml_nrows(src0); + + GGML_TENSOR_BINARY_OP_LOCALS + + const int ith = params->ith; + const int nth = params->nth; + + const enum ggml_type type = src0->type; + const enum ggml_type dtype = dst->type; + ggml_to_float_t const dequantize_row_q = type_traits[type].to_float; + ggml_from_float_t const quantize_row_q = type_traits[dtype].from_float; + + // we don't support permuted src0 or src1 + GGML_ASSERT(nb00 == ggml_type_size(type)); + GGML_ASSERT(nb10 == sizeof(float)); + + // dst cannot be transposed or permuted + GGML_ASSERT(nb0 <= nb1); + GGML_ASSERT(nb1 <= nb2); + GGML_ASSERT(nb2 <= nb3); + + GGML_ASSERT(ggml_is_quantized(src0->type)); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + float * wdata = (float *) params->wdata + (ne00 + CACHE_LINE_SIZE_F32) * ith; + + for (int ir = ir0; ir < ir1; ++ir) { + // src0 indices + const int i03 = ir/(ne02*ne01); + const int i02 = (ir - i03*ne02*ne01)/ne01; + const int i01 = (ir - i03*ne02*ne01 - i02*ne01); + + // src1 and dst are same shape as src0 => same indices + const int i13 = i03; + const int i12 = i02; + const int i11 = i01; + + const int i3 = i03; + const int i2 = i02; + const int i1 = i01; + + void * src0_row = (void *) ((char *) src0->data + (i01*nb01 + i02*nb02 + i03*nb03)); + float * src1_row = (float *)((char *) src1->data + (i11*nb11 + i12*nb12 + i13*nb13)); + void * dst_row = (void *) ((char *) dst->data + ( i1*nb1 + i2*nb2 + i3*nb3)); + + assert(ne00 % 32 == 0); + + // unquantize row from src0 to temp buffer + dequantize_row_q(src0_row, wdata, 
ne00); + // add src1 + ggml_vec_acc_f32(ne00, wdata, src1_row); + // quantize row to dst + if (quantize_row_q != NULL) { + quantize_row_q(wdata, dst_row, ne00); + } else { + memcpy(dst_row, wdata, ne0*nb0); + } + } +} + +static void ggml_compute_forward_add( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + if (src1->type == GGML_TYPE_F32) { + ggml_compute_forward_add_f32(params, dst); + } + else { + GGML_ABORT("fatal error"); + } + } break; + case GGML_TYPE_F16: + { + if (src1->type == GGML_TYPE_F16) { + ggml_compute_forward_add_f16_f16(params, dst); + } + else if (src1->type == GGML_TYPE_F32) { + ggml_compute_forward_add_f16_f32(params, dst); + } + else { + GGML_ABORT("fatal error"); + } + } break; + case GGML_TYPE_BF16: + { + if (src1->type == GGML_TYPE_BF16) { + ggml_compute_forward_add_bf16_bf16(params, dst); + } + else if (src1->type == GGML_TYPE_F32) { + ggml_compute_forward_add_bf16_f32(params, dst); + } + else { + GGML_ABORT("fatal error"); + } + } break; + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: + case GGML_TYPE_Q5_0: + case GGML_TYPE_Q5_1: + case GGML_TYPE_Q8_0: + case GGML_TYPE_Q2_K: + case GGML_TYPE_Q3_K: + case GGML_TYPE_Q4_K: + case GGML_TYPE_Q5_K: + case GGML_TYPE_Q6_K: + case GGML_TYPE_TQ1_0: + case GGML_TYPE_TQ2_0: + case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: + case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ1_M: + case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ4_XS: + case GGML_TYPE_IQ3_S: + case GGML_TYPE_IQ2_S: + case GGML_TYPE_Q4_0_4_4: + case GGML_TYPE_Q4_0_4_8: + case GGML_TYPE_Q4_0_8_8: + { + ggml_compute_forward_add_q_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_add1 + +static void ggml_compute_forward_add1_f32( + const struct ggml_compute_params * params, + struct 
ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_are_same_shape(src0, dst)); + GGML_ASSERT(ggml_is_scalar(src1)); + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src0); + + GGML_TENSOR_UNARY_OP_LOCALS + + GGML_ASSERT( nb0 == sizeof(float)); + GGML_ASSERT(nb00 == sizeof(float)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + for (int ir = ir0; ir < ir1; ++ir) { + // src0 and dst are same shape => same indices + const int i3 = ir/(ne2*ne1); + const int i2 = (ir - i3*ne2*ne1)/ne1; + const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + +#ifdef GGML_USE_ACCELERATE + UNUSED(ggml_vec_add1_f32); + + vDSP_vadd( + (float *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01), 1, + (float *) ((char *) src1->data), 0, + (float *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 ), 1, + ne0); +#else + ggml_vec_add1_f32(ne0, + (float *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 ), + (float *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01), + *(float *) src1->data); +#endif + } +} + +static void ggml_compute_forward_add1_f16_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_are_same_shape(src0, dst)); + GGML_ASSERT(ggml_is_scalar(src1)); + + // scalar to add + const float v = *(float *) src1->data; + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src0); + + GGML_TENSOR_UNARY_OP_LOCALS + + GGML_ASSERT(src0->type == GGML_TYPE_F16); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT(dst->type == GGML_TYPE_F16); + + GGML_ASSERT( nb0 == sizeof(ggml_fp16_t)); + GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); + + // rows per thread + const int dr 
= (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + for (int ir = ir0; ir < ir1; ++ir) { + // src0 and dst are same shape => same indices + const int i3 = ir/(ne2*ne1); + const int i2 = (ir - i3*ne2*ne1)/ne1; + const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + + ggml_fp16_t * dst_ptr = (ggml_fp16_t *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 ); + ggml_fp16_t * src0_ptr = (ggml_fp16_t *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01); + for (int i = 0; i < ne0; i++) { + dst_ptr[i] = GGML_FP32_TO_FP16(GGML_FP16_TO_FP32(src0_ptr[i]) + v); + } + } +} + +static void ggml_compute_forward_add1_f16_f16( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_are_same_shape(src0, dst)); + GGML_ASSERT(ggml_is_scalar(src1)); + + // scalar to add + const float v = GGML_FP16_TO_FP32(*(ggml_fp16_t *) src1->data); + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src0); + + GGML_TENSOR_UNARY_OP_LOCALS + + GGML_ASSERT(src0->type == GGML_TYPE_F16); + GGML_ASSERT(src1->type == GGML_TYPE_F16); + GGML_ASSERT(dst->type == GGML_TYPE_F16); + + GGML_ASSERT( nb0 == sizeof(ggml_fp16_t)); + GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + for (int ir = ir0; ir < ir1; ++ir) { + // src0 and dst are same shape => same indices + const int i3 = ir/(ne2*ne1); + const int i2 = (ir - i3*ne2*ne1)/ne1; + const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + + ggml_fp16_t * dst_ptr = (ggml_fp16_t *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 ); + ggml_fp16_t * src0_ptr = (ggml_fp16_t *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01); + for (int i = 0; i < ne0; i++) { + dst_ptr[i] = 
GGML_FP32_TO_FP16(GGML_FP16_TO_FP32(src0_ptr[i]) + v); + } + } +} + +static void ggml_compute_forward_add1_q_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_are_same_shape(src0, dst)); + GGML_ASSERT(ggml_is_scalar(src1)); + + // scalar to add + const float v = *(float *) src1->data; + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src0); + + GGML_TENSOR_UNARY_OP_LOCALS + + const enum ggml_type type = src0->type; + ggml_to_float_t const dequantize_row_q = type_traits[type].to_float; + ggml_from_float_t const quantize_row_q = type_traits[type].from_float; + + // we don't support permuted src0 + GGML_ASSERT(nb00 == ggml_type_size(type)); + + // dst cannot be transposed or permuted + GGML_ASSERT(nb0 <= nb1); + GGML_ASSERT(nb1 <= nb2); + GGML_ASSERT(nb2 <= nb3); + + GGML_ASSERT(ggml_is_quantized(src0->type)); + GGML_ASSERT(dst->type == src0->type); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + float * wdata = (float *) params->wdata + (ne0 + CACHE_LINE_SIZE_F32) * ith; + + for (int ir = ir0; ir < ir1; ++ir) { + // src0 and dst are same shape => same indices + const int i3 = ir/(ne2*ne1); + const int i2 = (ir - i3*ne2*ne1)/ne1; + const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + + void * src0_row = (void *) ((char *) src0->data + (i1*nb01 + i2*nb02 + i3*nb03)); + void * dst_row = (void *) ((char *) dst->data + (i1*nb1 + i2*nb2 + i3*nb0 )); + + assert(ne0 % 32 == 0); + + // unquantize row from src0 to temp buffer + dequantize_row_q(src0_row, wdata, ne0); + // add src1 + ggml_vec_acc1_f32(ne0, wdata, v); + // quantize row to dst + quantize_row_q(wdata, dst_row, ne0); + } +} + +static void ggml_compute_forward_add1_bf16_f32( + 
const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_are_same_shape(src0, dst)); + GGML_ASSERT(ggml_is_scalar(src1)); + + // scalar to add + const float v = *(float *) src1->data; + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src0); + + GGML_TENSOR_UNARY_OP_LOCALS + + GGML_ASSERT(src0->type == GGML_TYPE_BF16); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT(dst->type == GGML_TYPE_BF16); + + GGML_ASSERT( nb0 == sizeof(ggml_bf16_t)); + GGML_ASSERT(nb00 == sizeof(ggml_bf16_t)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + for (int ir = ir0; ir < ir1; ++ir) { + // src0 and dst are same shape => same indices + const int i3 = ir/(ne2*ne1); + const int i2 = (ir - i3*ne2*ne1)/ne1; + const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + + ggml_bf16_t * dst_ptr = (ggml_bf16_t *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 ); + ggml_bf16_t * src0_ptr = (ggml_bf16_t *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01); + for (int i = 0; i < ne0; i++) { + dst_ptr[i] = GGML_FP32_TO_BF16(GGML_BF16_TO_FP32(src0_ptr[i]) + v); + } + } +} + +static void ggml_compute_forward_add1_bf16_bf16( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_are_same_shape(src0, dst)); + GGML_ASSERT(ggml_is_scalar(src1)); + + // scalar to add + const float v = GGML_BF16_TO_FP32(*(ggml_bf16_t *) src1->data); + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src0); + + GGML_TENSOR_UNARY_OP_LOCALS + + GGML_ASSERT(src0->type == GGML_TYPE_BF16); + GGML_ASSERT(src1->type == GGML_TYPE_BF16); + GGML_ASSERT(dst->type == 
GGML_TYPE_BF16); + + GGML_ASSERT( nb0 == sizeof(ggml_bf16_t)); + GGML_ASSERT(nb00 == sizeof(ggml_bf16_t)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + for (int ir = ir0; ir < ir1; ++ir) { + // src0 and dst are same shape => same indices + const int i3 = ir/(ne2*ne1); + const int i2 = (ir - i3*ne2*ne1)/ne1; + const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + + ggml_bf16_t * dst_ptr = (ggml_bf16_t *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 ); + ggml_bf16_t * src0_ptr = (ggml_bf16_t *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01); + for (int i = 0; i < ne0; i++) { + dst_ptr[i] = GGML_FP32_TO_BF16(GGML_BF16_TO_FP32(src0_ptr[i]) + v); + } + } +} + +static void ggml_compute_forward_add1( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_add1_f32(params, dst); + } break; + case GGML_TYPE_F16: + { + if (src1->type == GGML_TYPE_F16) { + ggml_compute_forward_add1_f16_f16(params, dst); + } + else if (src1->type == GGML_TYPE_F32) { + ggml_compute_forward_add1_f16_f32(params, dst); + } + else { + GGML_ABORT("fatal error"); + } + } break; + case GGML_TYPE_BF16: + { + if (src1->type == GGML_TYPE_BF16) { + ggml_compute_forward_add1_bf16_bf16(params, dst); + } + else if (src1->type == GGML_TYPE_F32) { + ggml_compute_forward_add1_bf16_f32(params, dst); + } + else { + GGML_ABORT("fatal error"); + } + } break; + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: + case GGML_TYPE_Q5_0: + case GGML_TYPE_Q5_1: + case GGML_TYPE_Q8_0: + case GGML_TYPE_Q8_1: + case GGML_TYPE_Q2_K: + case GGML_TYPE_Q3_K: + case GGML_TYPE_Q4_K: + case GGML_TYPE_Q5_K: + case GGML_TYPE_Q6_K: + case GGML_TYPE_TQ1_0: + case GGML_TYPE_TQ2_0: + case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: + case 
GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ1_M: + case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ4_XS: + case GGML_TYPE_IQ3_S: + case GGML_TYPE_IQ2_S: + case GGML_TYPE_Q4_0_4_4: + case GGML_TYPE_Q4_0_4_8: + case GGML_TYPE_Q4_0_8_8: + { + ggml_compute_forward_add1_q_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_acc + +static void ggml_compute_forward_acc_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_are_same_shape(src0, dst)); + GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0)); + + // view src0 and dst with these strides and data offset inbytes during acc + // nb0 is implicitly element_size because src0 and dst are contiguous + size_t nb1 = ((int32_t *) dst->op_params)[0]; + size_t nb2 = ((int32_t *) dst->op_params)[1]; + size_t nb3 = ((int32_t *) dst->op_params)[2]; + size_t offset = ((int32_t *) dst->op_params)[3]; + bool inplace = (bool) ((int32_t *) dst->op_params)[4]; + + if (!inplace) { + if (params->ith == 0) { + // memcpy needs to be synchronized across threads to avoid race conditions. + // => do it in INIT phase + memcpy( + ((char *) dst->data), + ((char *) src0->data), + ggml_nbytes(dst)); + } + ggml_barrier(params->threadpool); + } + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src1); + const int nc = src1->ne[0]; + + GGML_TENSOR_LOCALS(int64_t, ne1, src1, ne) + GGML_TENSOR_LOCALS(size_t, nb1, src1, nb) + + // src0 and dst as viewed during acc + const size_t nb0 = ggml_element_size(src0); + + const size_t nb00 = nb0; + const size_t nb01 = nb1; + const size_t nb02 = nb2; + const size_t nb03 = nb3; + + GGML_ASSERT(offset + (ne10 == 0 ? 0 : ne10-1)*nb0 + (ne11 == 0 ? 0 : ne11-1)*nb1 + (ne12 == 0 ? 0 : ne12-1)*nb2 + (ne13 == 0 ? 
0 : ne13-1)*nb3 < ggml_nbytes(dst)); + GGML_ASSERT(offset + (ne10 == 0 ? 0 : ne10-1)*nb00 + (ne11 == 0 ? 0 : ne11-1)*nb01 + (ne12 == 0 ? 0 : ne12-1)*nb02 + (ne13 == 0 ? 0 : ne13-1)*nb03 < ggml_nbytes(src0)); + + GGML_ASSERT(nb10 == sizeof(float)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + for (int ir = ir0; ir < ir1; ++ir) { + // src0 and dst are viewed with shape of src1 and offset + // => same indices + const int i3 = ir/(ne12*ne11); + const int i2 = (ir - i3*ne12*ne11)/ne11; + const int i1 = (ir - i3*ne12*ne11 - i2*ne11); + +#ifdef GGML_USE_ACCELERATE + vDSP_vadd( + (float *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + offset), 1, + (float *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11), 1, + (float *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + offset), 1, nc); +#else + ggml_vec_add_f32(nc, + (float *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + offset), + (float *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + offset), + (float *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11)); +#endif + } +} + +static void ggml_compute_forward_acc( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_acc_f32(params, dst); + } break; + case GGML_TYPE_F16: + case GGML_TYPE_BF16: + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: + case GGML_TYPE_Q5_0: + case GGML_TYPE_Q5_1: + case GGML_TYPE_Q8_0: + case GGML_TYPE_Q8_1: + case GGML_TYPE_Q2_K: + case GGML_TYPE_Q3_K: + case GGML_TYPE_Q4_K: + case GGML_TYPE_Q5_K: + case GGML_TYPE_Q6_K: + case GGML_TYPE_TQ1_0: + case GGML_TYPE_TQ2_0: + case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: + case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ1_M: + case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ4_XS: + case GGML_TYPE_IQ3_S: + case 
GGML_TYPE_IQ2_S: + case GGML_TYPE_Q4_0_4_4: + case GGML_TYPE_Q4_0_4_8: + case GGML_TYPE_Q4_0_8_8: + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_sub + +static void ggml_compute_forward_sub_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + assert(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src0); + + GGML_TENSOR_BINARY_OP_LOCALS + + GGML_ASSERT( nb0 == sizeof(float)); + GGML_ASSERT(nb00 == sizeof(float)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + if (nb10 == sizeof(float)) { + for (int ir = ir0; ir < ir1; ++ir) { + // src1 is broadcastable across src0 and dst in i1, i2, i3 + const int64_t i03 = ir/(ne02*ne01); + const int64_t i02 = (ir - i03*ne02*ne01)/ne01; + const int64_t i01 = (ir - i03*ne02*ne01 - i02*ne01); + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + const int64_t nr0 = ne00 / ne10; + + float * dst_ptr = (float *) ((char *) dst->data + i03*nb3 + i02*nb2 + i01*nb1 ); + float * src0_ptr = (float *) ((char *) src0->data + i03*nb03 + i02*nb02 + i01*nb01); + float * src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11); + + for (int64_t r = 0; r < nr0; ++r) { +#ifdef GGML_USE_ACCELERATE + vDSP_vsub(src1_ptr, 1, src0_ptr + r*ne10, 1, dst_ptr + r*ne10, 1, ne10); +#else + ggml_vec_sub_f32(ne10, dst_ptr + r*ne10, src0_ptr + r*ne10, src1_ptr); +#endif + } + } + } else { + // src1 is not contiguous + for (int ir = ir0; ir < ir1; ++ir) { + // src1 is broadcastable across src0 and dst in i1, i2, i3 + const int64_t i03 = ir/(ne02*ne01); + const int64_t i02 = (ir - i03*ne02*ne01)/ne01; + const 
int64_t i01 = (ir - i03*ne02*ne01 - i02*ne01); + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + + float * dst_ptr = (float *) ((char *) dst->data + i03*nb3 + i02*nb2 + i01*nb1 ); + float * src0_ptr = (float *) ((char *) src0->data + i03*nb03 + i02*nb02 + i01*nb01); + + for (int64_t i0 = 0; i0 < ne0; ++i0) { + const int64_t i10 = i0 % ne10; + float * src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11 + i10*nb10); + + dst_ptr[i0] = src0_ptr[i0] - *src1_ptr; + } + } + } +} + +static void ggml_compute_forward_sub( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_sub_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_mul + +static void ggml_compute_forward_mul_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); + + const int ith = params->ith; + const int nth = params->nth; + + const int64_t nr = ggml_nrows(src0); + + GGML_TENSOR_BINARY_OP_LOCALS + + GGML_ASSERT( nb0 == sizeof(float)); + GGML_ASSERT(nb00 == sizeof(float)); + + if (nb10 == sizeof(float)) { + for (int64_t ir = ith; ir < nr; ir += nth) { + // src0 and dst are same shape => same indices + const int64_t i03 = ir/(ne02*ne01); + const int64_t i02 = (ir - i03*ne02*ne01)/ne01; + const int64_t i01 = (ir - i03*ne02*ne01 - i02*ne01); + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + const int64_t nr0 = ne00 / ne10; + + float * dst_ptr = (float *) ((char *) dst->data + i03*nb3 + i02*nb2 + i01*nb1 ); + float * src0_ptr = (float *) ((char *) src0->data + i03*nb03 + 
i02*nb02 + i01*nb01); + float * src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11); + + for (int64_t r = 0 ; r < nr0; ++r) { +#ifdef GGML_USE_ACCELERATE + UNUSED(ggml_vec_mul_f32); + + vDSP_vmul(src0_ptr + r*ne10, 1, src1_ptr, 1, dst_ptr + r*ne10, 1, ne10); +#else + ggml_vec_mul_f32(ne10, dst_ptr + r*ne10, src0_ptr + r*ne10, src1_ptr); +#endif + } + } + } else { + // src1 is not contiguous + for (int64_t ir = ith; ir < nr; ir += nth) { + // src0 and dst are same shape => same indices + // src1 is broadcastable across src0 and dst in i1, i2, i3 + const int64_t i03 = ir/(ne02*ne01); + const int64_t i02 = (ir - i03*ne02*ne01)/ne01; + const int64_t i01 = (ir - i03*ne02*ne01 - i02*ne01); + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + + float * dst_ptr = (float *) ((char *) dst->data + i03*nb3 + i02*nb2 + i01*nb1 ); + float * src0_ptr = (float *) ((char *) src0->data + i03*nb03 + i02*nb02 + i01*nb01); + + for (int64_t i0 = 0; i0 < ne00; ++i0) { + const int64_t i10 = i0 % ne10; + float * src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11 + i10*nb10); + + dst_ptr[i0] = src0_ptr[i0] * (*src1_ptr); + } + } + } +} + +static void ggml_compute_forward_mul( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(src1->type == GGML_TYPE_F32 && "only f32 src1 supported for now"); + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_mul_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_div + +static void ggml_compute_forward_div_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_can_repeat(src1, src0) && 
ggml_are_same_shape(src0, dst)); + + const int ith = params->ith; + const int nth = params->nth; + + const int64_t nr = ggml_nrows(src0); + + GGML_TENSOR_BINARY_OP_LOCALS + + GGML_ASSERT( nb0 == sizeof(float)); + GGML_ASSERT(nb00 == sizeof(float)); + + if (nb10 == sizeof(float)) { + for (int64_t ir = ith; ir < nr; ir += nth) { + // src0 and dst are same shape => same indices + const int64_t i03 = ir/(ne02*ne01); + const int64_t i02 = (ir - i03*ne02*ne01)/ne01; + const int64_t i01 = (ir - i03*ne02*ne01 - i02*ne01); + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + const int64_t nr0 = ne00 / ne10; + + float * dst_ptr = (float *) ((char *) dst->data + i03*nb3 + i02*nb2 + i01*nb1 ); + float * src0_ptr = (float *) ((char *) src0->data + i03*nb03 + i02*nb02 + i01*nb01); + float * src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11); + + for (int64_t r = 0; r < nr0; ++r) { +#ifdef GGML_USE_ACCELERATE + UNUSED(ggml_vec_div_f32); + + vDSP_vdiv(src1_ptr, 1, src0_ptr + r*ne10, 1, dst_ptr + r*ne10, 1, ne10); +#else + ggml_vec_div_f32(ne10, dst_ptr + r*ne10, src0_ptr + r*ne10, src1_ptr); +#endif + } + } + } else { + // src1 is not contiguous + for (int64_t ir = ith; ir < nr; ir += nth) { + // src0 and dst are same shape => same indices + // src1 is broadcastable across src0 and dst in i1, i2, i3 + const int64_t i03 = ir/(ne02*ne01); + const int64_t i02 = (ir - i03*ne02*ne01)/ne01; + const int64_t i01 = (ir - i03*ne02*ne01 - i02*ne01); + + const int64_t i13 = i03 % ne13; + const int64_t i12 = i02 % ne12; + const int64_t i11 = i01 % ne11; + + float * dst_ptr = (float *) ((char *) dst->data + i03*nb3 + i02*nb2 + i01*nb1 ); + float * src0_ptr = (float *) ((char *) src0->data + i03*nb03 + i02*nb02 + i01*nb01); + + for (int64_t i0 = 0; i0 < ne00; ++i0) { + const int64_t i10 = i0 % ne10; + float * src1_ptr = (float *) ((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11 + i10*nb10); + + dst_ptr[i0] = 
src0_ptr[i0] / (*src1_ptr);
            }
        }
    }
}

// Type dispatch for the DIV op: only F32 src0 is implemented; any other
// type aborts.
static void ggml_compute_forward_div(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_div_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_sqr

// Element-wise square: dst[i] = src0[i]*src0[i], F32 only.
// Single-threaded kernel: every thread except ith == 0 returns immediately.
// Rows are addressed via the nb[1] byte stride; nb[0] must be a packed float
// (asserted below), so each row is processed with one ggml_vec_sqr_f32 call.
static void ggml_compute_forward_sqr_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(ggml_are_same_shape(src0, dst));

    const int n  = ggml_nrows(src0);
    const int nc = src0->ne[0];

    assert( dst->nb[0] == sizeof(float));
    assert(src0->nb[0] == sizeof(float));

    for (int i = 0; i < n; i++) {
        ggml_vec_sqr_f32(nc,
                (float *) ((char *) dst->data  + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])));
    }
}

// Type dispatch for SQR: F32 only.
static void ggml_compute_forward_sqr(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_sqr_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_sqrt

// Element-wise square root, F32 only; same single-threaded, row-at-a-time
// structure as ggml_compute_forward_sqr_f32 above.
static void ggml_compute_forward_sqrt_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(ggml_are_same_shape(src0, dst));

    const int n  = ggml_nrows(src0);
    const int nc = src0->ne[0];

    assert( dst->nb[0] == sizeof(float));
    assert(src0->nb[0] == sizeof(float));

    for (int i = 0; i < n; i++) {
        ggml_vec_sqrt_f32(nc,
                (float *) ((char *) dst->data  + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])));
    }
}

// Type dispatch for SQRT: F32 only.
static void ggml_compute_forward_sqrt(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_sqrt_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_log

// Element-wise natural log, F32 only. Same shape/threading contract as the
// sqr/sqrt kernels, but uses GGML_ASSERT (always-on) instead of assert.
static void ggml_compute_forward_log_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    GGML_ASSERT(ggml_are_same_shape(src0, dst));

    const int n  = ggml_nrows(src0);
    const int nc = src0->ne[0];

    GGML_ASSERT( dst->nb[0] == sizeof(float));
    GGML_ASSERT(src0->nb[0] == sizeof(float));

    for (int i = 0; i < n; i++) {
        ggml_vec_log_f32(nc,
                (float *) ((char *) dst->data  + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])));
    }
}

// Type dispatch for LOG: F32 only.
static void ggml_compute_forward_log(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_log_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_sin

// Element-wise sine, F32 only; identical structure to the log kernel.
static void ggml_compute_forward_sin_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    GGML_ASSERT(ggml_are_same_shape(src0, dst));

    const int n  = ggml_nrows(src0);
    const int nc = src0->ne[0];

    GGML_ASSERT( dst->nb[0] == sizeof(float));
    GGML_ASSERT(src0->nb[0] == sizeof(float));

    for (int i = 0; i < n; i++) {
        ggml_vec_sin_f32(nc,
                (float *) ((char *) dst->data  + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])));
    }
}

// Type dispatch for SIN: F32 only.
static void ggml_compute_forward_sin(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    
switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_sin_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_cos + +static void ggml_compute_forward_cos_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + if (params->ith != 0) { + return; + } + + GGML_ASSERT(ggml_are_same_shape(src0, dst)); + + const int n = ggml_nrows(src0); + const int nc = src0->ne[0]; + + GGML_ASSERT( dst->nb[0] == sizeof(float)); + GGML_ASSERT(src0->nb[0] == sizeof(float)); + + for (int i = 0; i < n; i++) { + ggml_vec_cos_f32(nc, + (float *) ((char *) dst->data + i*( dst->nb[1])), + (float *) ((char *) src0->data + i*(src0->nb[1]))); + } +} + +static void ggml_compute_forward_cos( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_cos_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_sum + +static void ggml_compute_forward_sum_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + if (params->ith != 0) { + return; + } + + assert(ggml_is_scalar(dst)); + assert(src0->nb[0] == sizeof(float)); + + GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne) + GGML_TENSOR_LOCALS(size_t, nb0, src0, nb) + + ggml_float sum = 0; + ggml_float row_sum = 0; + + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + for (int64_t i01 = 0; i01 < ne01; i01++) { + ggml_vec_sum_f32_ggf(ne00, + &row_sum, + (float *) ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03)); + sum += row_sum; + } + } + } + ((float *) dst->data)[0] = sum; +} + +static void ggml_compute_forward_sum_f16( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) 
{ + + const struct ggml_tensor * src0 = dst->src[0]; + + if (params->ith != 0) { + return; + } + + assert(ggml_is_scalar(dst)); + + assert(src0->nb[0] == sizeof(ggml_fp16_t)); + + GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne) + GGML_TENSOR_LOCALS(size_t, nb0, src0, nb) + + float sum = 0; + float row_sum = 0; + + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + for (int64_t i01 = 0; i01 < ne01; i01++) { + ggml_vec_sum_f16_ggf(ne00, + &row_sum, + (ggml_fp16_t *) ((char *) src0->data + i01 * nb01 + i02 * nb02 + i03 * nb03)); + sum += row_sum; + } + } + } + ((ggml_fp16_t *) dst->data)[0] = GGML_FP32_TO_FP16(sum); +} + +static void ggml_compute_forward_sum_bf16( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + if (params->ith != 0) { + return; + } + + assert(ggml_is_scalar(dst)); + + assert(src0->nb[0] == sizeof(ggml_bf16_t)); + + GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne) + GGML_TENSOR_LOCALS(size_t, nb0, src0, nb) + + float sum = 0; + float row_sum = 0; + + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + for (int64_t i01 = 0; i01 < ne01; i01++) { + ggml_vec_sum_bf16_ggf(ne00, + &row_sum, + (ggml_bf16_t *) ((char *) src0->data + i01 * nb01 + i02 * nb02 + i03 * nb03)); + sum += row_sum; + } + } + } + ((ggml_bf16_t *) dst->data)[0] = GGML_FP32_TO_BF16(sum); +} + +static void ggml_compute_forward_sum( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_sum_f32(params, dst); + } break; + case GGML_TYPE_F16: + { + ggml_compute_forward_sum_f16(params, dst); + } break; + case GGML_TYPE_BF16: + { + ggml_compute_forward_sum_bf16(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_sum_rows + +static void 
ggml_compute_forward_sum_rows_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    // single-threaded kernel: only thread 0 does the work
    if (params->ith != 0) {
        return;
    }

    GGML_ASSERT(src0->nb[0] == sizeof(float));
    GGML_ASSERT(dst->nb[0] == sizeof(float));

    GGML_TENSOR_UNARY_OP_LOCALS

    // dst collapses dim 0 of src0 to a single element; all other dims match
    GGML_ASSERT(ne0 == 1);
    GGML_ASSERT(ne1 == ne01);
    GGML_ASSERT(ne2 == ne02);
    GGML_ASSERT(ne3 == ne03);

    // reduce each ne00-length row of src0 into one float of dst
    for (int64_t i3 = 0; i3 < ne03; i3++) {
        for (int64_t i2 = 0; i2 < ne02; i2++) {
            for (int64_t i1 = 0; i1 < ne01; i1++) {
                float * src_row = (float *) ((char *) src0->data + i1*nb01 + i2*nb02 + i3*nb03);
                float * dst_row = (float *) ((char *) dst->data  + i1*nb1  + i2*nb2  + i3*nb3);
                float row_sum = 0;
                ggml_vec_sum_f32(ne00, &row_sum, src_row);
                dst_row[0] = row_sum;
            }
        }
    }
}

// Type dispatch for SUM_ROWS: F32 only.
static void ggml_compute_forward_sum_rows(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_sum_rows_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_mean

// Per-row mean along dim 0: like sum_rows, but each row sum is then divided
// by the row length ne00. F32 only, single-threaded.
static void ggml_compute_forward_mean_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(src0->nb[0] == sizeof(float));

    GGML_TENSOR_UNARY_OP_LOCALS

    // same dst shape contract as sum_rows: dim 0 reduced to 1
    assert(ne0 == 1);
    assert(ne1 == ne01);
    assert(ne2 == ne02);
    assert(ne3 == ne03);

    // silence unused-variable warnings when asserts compile out (NDEBUG)
    UNUSED(ne0);
    UNUSED(ne1);
    UNUSED(ne2);
    UNUSED(ne3);

    for (int64_t i03 = 0; i03 < ne03; i03++) {
        for (int64_t i02 = 0; i02 < ne02; i02++) {
            for (int64_t i01 = 0; i01 < ne01; i01++) {
                // sum the row into dst, then divide in place by the row length
                ggml_vec_sum_f32(ne00,
                        (float *) ((char *) dst->data  + i01*nb1  + i02*nb2  + i03*nb3),
                        (float *) ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03));

                *(float *) ((char *) dst->data + i01*nb1 + i02*nb2 + i03*nb3) /= (float) ne00;
            }
        }
    }
}

// Type dispatch for MEAN: F32 only.
static void ggml_compute_forward_mean(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_mean_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_argmax

// For each of the ne01 rows of src0, writes the index of the row maximum
// into dst as an int32. Only the first two dims of src0 are considered.
// NOTE(review): dst->nb[0] is asserted against sizeof(float) but the store is
// through an int32_t pointer — same 4-byte size, so the check still holds.
static void ggml_compute_forward_argmax_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(src0->nb[0] == sizeof(float));
    assert(dst->nb[0] == sizeof(float));

    const int64_t ne00 = src0->ne[0];
    const int64_t ne01 = src0->ne[1];

    const size_t nb01 = src0->nb[1];
    const size_t nb0  = dst->nb[0];

    for (int64_t i1 = 0; i1 < ne01; i1++) {
        float * src = (float *) ((char *) src0->data + i1*nb01);
        // dst is indexed with its element stride nb0: one int32 per row
        int32_t * dst_ = (int32_t *) ((char *) dst->data + i1*nb0);
        int v = 0;
        ggml_vec_argmax_f32(ne00, &v, src);
        dst_[0] = v;
    }
}

// Type dispatch for ARGMAX: F32 src0 only.
static void ggml_compute_forward_argmax(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_argmax_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_repeat

// Tiles src0 into the larger dst tensor; nr0..nr3 are the per-dimension
// repeat counts (exact division guaranteed by ggml_can_repeat). F32 rows,
// single-threaded.
static void ggml_compute_forward_repeat_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    GGML_ASSERT(ggml_can_repeat(src0, dst));

    GGML_TENSOR_UNARY_OP_LOCALS

    // guaranteed to be an integer due to the check in ggml_can_repeat
    const int nr0 = (int)(ne0/ne00);
    const int nr1 = (int)(ne1/ne01);
    const int nr2 = (int)(ne2/ne02);
    const int nr3 = (int)(ne3/ne03);

    // TODO: support for transposed / 
// permuted tensors
    GGML_ASSERT(nb0 == sizeof(float));
    GGML_ASSERT(nb00 == sizeof(float));

    // TODO: maybe this is not optimal?
    for (int i3 = 0; i3 < nr3; i3++) {
        for (int k3 = 0; k3 < ne03; k3++) {
            for (int i2 = 0; i2 < nr2; i2++) {
                for (int k2 = 0; k2 < ne02; k2++) {
                    for (int i1 = 0; i1 < nr1; i1++) {
                        for (int k1 = 0; k1 < ne01; k1++) {
                            for (int i0 = 0; i0 < nr0; i0++) {
                                // copy one source row into each of its nr0 horizontal repeats
                                ggml_vec_cpy_f32(ne00,
                                        (float *) ((char *)  dst->data + (i3*ne03 + k3)*nb3  + (i2*ne02 + k2)*nb2  + (i1*ne01 + k1)*nb1  + (i0*ne00)*nb0),
                                        (float *) ((char *) src0->data + (          k3)*nb03 + (          k2)*nb02 + (          k1)*nb01));
                            }
                        }
                    }
                }
            }
        }
    }
}

// Same tiling as ggml_compute_forward_repeat_f32 but for 16-bit element
// types; the row copy is done with an explicit loop.
static void ggml_compute_forward_repeat_f16(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    GGML_ASSERT(ggml_can_repeat(src0, dst));

    GGML_TENSOR_UNARY_OP_LOCALS

    // guaranteed to be an integer due to the check in ggml_can_repeat
    const int nr0 = (int)(ne0/ne00);
    const int nr1 = (int)(ne1/ne01);
    const int nr2 = (int)(ne2/ne02);
    const int nr3 = (int)(ne3/ne03);

    // TODO: support for transposed / permuted tensors
    GGML_ASSERT(nb0 == sizeof(ggml_fp16_t));
    GGML_ASSERT(nb00 == sizeof(ggml_fp16_t));

    // TODO: maybe this is not optimal?
    for (int i3 = 0; i3 < nr3; i3++) {
        for (int k3 = 0; k3 < ne03; k3++) {
            for (int i2 = 0; i2 < nr2; i2++) {
                for (int k2 = 0; k2 < ne02; k2++) {
                    for (int i1 = 0; i1 < nr1; i1++) {
                        for (int k1 = 0; k1 < ne01; k1++) {
                            for (int i0 = 0; i0 < nr0; i0++) {
                                ggml_fp16_t * y = (ggml_fp16_t *) ((char *)  dst->data + (i3*ne03 + k3)*nb3  + (i2*ne02 + k2)*nb2  + (i1*ne01 + k1)*nb1  + (i0*ne00)*nb0);
                                ggml_fp16_t * x = (ggml_fp16_t *) ((char *) src0->data + (          k3)*nb03 + (          k2)*nb02 + (          k1)*nb01);
                                // ggml_vec_cpy_f16(ne00, y, x)
                                for (int i = 0; i < ne00; ++i) {
                                    y[i] = x[i];
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}

// Type dispatcher for GGML_OP_REPEAT. Dispatch is by element size, so the
// 16-bit path also serves BF16/I16 and the 32-bit path also serves I32.
static void ggml_compute_forward_repeat(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F16:
        case GGML_TYPE_BF16:
        case GGML_TYPE_I16:
            {
                ggml_compute_forward_repeat_f16(params, dst);
            } break;
        case GGML_TYPE_F32:
        case GGML_TYPE_I32:
            {
                ggml_compute_forward_repeat_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_repeat_back

// Reverse of repeat: sum the repeated copies in src0 back into the smaller
// dst (used for gradient accumulation). Note the argument order of
// ggml_can_repeat is flipped: dst repeats into src0's shape.
// Not parallelized: only thread 0 does the work.
static void ggml_compute_forward_repeat_back_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    GGML_ASSERT(ggml_can_repeat(dst, src0));

    GGML_TENSOR_UNARY_OP_LOCALS

    // guaranteed to be an integer due to the check in ggml_can_repeat
    const int nr0 = (int)(ne00/ne0);
    const int nr1 = (int)(ne01/ne1);
    const int nr2 = (int)(ne02/ne2);
    const int nr3 = (int)(ne03/ne3);

    // TODO: support for transposed / permuted tensors
    GGML_ASSERT(nb0 == sizeof(float));
    GGML_ASSERT(nb00 == sizeof(float));

    // zero dst before accumulating into it
    if (ggml_is_contiguous(dst)) {
        ggml_vec_set_f32(ne0*ne1*ne2*ne3, dst->data, 0);
    } else {
        for (int k3 = 0; k3 < ne3; k3++) {
            for (int k2 = 0; k2 < ne2; k2++) {
                for (int k1 = 0; k1 < ne1; k1++) {
                    // non-contiguous dst: zero one row at a time
                    ggml_vec_set_f32(ne0,
                            (float *) ((char *) dst->data + k1*nb1 + k2*nb2 + k3*nb3),
                            0);
                }
            }
        }
    }

    // TODO: maybe this is not optimal?
    for (int i3 = 0; i3 < nr3; i3++) {
        for (int k3 = 0; k3 < ne3; k3++) {
            for (int i2 = 0; i2 < nr2; i2++) {
                for (int k2 = 0; k2 < ne2; k2++) {
                    for (int i1 = 0; i1 < nr1; i1++) {
                        for (int k1 = 0; k1 < ne1; k1++) {
                            for (int i0 = 0; i0 < nr0; i0++) {
                                // accumulate every repeat of this row into the single dst row
                                ggml_vec_acc_f32(ne0,
                                        (float *) ((char *)  dst->data + (         k3)*nb3  + (         k2)*nb2  + (         k1)*nb1),
                                        (float *) ((char *) src0->data + (i3*ne3 + k3)*nb03 + (i2*ne2 + k2)*nb02 + (i1*ne1 + k1)*nb01 + (i0*ne0)*nb00));
                            }
                        }
                    }
                }
            }
        }
    }
}

// Type dispatcher for GGML_OP_REPEAT_BACK; only F32 is implemented.
static void ggml_compute_forward_repeat_back(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_repeat_back_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_concat

// Concatenate src0 and src1 into dst along the dimension stored in
// op_params[0]. Positions inside src0's extent copy from src0; the rest copy
// from src1, shifted back by src0's size along the concat dim.
// Parallelized over dim 2 (i2 strided by nth).
static void ggml_compute_forward_concat_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * src1 = dst->src[1];

    GGML_ASSERT(src0->nb[0] == sizeof(float));

    const int ith = params->ith;
    const int nth = params->nth;

    GGML_TENSOR_BINARY_OP_LOCALS

    const int32_t dim = ggml_get_op_params_i32(dst, 0);

    GGML_ASSERT(dim >= 0 && dim < 4);

    // offset of src1's region within dst: src0's size along the concat dim
    int64_t o[4] = {0, 0, 0, 0};
    o[dim] = src0->ne[dim];

    const float * x;

    // TODO: smarter multi-threading
    for (int i3 = 0; i3 < ne3; i3++) {
        for (int i2 = ith; i2 < ne2; i2 += nth) {
            for (int i1 = 0; i1 < ne1; i1++) {
                for (int i0 = 0; i0 < ne0; i0++) {
                    if (i0 < ne00 && i1 < ne01 && i2 < ne02 && i3 < ne03) {
                        x = (const float *) ((const char *)src0->data + (i0       )*nb00 + (i1       )*nb01 + (i2       )*nb02 + (i3       )*nb03);
                    } else {
                        x = (const float *) ((const char *)src1->data + (i0 - o[0])*nb10 + (i1 - o[1])*nb11 + (i2 - o[2])*nb12 + (i3 - o[3])*nb13);
                    }

                    float * y = (float *)((char *)dst->data + i0*nb0 + i1*nb1 + i2*nb2 + i3*nb3);

                    *y = *x;
                }
            }
        }
    }
}

// Type dispatcher for GGML_OP_CONCAT; the F32 path also serves I32 (same
// element size, elementwise copy).
static void ggml_compute_forward_concat(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
        case GGML_TYPE_I32:
            {
                ggml_compute_forward_concat_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_abs

// Elementwise absolute value, row by row. Not parallelized: thread 0 only.
static void ggml_compute_forward_abs_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));

    const int n = ggml_nrows(src0);
    const int nc = src0->ne[0];

    for (int i = 0; i < n; i++) {
        ggml_vec_abs_f32(nc,
                (float *) ((char *) dst->data + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])));
    }
}

// Type dispatcher for abs; only F32 is implemented.
static void ggml_compute_forward_abs(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_abs_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_sgn

// Elementwise sign, row by row. Not parallelized: thread 0 only.
static void ggml_compute_forward_sgn_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));

    const int n = ggml_nrows(src0);
    const int nc = src0->ne[0];

    for (int i = 0; i < n; i++) {
        ggml_vec_sgn_f32(nc,
                (float *) ((char *) dst->data + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])));
    }
}

// Type dispatcher for sgn; only F32 is implemented.
static void ggml_compute_forward_sgn(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_sgn_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_neg

// Elementwise negation, row by row. Not parallelized: thread 0 only.
static void ggml_compute_forward_neg_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));

    const int n = ggml_nrows(src0);
    const int nc = src0->ne[0];

    for (int i = 0; i < n; i++) {
        ggml_vec_neg_f32(nc,
                (float *) ((char *) dst->data + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])));
    }
}

// Type dispatcher for neg; only F32 is implemented.
static void ggml_compute_forward_neg(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_neg_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_step

// Elementwise unit step, row by row. Not parallelized: thread 0 only.
static void ggml_compute_forward_step_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));

    const int n = ggml_nrows(src0);
    const int nc = src0->ne[0];

    for (int i = 0; i < n; i++) {
        ggml_vec_step_f32(nc,
                (float *) ((char *) dst->data + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])));
    }
}

// Type dispatcher for step; only F32 is implemented.
static void ggml_compute_forward_step(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_step_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_tanh

// Elementwise tanh, row by row. Not parallelized: thread 0 only.
static void ggml_compute_forward_tanh_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));

    const int n = ggml_nrows(src0);
    const int nc = src0->ne[0];

    for (int i = 0; i < n; i++) {
        ggml_vec_tanh_f32(nc,
                (float *) ((char *) dst->data + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])));
    }
}

// Type dispatcher for tanh; only F32 is implemented.
static void ggml_compute_forward_tanh(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_tanh_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_elu

// Elementwise ELU, row by row. Not parallelized: thread 0 only.
static void ggml_compute_forward_elu_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));

    const int n = ggml_nrows(src0);
    const int nc = src0->ne[0];

    for (int i = 0; i < n; i++) {
        ggml_vec_elu_f32(nc,
                (float *) ((char *) dst->data + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])));
    }
}

// Type dispatcher for elu; only F32 is implemented.
static void ggml_compute_forward_elu(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case
GGML_TYPE_F32:
            {
                ggml_compute_forward_elu_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_relu

// Elementwise ReLU, row by row. Not parallelized: thread 0 only.
static void ggml_compute_forward_relu_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));

    const int n = ggml_nrows(src0);
    const int nc = src0->ne[0];

    for (int i = 0; i < n; i++) {
        ggml_vec_relu_f32(nc,
                (float *) ((char *) dst->data + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])));
    }
}

// Type dispatcher for relu; only F32 is implemented.
static void ggml_compute_forward_relu(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_relu_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_sigmoid

// Elementwise sigmoid, row by row. Not parallelized: thread 0 only.
static void ggml_compute_forward_sigmoid_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));

    const int n = ggml_nrows(src0);
    const int nc = src0->ne[0];

    for (int i = 0; i < n; i++) {
        ggml_vec_sigmoid_f32(nc,
                (float *) ((char *) dst->data + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])));
    }
}

// Type dispatcher for sigmoid; only F32 is implemented.
static void ggml_compute_forward_sigmoid(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_sigmoid_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_gelu

// Elementwise GELU. Unlike the simpler unary ops above, this one is
// parallelized: rows are split evenly across nth threads.
static void ggml_compute_forward_gelu_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));

    const int ith = params->ith;
    const int nth = params->nth;

    const int nc = src0->ne[0];
    const int nr = ggml_nrows(src0);

    // rows per thread
    const int dr = (nr + nth - 1)/nth;

    // row range for this thread
    const int ir0 = dr*ith;
    const int ir1 = MIN(ir0 + dr, nr);

    for (int i1 = ir0; i1 < ir1; i1++) {
        ggml_vec_gelu_f32(nc,
                (float *) ((char *) dst->data + i1*( dst->nb[1])),
                (float *) ((char *) src0->data + i1*(src0->nb[1])));

#ifndef NDEBUG
        // debug-only sanity check: output must be finite
        for (int k = 0; k < nc; k++) {
            const float x = ((float *) ((char *) dst->data + i1*( dst->nb[1])))[k];
            UNUSED(x);
            assert(!isnan(x));
            assert(!isinf(x));
        }
#endif
    }
}

// Type dispatcher for gelu; only F32 is implemented.
static void ggml_compute_forward_gelu(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_gelu_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_gelu_quick

// Elementwise "quick" GELU approximation; rows split across nth threads.
static void ggml_compute_forward_gelu_quick_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));

    const int ith = params->ith;
    const int nth = params->nth;

    const int nc = src0->ne[0];
    const int nr = ggml_nrows(src0);

    // rows per thread
    const int dr = (nr + nth - 1)/nth;

    // row range for this thread
    const int ir0 = dr*ith;
    const int ir1 = MIN(ir0 + dr, nr);

    for (int i1 = ir0; i1 < ir1; i1++) {
        ggml_vec_gelu_quick_f32(nc,
                (float *) ((char *) dst->data + i1*( dst->nb[1])),
                (float *) ((char *) src0->data + i1*(src0->nb[1])));

#ifndef NDEBUG
        // debug-only sanity check: output must be finite
        for (int k = 0; k < nc; k++) {
            const float x = ((float *) ((char *) dst->data + i1*( dst->nb[1])))[k];
            UNUSED(x);
            assert(!isnan(x));
            assert(!isinf(x));
        }
#endif
    }
}

// Type dispatcher for gelu_quick; only F32 is implemented.
static void ggml_compute_forward_gelu_quick(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_gelu_quick_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_silu

// Elementwise SiLU (x * sigmoid(x)); rows split across nth threads.
static void ggml_compute_forward_silu_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));

    const int ith = params->ith;
    const int nth = params->nth;

    const int nc = src0->ne[0];
    const int nr = ggml_nrows(src0);

    // rows per thread
    const int dr = (nr + nth - 1)/nth;

    // row range for this thread
    const int ir0 = dr*ith;
    const int ir1 = MIN(ir0 + dr, nr);

    for (int i1 = ir0; i1 < ir1; i1++) {
        ggml_vec_silu_f32(nc,
                (float *) ((char *) dst->data + i1*( dst->nb[1])),
                (float *) ((char *) src0->data + i1*(src0->nb[1])));

#ifndef NDEBUG
        // debug-only sanity check: output must be finite
        for (int k = 0; k < nc; k++) {
            const float x = ((float *) ((char *) dst->data + i1*(dst->nb[1])))[k];
            UNUSED(x);
            assert(!isnan(x));
            assert(!isinf(x));
        }
#endif
    }
}

// Type dispatcher for silu; only F32 is implemented.
static void ggml_compute_forward_silu(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_silu_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}
// ggml_compute_forward_leaky_relu

// Elementwise leaky ReLU; the negative-side slope is read from op_params.
// Not parallelized: thread 0 only.
static void ggml_compute_forward_leaky_relu_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));

    const int n = ggml_nrows(src0);
    const int nc = src0->ne[0];

    // slope is stored as float bits in the first op_params slot
    float negative_slope;
    memcpy(&negative_slope, dst->op_params, sizeof(float));

    assert(dst->nb[0] == sizeof(float));
    assert(src0->nb[0] == sizeof(float));

    for (int i = 0; i < n; i++) {
        ggml_vec_leaky_relu_f32(nc,
                (float *) ((char *) dst->data + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])), negative_slope);
    }
}

// Type dispatcher for leaky_relu; only F32 is implemented.
static void ggml_compute_forward_leaky_relu(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_leaky_relu_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_silu_back

// Backward pass of SiLU: dst = silu'(src0) * grad, with grad in src[1].
// Rows split across nth threads.
static void ggml_compute_forward_silu_back_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * grad = dst->src[1];

    assert(ggml_is_contiguous_1(grad));
    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));
    assert(ggml_are_same_shape(src0, grad));

    const int ith = params->ith;
    const int nth = params->nth;

    const int nc = src0->ne[0];
    const int nr = ggml_nrows(src0);

    // rows per thread
    const int dr = (nr + nth - 1)/nth;

    // row range for this thread
    const int ir0 = dr*ith;
    const int ir1 = MIN(ir0 + dr, nr);

    for (int i1 = ir0; i1 < ir1; i1++) {
        ggml_vec_silu_backward_f32(nc,
                (float *) ((char *) dst->data +
                        i1*( dst->nb[1])),
                (float *) ((char *) src0->data + i1*(src0->nb[1])),
                (float *) ((char *) grad->data + i1*(grad->nb[1])));

#ifndef NDEBUG
        // debug-only sanity check: output must be finite
        for (int k = 0; k < nc; k++) {
            const float x = ((float *) ((char *) dst->data + i1*( dst->nb[1])))[k];
            UNUSED(x);
            assert(!isnan(x));
            assert(!isinf(x));
        }
#endif
    }
}

// Type dispatcher for silu_back; only F32 is implemented.
static void ggml_compute_forward_silu_back(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_silu_back_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}


// Elementwise hardswish, row by row. Not parallelized: thread 0 only.
static void ggml_compute_forward_hardswish_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));

    const int n = ggml_nrows(src0);
    const int nc = src0->ne[0];

    for (int i = 0; i < n; i++) {
        ggml_vec_hardswish_f32(nc,
                (float *) ((char *) dst->data + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])));
    }
}
// Type dispatcher for hardswish; only F32 is implemented.
static void ggml_compute_forward_hardswish(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_hardswish_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// Elementwise hardsigmoid, row by row. Not parallelized: thread 0 only.
static void ggml_compute_forward_hardsigmoid_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));

    const int n = ggml_nrows(src0);
    const int nc = src0->ne[0];

    for (int i = 0; i < n; i++) {
        ggml_vec_hardsigmoid_f32(nc,
                (float *) ((char *) dst->data + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])));
    }
}

// Type dispatcher for hardsigmoid; only F32 is implemented.
static void ggml_compute_forward_hardsigmoid(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_hardsigmoid_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// Elementwise exp, row by row. Not parallelized: thread 0 only.
static void ggml_compute_forward_exp_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    assert(ggml_is_contiguous_1(src0));
    assert(ggml_is_contiguous_1(dst));
    assert(ggml_are_same_shape(src0, dst));

    const int n = ggml_nrows(src0);
    const int nc = src0->ne[0];

    for (int i = 0; i < n; i++) {
        ggml_vec_exp_f32(nc,
                (float *) ((char *) dst->data + i*( dst->nb[1])),
                (float *) ((char *) src0->data + i*(src0->nb[1])));
    }
}

// Type dispatcher for exp; only F32 is implemented.
static void ggml_compute_forward_exp(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_exp_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}


// ggml_compute_forward_norm

// Layer norm (without affine params): per row, subtract the mean and divide
// by sqrt(variance + eps). eps is float bits in op_params[0]. Rows are split
// across threads along dim 1.
static void ggml_compute_forward_norm_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    GGML_ASSERT(ggml_are_same_shape(src0, dst));

    GGML_ASSERT(src0->nb[0] == sizeof(float));

    const int ith = params->ith;
    const int nth = params->nth;

    GGML_TENSOR_UNARY_OP_LOCALS

    float eps;
    memcpy(&eps, dst->op_params, sizeof(float));

    GGML_ASSERT(eps > 0.0f);

    // TODO: optimize
    for (int64_t i03 = 0; i03 < ne03; i03++) {
        for (int64_t i02 = 0;
i02 < ne02; i02++) {
            for (int64_t i01 = ith; i01 < ne01; i01 += nth) {
                const float * x = (float *) ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03);

                // first pass: row mean (accumulated in ggml_float for precision)
                ggml_float sum = 0.0;
                for (int64_t i00 = 0; i00 < ne00; i00++) {
                    sum += (ggml_float)x[i00];
                }

                float mean = sum/ne00;

                float * y = (float *) ((char *) dst->data + i01*nb1 + i02*nb2 + i03*nb3);

                // second pass: center the row and accumulate the variance
                ggml_float sum2 = 0.0;
                for (int64_t i00 = 0; i00 < ne00; i00++) {
                    float v = x[i00] - mean;
                    y[i00] = v;
                    sum2 += (ggml_float)(v*v);
                }

                float variance = sum2/ne00;
                const float scale = 1.0f/sqrtf(variance + eps);

                ggml_vec_scale_f32(ne00, y, scale);
            }
        }
    }
}

// Type dispatcher for norm; only F32 is implemented.
static void ggml_compute_forward_norm(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_norm_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_rms_norm

// RMS norm: per row, divide by sqrt(mean(x*x) + eps) without centering.
// eps is float bits in op_params[0]. Rows split across threads along dim 1.
static void ggml_compute_forward_rms_norm_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    GGML_ASSERT(ggml_are_same_shape(src0, dst));

    GGML_ASSERT(src0->nb[0] == sizeof(float));

    const int ith = params->ith;
    const int nth = params->nth;

    GGML_TENSOR_UNARY_OP_LOCALS

    float eps;
    memcpy(&eps, dst->op_params, sizeof(float));

    GGML_ASSERT(eps > 0.0f);

    // TODO: optimize
    for (int64_t i03 = 0; i03 < ne03; i03++) {
        for (int64_t i02 = 0; i02 < ne02; i02++) {
            for (int64_t i01 = ith; i01 < ne01; i01 += nth) {
                const float * x = (float *) ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03);

                // mean of squares, accumulated in ggml_float for precision
                ggml_float sum = 0.0;
                for (int64_t i00 = 0; i00 < ne00; i00++) {
                    sum += (ggml_float)(x[i00] * x[i00]);
                }

                const float mean = sum/ne00;

                float * y = (float *) ((char *) dst->data + i01*nb1 + i02*nb2 + i03*nb3);

                memcpy(y, x, ne00 * sizeof(float));
                // for (int i00 = 0; i00 < ne00; i00++) {
                //     y[i00] = x[i00];
                // }

                const float scale = 1.0f/sqrtf(mean + eps);

                ggml_vec_scale_f32(ne00, y, scale);
            }
        }
    }
}

// Type dispatcher for rms_norm; only F32 is implemented.
static void ggml_compute_forward_rms_norm(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_rms_norm_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// Backward pass of RMS norm. src0 = x (forward input), src1 = dz (incoming
// gradient); dst receives dx. Rows split across threads along dim 1.
static void ggml_compute_forward_rms_norm_back_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * src1 = dst->src[1];

    GGML_ASSERT(ggml_are_same_shape(src0, dst) && ggml_are_same_shape(src0, src1));

    GGML_ASSERT(src0->nb[0] == sizeof(float));

    const int ith = params->ith;
    const int nth = params->nth;

    GGML_TENSOR_BINARY_OP_LOCALS

    float eps;
    memcpy(&eps, dst->op_params, sizeof(float));

    // TODO: optimize
    for (int64_t i03 = 0; i03 < ne03; i03++) {
        for (int64_t i02 = 0; i02 < ne02; i02++) {
            for (int64_t i01 = ith; i01 < ne01; i01 += nth) {
                // src1 is same shape as src0 => same indices
                const int64_t i11 = i01;
                const int64_t i12 = i02;
                const int64_t i13 = i03;

                const float * x = (float *) ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03);
                const float * dz = (float *) ((char *) src1->data + i11*nb11 + i12*nb12 + i13*nb13);

                ggml_float sum_xx = 0.0;
                ggml_float sum_xdz = 0.0;

                for (int64_t i00 = 0; i00 < ne00; i00++) {
                    sum_xx += (ggml_float)(x[i00] * x[i00]);
                    sum_xdz += (ggml_float)(x[i00] * dz[i00]);
                }

                //const float mean = (float)(sum_xx)/ne00;
                const float mean_eps = (float)(sum_xx)/ne00 + eps;
                const float sum_eps = (float)(sum_xx) + eps*ne00;
                //const float mean_xdz = (float)(sum_xdz)/ne00;
                // we could cache rms from forward pass to improve performance.
                // to do this implement ggml_rms and compose ggml_rms_norm using ggml_rms.
                //const float rms = sqrtf(mean_eps);
                const float rrms = 1.0f / sqrtf(mean_eps);
                //const float scale = -rrms/(ne00 * mean_eps); // -1/(n*rms**3)

                // Derivation (condensed; the upstream ggml sources carry the
                // full step-by-step backward pass of z = rms_norm(x)):
                //   with rms = sqrt(mean(x*x) + eps) and z = x/rms, the
                //   gradient wrt x simplifies to
                //     dx = scale(dz + scale(x, -mean_xdz/mean_eps), rrms)
                //   where mean_xdz = sum_xdz/N and mean_eps = sum_xx/N + eps.
                // post-order:
                //   dx := x
                //   dx := scale(dx,-mean_xdz/mean_eps)
                //   dx := add(dx, dz)
                //   dx := scale(dx, rrms)
                float * dx = (float *) ((char *) dst->data + i01*nb1 + i02*nb2 + i03*nb3);

                ggml_vec_cpy_f32 (ne00, dx, x);
                // ggml_vec_scale_f32(ne00, dx, -mean_xdz/mean_eps);
                // note: -sum_xdz/sum_eps == -mean_xdz/mean_eps (both scaled by 1/N)
                ggml_vec_scale_f32(ne00, dx, (float)(-sum_xdz)/sum_eps);
                ggml_vec_acc_f32 (ne00, dx, dz);
                ggml_vec_scale_f32(ne00, dx, rrms);
            }
        }
    }
}

// Type dispatcher for rms_norm_back; only F32 is implemented.
static void ggml_compute_forward_rms_norm_back(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_rms_norm_back_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_group_norm

// Group norm over dim 2 (channels): channels are partitioned into n_groups
// groups and each group is normalized over (ne00 * ne01 * channels_in_group)
// elements. op_params layout: [0] = n_groups (int32), [1] = eps (float bits).
// Parallelized by assigning whole groups to threads.
static void ggml_compute_forward_group_norm_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    GGML_ASSERT(ggml_are_same_shape(src0, dst));

    GGML_ASSERT(src0->nb[0] == sizeof(float));

    const int ith = params->ith;
    const int nth = params->nth;

    GGML_TENSOR_UNARY_OP_LOCALS

    // TODO: optimize

    float eps;
    memcpy(&eps, dst->op_params + 1, sizeof(float));

    int n_channels = src0->ne[2];
    int n_groups = dst->op_params[0];
    int n_channels_per_group = (n_channels + n_groups - 1) / n_groups;
    for (int i = ith; i < n_groups; i += nth) {
        // channel range [start, end) covered by this group
        int start = i * n_channels_per_group;
        int end = start + n_channels_per_group;
        if (end > n_channels) {
            end = n_channels;
        }
        int step = end - start;

        for (int64_t i03 = 0; i03 < ne03; i03++) {
            // first pass: group mean
            ggml_float sum = 0.0;
            for (int64_t i02 = start; i02 < end; i02++) {
                for (int64_t i01 = 0; i01 < ne01;
i01++) {
                    const float * x = (float *)((char *) src0->data + i01 * nb01 + i02 * nb02 + i03 * nb03);

                    ggml_float sumr = 0.0;
                    for (int64_t i00 = 0; i00 < ne00; i00++) {
                        sumr += (ggml_float)x[i00];
                    }
                    sum += sumr;
                }
            }
            const float mean = sum / (ne00 * ne01 * step);

            // second pass: center and accumulate variance
            ggml_float sum2 = 0.0;
            for (int64_t i02 = start; i02 < end; i02++) {
                for (int64_t i01 = 0; i01 < ne01; i01++) {
                    const float * x = (float *)((char *) src0->data + i01 * nb01 + i02 * nb02 + i03 * nb03);

                    float * y = (float *)((char *) dst->data + i01 * nb1 + i02 * nb2 + i03 * nb3);

                    ggml_float sumr = 0.0;
                    for (int64_t i00 = 0; i00 < ne00; i00++) {
                        float v = x[i00] - mean;
                        y[i00] = v;
                        sumr += (ggml_float)(v * v);
                    }
                    sum2 += sumr;
                }
            }
            const float variance = sum2 / (ne00 * ne01 * step);
            const float scale = 1.0f / sqrtf(variance + eps);

            // third pass: apply the normalization scale
            for (int64_t i02 = start; i02 < end; i02++) {
                for (int64_t i01 = 0; i01 < ne01; i01++) {
                    float * y = (float *)((char *) dst->data + i01 * nb1 + i02 * nb2 + i03 * nb3);
                    ggml_vec_scale_f32(ne00, y, scale);
                }
            }
        }
    }
}

// Type dispatcher for group_norm; only F32 is implemented.
static void ggml_compute_forward_group_norm(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_group_norm_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_mul_mat

// NOTE(review): this function is truncated at the end of the visible range;
// only its head is documented here.
static void ggml_compute_forward_mul_mat_one_chunk(
    const struct ggml_compute_params * params,
    struct ggml_tensor * dst,
    const int64_t num_rows_per_vec_dot,
    const int64_t ir0_start,
    const int64_t ir0_end,
    const int64_t ir1_start,
    const int64_t ir1_end) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * src1 = dst->src[1];

    GGML_TENSOR_BINARY_OP_LOCALS

    const enum ggml_type type = src0->type;

    const bool src1_cont = ggml_is_contiguous(src1);

    // per-type dot-product kernel and the type it expects src1 rows in
    ggml_vec_dot_t const vec_dot = type_traits[type].vec_dot;
    enum ggml_type const vec_dot_type = type_traits[type].vec_dot_type;

    // broadcast factors
    const int64_t r2 = ne12 / ne02;
    const int64_t r3 = ne13 / ne03;

    //printf("ir0_start = %6lld, ir0_end = %6lld, ir1_start = %6lld, ir1_end = %6lld\n", ir0_start, ir0_end, ir1_start, ir1_end);

    // threads with no work simply yield (not sure if it helps)
    if (ir0_start >= ir0_end || ir1_start >= ir1_end) {
        return;
    }

    const void * wdata = (src1->type == vec_dot_type) ? src1->data : params->wdata;
    const size_t row_size = ggml_row_size(vec_dot_type, ne10);

    assert(ne12 % ne02 == 0);
    assert(ne13 % ne03 == 0);

    // block-tiling attempt
    const int64_t blck_0 = 16;
    const int64_t blck_1 = 16;

    const size_t src1_col_stride = src1_cont || src1->type != vec_dot_type ? row_size : nb11;

    // attempt to reduce false-sharing (does not seem to make a difference)
    // 16 * 2, accounting for mmla kernels
    float tmp[32];

    for (int64_t iir1 = ir1_start; iir1 < ir1_end; iir1 += blck_1) {
        for (int64_t iir0 = ir0_start; iir0 < ir0_end; iir0 += blck_0) {
            for (int64_t ir1 = iir1; ir1 < iir1 + blck_1 && ir1 < ir1_end; ir1 += num_rows_per_vec_dot) {
                const int64_t i13 = (ir1 / (ne12 * ne1));
                const int64_t i12 = (ir1 - i13 * ne12 * ne1) / ne1;
                const int64_t i11 = (ir1 - i13 * ne12 * ne1 - i12 * ne1);

                // broadcast src0 into src1
                const int64_t i03 = i13 / r3;
                const int64_t i02 = i12 / r2;

                const int64_t i1 = i11;
                const int64_t i2 = i12;
                const int64_t i3 = i13;

                const char * src0_row = (const char*)src0->data + (0 + i02 * nb02 + i03 * nb03);

                // desc: when src1 is not a contiguous memory block we have to calculate the offset using the strides
                //       if it is, then we have either copied the data to params->wdata and made it contiguous or we are using
                //       the original src1 data pointer, so we should index using the indices directly
                // TODO: this is a bit of a hack, we should probably have a better way to handle
this + const char * src1_col = (const char*)wdata + + (src1_cont || src1->type != vec_dot_type + ? (i11 + i12 * ne11 + i13 * ne12 * ne11) * row_size + : (i11 * nb11 + i12 * nb12 + i13 * nb13)); + float * dst_col = (float*)((char*)dst->data + (i1 * nb1 + i2 * nb2 + i3 * nb3)); + + //for (int64_t ir0 = iir0; ir0 < iir0 + blck_0 && ir0 < ir0_end; ++ir0) { + // vec_dot(ne00, &dst_col[ir0], src0_row + ir0*nb01, src1_col); + //} + + for (int64_t ir0 = iir0; ir0 < iir0 + blck_0 && ir0 < ir0_end; ir0 += num_rows_per_vec_dot) { + vec_dot(ne00, &tmp[ir0 - iir0], (num_rows_per_vec_dot > 1 ? 16 : 0), src0_row + ir0 * nb01, (num_rows_per_vec_dot > 1 ? nb01 : 0), src1_col, (num_rows_per_vec_dot > 1 ? src1_col_stride : 0), num_rows_per_vec_dot); + } + + for (int cn = 0; cn < num_rows_per_vec_dot; ++cn) { + memcpy(&dst_col[iir0 + cn * nb1 / nb0], tmp + (cn * 16), (MIN(iir0 + blck_0, ir0_end) - iir0) * sizeof(float)); + } + } + } + } +} + +static void ggml_compute_forward_mul_mat( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_TENSOR_BINARY_OP_LOCALS + + const int ith = params->ith; + const int nth = params->nth; + + const enum ggml_type type = src0->type; + + enum ggml_type const vec_dot_type = type_traits[type].vec_dot_type; + ggml_from_float_t const from_float = type_traits[vec_dot_type].from_float; + ggml_from_float_to_mat_t const from_float_to_mat = type_traits[vec_dot_type].from_float_to_mat; + int64_t const vec_dot_num_rows = type_traits[type].nrows; + int64_t const matmul_num_cols = type_traits[type].ncols; + int64_t const blck_size_interleave = type_traits[type].blck_size_interleave; + ggml_gemv_t const gemv = type_traits[type].gemv; + ggml_gemm_t const gemm = type_traits[type].gemm; + + GGML_ASSERT(ne0 == ne01); + GGML_ASSERT(ne1 == ne11); + GGML_ASSERT(ne2 == ne12); + GGML_ASSERT(ne3 == ne13); + + // we don't support permuted src0 or 
src1 + GGML_ASSERT(nb00 == ggml_type_size(type)); + GGML_ASSERT(nb10 == ggml_type_size(src1->type)); + + // dst cannot be transposed or permuted + GGML_ASSERT(nb0 == sizeof(float)); + GGML_ASSERT(nb0 <= nb1); + GGML_ASSERT(nb1 <= nb2); + GGML_ASSERT(nb2 <= nb3); + + // nb01 >= nb00 - src0 is not transposed + // compute by src0 rows + +#if GGML_USE_LLAMAFILE + // broadcast factors + const int64_t r2 = ne12 / ne02; + const int64_t r3 = ne13 / ne03; + + const bool src1_cont = ggml_is_contiguous(src1); + + if (src1_cont) { + for (int64_t i13 = 0; i13 < ne13; i13++) + for (int64_t i12 = 0; i12 < ne12; i12++) + if (!llamafile_sgemm(ne01, ne11, ne00/ggml_blck_size(src0->type), + (const char *)src0->data + i12/r2*nb02 + i13/r3*nb03, + nb01/ggml_type_size(src0->type), + (const char *)src1->data + i12*nb12 + i13*nb13, + nb11/ggml_type_size(src1->type), + (char *)dst->data + i12*nb2 + i13*nb3, + nb1/ggml_type_size(dst->type), + ith, nth, + src0->type, + src1->type, + dst->type)) + goto UseGgmlGemm1; + return; + } +UseGgmlGemm1:; +#endif + + if (src1->type != vec_dot_type) { + char * wdata = params->wdata; + + const size_t nbw1 = ggml_row_size(vec_dot_type, ne10); + const size_t nbw2 = nbw1*ne11; + const size_t nbw3 = nbw2*ne12; + + assert(params->wsize >= ne13*nbw3); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + + for (int64_t i13 = 0; i13 < ne13; ++i13) { + for (int64_t i12 = 0; i12 < ne12; ++i12) { + int64_t i11_processed = 0; + if ((ggml_n_dims(src1) == 2) && from_float_to_mat && gemm) { + for (int64_t i11 = ith * 4; i11 < ne11 - ne11 % 4; i11 += nth * 4) { + from_float_to_mat((float *)((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11), + (void *) (wdata + i13*nbw3 + i12*nbw2 + i11*nbw1), + 4, ne10, blck_size_interleave); + } + i11_processed = ne11 - ne11 % 4; + } + for (int64_t i11 = i11_processed + ith; i11 < ne11; i11 += nth) { + from_float((float *)((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11), + (void *) (wdata + i13*nbw3 + i12*nbw2 + i11*nbw1), + ne10); 
+ } + } + } + } + + if (ith == 0) { + // Every thread starts at ith, so the first unprocessed chunk is nth. This save a bit of coordination right at the start. + atomic_store_explicit(¶ms->threadpool->current_chunk, nth, memory_order_relaxed); + } + + ggml_barrier(params->threadpool); + +#if GGML_USE_LLAMAFILE + if (src1->type != vec_dot_type) { + const void* wdata = (src1->type == vec_dot_type) ? src1->data : params->wdata; + const size_t row_size = ggml_row_size(vec_dot_type, ne10); + + for (int64_t i13 = 0; i13 < ne13; i13++) + for (int64_t i12 = 0; i12 < ne12; i12++) + if (!llamafile_sgemm(ne01, ne11, ne00/ggml_blck_size(src0->type), + (const char *)src0->data + i12/r2*nb02 + i13/r3*nb03, + nb01/ggml_type_size(src0->type), + (const char *)wdata + (i12*ne11 + i13*ne12*ne11)*row_size, + row_size/ggml_type_size(vec_dot_type), + (char *)dst->data + i12*nb2 + i13*nb3, + nb1/ggml_type_size(dst->type), + ith, nth, + src0->type, + vec_dot_type, + dst->type)) + goto UseGgmlGemm2; + return; + } +UseGgmlGemm2:; +#endif + + // This is the size of the first dimension of the result, so we can iterate that way. (see the ASSERT above, these are the same numbers) + const int64_t nr0 = ne0; + + // This is the size of the rest of the dimensions of the result + const int64_t nr1 = ne1 * ne2 * ne3; + + // dot kernels can handle 1 row and col at a time, but mmla kernels can process 2 rows and cols + int64_t num_rows_per_vec_dot = vec_dot_num_rows; + // TODO: currently the mmla kernels support only even numbered rows/cols. + // this check can be removed once they are extended to support odd numbered rows/cols too + if ((nr0 % 2 != 0) || (ne11 % 2 != 0)) { + num_rows_per_vec_dot = 1; + } + + // Now select a reasonable chunk size. + int chunk_size = 16; + + // We need to step up the size if it's small + if (nr0 == 1 || nr1 == 1) { + chunk_size = 64; + } + + // distribute the work across the inner or outer loop based on which one is larger + // The number of chunks in the 0/1 dim. 
+ // CEIL(nr0/chunk_size) + int64_t nchunk0 = (nr0 + chunk_size - 1) / chunk_size; + int64_t nchunk1 = (nr1 + chunk_size - 1) / chunk_size; + + // If the chunking is poor for the number of threads on this setup, scrap the whole plan. Re-chunk it by thread. + // Also, chunking by thread was measured to have perform better on NUMA systems. See https://github.com/ggerganov/llama.cpp/pull/6915 + // In theory, chunking should be just as useful on NUMA and non NUMA systems, but testing disagreed with that. + if (nchunk0 * nchunk1 < nth * 4 || ggml_is_numa()) { + // distribute the thread work across the inner or outer loop based on which one is larger + nchunk0 = nr0 > nr1 ? nth : 1; // parallelize by src0 rows + nchunk1 = nr0 > nr1 ? 1 : nth; // parallelize by src1 rows + } + + // The number of elements in each chunk + const int64_t dr0 = (nr0 + nchunk0 - 1) / nchunk0; + const int64_t dr1 = (nr1 + nchunk1 - 1) / nchunk1; + + if ((ggml_n_dims(src0) == 2) && gemv) { + const void * src1_wdata = (src1->type == vec_dot_type) ? src1->data : params->wdata; + const size_t src1_col_stride = ggml_is_contiguous(src1) || src1->type != vec_dot_type ? ggml_row_size(vec_dot_type, ne10) : nb11; + int64_t src0_start = (ith * ne01) / nth; + int64_t src0_end = ((ith + 1) * ne01) / nth; + src0_start = (src0_start % matmul_num_cols) ? src0_start + matmul_num_cols - (src0_start % matmul_num_cols): src0_start; + src0_end = (src0_end % matmul_num_cols) ? src0_end + matmul_num_cols - (src0_end % matmul_num_cols): src0_end; + if (src0_start >= src0_end) return; + + // If there are more than three rows in src1, use gemm; otherwise, use gemv. + if (gemm && (ne11 > 3)) { + gemm(ne00, (float *)((char *) dst->data) + src0_start, ne01, (const char *) src0->data + src0_start * nb01, + (const char *) src1_wdata, ne11 - ne11 % 4, src0_end - src0_start); + } + for (int iter = gemm ? 
ne11 - ne11 % 4 : 0; iter < ne11; iter++) { + gemv(ne00, (float *)((char *) dst->data + (iter * nb1)) + src0_start, ne01, + (const char *) src0->data + src0_start * nb01, (const char *) src1_wdata + (src1_col_stride * iter), 1, + src0_end - src0_start); + } + return; + } + + // The first chunk comes from our thread_id, the rest will get auto-assigned. + int current_chunk = ith; + + while (current_chunk < nchunk0 * nchunk1) { + const int64_t ith0 = current_chunk % nchunk0; + const int64_t ith1 = current_chunk / nchunk0; + + const int64_t ir0_start = dr0 * ith0; + const int64_t ir0_end = MIN(ir0_start + dr0, nr0); + + const int64_t ir1_start = dr1 * ith1; + const int64_t ir1_end = MIN(ir1_start + dr1, nr1); + + ggml_compute_forward_mul_mat_one_chunk(params, dst, num_rows_per_vec_dot, ir0_start, ir0_end, ir1_start, ir1_end); + + if (nth >= nchunk0 * nchunk1) { + break; + } + + current_chunk = atomic_fetch_add_explicit(¶ms->threadpool->current_chunk, 1, memory_order_relaxed); + } +} + +// ggml_compute_forward_mul_mat_id + +static void ggml_compute_forward_mul_mat_id( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + const struct ggml_tensor * ids = dst->src[2]; + + GGML_TENSOR_BINARY_OP_LOCALS + + const int ith = params->ith; + const int nth = params->nth; + + const enum ggml_type type = src0->type; + + const bool src1_cont = ggml_is_contiguous(src1); + + ggml_vec_dot_t const vec_dot = type_traits[type].vec_dot; + enum ggml_type const vec_dot_type = type_traits[type].vec_dot_type; + ggml_from_float_t const from_float = type_traits[vec_dot_type].from_float; + int64_t const matmul_num_cols = type_traits[type].ncols; + ggml_gemv_t const gemv = type_traits[type].gemv; + + // we don't support permuted src0 or src1 + GGML_ASSERT(nb00 == ggml_type_size(type)); + GGML_ASSERT(nb10 == ggml_type_size(src1->type)); + + // dst cannot be transposed or 
permuted + GGML_ASSERT(nb0 == sizeof(float)); + GGML_ASSERT(nb0 <= nb1); + GGML_ASSERT(nb1 <= nb2); + GGML_ASSERT(nb2 <= nb3); + + // row groups + const int n_ids = ids->ne[0]; // n_expert_used + const int n_as = ne02; // n_expert + + char * wdata_src1_end = (src1->type == vec_dot_type) ? + (char *) params->wdata : + (char *) params->wdata + GGML_PAD(ggml_row_size(vec_dot_type, ggml_nelements(src1)), sizeof(int64_t)); + + struct mmid_row_mapping { + int32_t i1; + int32_t i2; + }; + + int64_t * matrix_row_counts = (int64_t *) (wdata_src1_end); // [n_as] + struct mmid_row_mapping * matrix_rows = (struct mmid_row_mapping *)(matrix_row_counts + n_as); // [n_as][ne11] + + if (src1->type != vec_dot_type) { + char * wdata = params->wdata; + + const size_t nbw1 = ggml_row_size(vec_dot_type, ne10); + const size_t nbw2 = nbw1*ne11; + const size_t nbw3 = nbw2*ne12; + + assert(params->wsize >= ne13*nbw3); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + + for (int64_t i13 = 0; i13 < ne13; ++i13) { + for (int64_t i12 = 0; i12 < ne12; ++i12) { + for (int64_t i11 = ith; i11 < ne11; i11 += nth) { + from_float((float *)((char *) src1->data + i13*nb13 + i12*nb12 + i11*nb11), + (void *) (wdata + i13*nbw3 + i12*nbw2 + i11*nbw1), + ne10); + } + } + } + } + +#define MMID_MATRIX_ROW(row_id, i1) matrix_rows[(row_id)*ne12 + (i1)] + + if (ith == 0) { + // initialize matrix_row_counts + memset(matrix_row_counts, 0, n_as*sizeof(int64_t)); + + // group rows by src0 matrix + for (int64_t iid1 = 0; iid1 < ids->ne[1]; ++iid1) { + for (int id = 0; id < n_ids; ++id) { + const int32_t i02 = *(const int32_t *) ((const char *) ids->data + iid1*ids->nb[1] + id*ids->nb[0]); + + assert(i02 >= 0 && i02 < n_as); + + MMID_MATRIX_ROW(i02, matrix_row_counts[i02]) = (struct mmid_row_mapping) {id, iid1}; + matrix_row_counts[i02] += 1; + } + } + } + + ggml_barrier(params->threadpool); + + // compute each matrix multiplication in sequence + for (int cur_a = 0; cur_a < n_as; ++cur_a) { + const int64_t cne1 = 
matrix_row_counts[cur_a]; + + if (cne1 == 0) { + continue; + } + + const char * src0_cur = (const char *) src0->data + cur_a*nb02; + + const void * wdata = (src1->type == vec_dot_type) ? src1->data : params->wdata; + const size_t row_size = ggml_row_size(vec_dot_type, ne10); + + const int64_t nr0 = ne01; // src0 rows + const int64_t nr1 = cne1; // src1 rows + + if (((ggml_n_dims(src0) - 1) == 2) && gemv) { + int64_t src0_cur_start = (ith * ne01) / nth; + int64_t src0_cur_end = ((ith + 1) * ne01) / nth; + src0_cur_start = (src0_cur_start % matmul_num_cols) ? src0_cur_start + matmul_num_cols - (src0_cur_start % matmul_num_cols): src0_cur_start; + src0_cur_end = (src0_cur_end % matmul_num_cols) ? src0_cur_end + matmul_num_cols - (src0_cur_end % matmul_num_cols): src0_cur_end; + if (src0_cur_start >= src0_cur_end) return; + + for (int ir1 = 0; ir1 < nr1; ir1++) { + struct mmid_row_mapping row_mapping = MMID_MATRIX_ROW(cur_a, ir1); + const int id = row_mapping.i1; // selected expert index + + const int64_t i11 = id % ne11; + const int64_t i12 = row_mapping.i2; // row index in src1 + + const int64_t i1 = id; // selected expert index + const int64_t i2 = i12; // row + + const char * src1_col = (const char *) wdata + + (src1_cont || src1->type != vec_dot_type + ? (i11 + i12 * ne11) * row_size + : (i11 * nb11 + i12 * nb12)); + + gemv(ne00, (float *)((char *) dst->data + (i1 * nb1 + i2 * nb2)) + src0_cur_start, ne01, + (const char *) src0_cur + src0_cur_start * nb01, src1_col, 1, src0_cur_end - src0_cur_start); + } + continue; + } + + // distribute the thread work across the inner or outer loop based on which one is larger + + const int64_t nth0 = nr0 > nr1 ? nth : 1; // parallelize by src0 rows + const int64_t nth1 = nr0 > nr1 ? 
1 : nth; // parallelize by src1 rows + + const int64_t ith0 = ith % nth0; + const int64_t ith1 = ith / nth0; + + const int64_t dr0 = (nr0 + nth0 - 1)/nth0; + const int64_t dr1 = (nr1 + nth1 - 1)/nth1; + + const int64_t ir010 = dr0*ith0; + const int64_t ir011 = MIN(ir010 + dr0, nr0); + + const int64_t ir110 = dr1*ith1; + const int64_t ir111 = MIN(ir110 + dr1, nr1); + + // threads with no work simply yield (not sure if it helps) + //if (ir010 >= ir011 || ir110 >= ir111) { + // sched_yield(); + // continue; + //} + + // block-tiling attempt + const int64_t blck_0 = 16; + const int64_t blck_1 = 16; + + // attempt to reduce false-sharing (does not seem to make a difference) + float tmp[16]; + + for (int64_t iir1 = ir110; iir1 < ir111; iir1 += blck_1) { + for (int64_t iir0 = ir010; iir0 < ir011; iir0 += blck_0) { + for (int64_t ir1 = iir1; ir1 < iir1 + blck_1 && ir1 < ir111; ++ir1) { + const int64_t _i12 = ir1; // logical row index for this expert + + struct mmid_row_mapping row_mapping = MMID_MATRIX_ROW(cur_a, _i12); + const int id = row_mapping.i1; // selected expert index + + const int64_t i11 = id % ne11; + const int64_t i12 = row_mapping.i2; // row index in src1 + + const int64_t i1 = id; // selected expert index + const int64_t i2 = i12; // row + + // desc: when src1 is not a contiguous memory block we have to calculate the offset using the strides + // if it is, then we have either copied the data to params->wdata and made it contiguous or we are using + // the original src1 data pointer, so we should index using the indices directly + // TODO: this is a bit of a hack, we should probably have a better way to handle this + const char * src1_col = (const char *) wdata + + (src1_cont || src1->type != vec_dot_type + ? 
(i11 + i12*ne11)*row_size + : (i11*nb11 + i12*nb12)); + + float * dst_col = (float *) ((char *) dst->data + (i1*nb1 + i2*nb2)); + + //for (int64_t ir0 = iir0; ir0 < iir0 + blck_0 && ir0 < ir011; ++ir0) { + // vec_dot(ne00, &dst_col[ir0], src0_row + ir0*nb01, src1_col); + //} + + for (int64_t ir0 = iir0; ir0 < iir0 + blck_0 && ir0 < ir011; ++ir0) { + vec_dot(ne00, &tmp[ir0 - iir0], 0, src0_cur + ir0*nb01, 0, src1_col, 0, 1); + } + + memcpy(&dst_col[iir0], tmp, (MIN(iir0 + blck_0, ir011) - iir0)*sizeof(float)); + } + } + } + } + +#undef MMID_MATRIX_ROW +} + +// ggml_compute_forward_out_prod + +static void ggml_compute_forward_out_prod_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_TENSOR_BINARY_OP_LOCALS + + const int ith = params->ith; + const int nth = params->nth; + + GGML_ASSERT(ne0 == ne00); + GGML_ASSERT(ne1 == ne10); + GGML_ASSERT(ne2 == ne02); + GGML_ASSERT(ne02 == ne12); + GGML_ASSERT(ne3 == ne13); + GGML_ASSERT(ne03 == ne13); + + // we don't support permuted src0 or src1 + GGML_ASSERT(nb00 == sizeof(float)); + + // dst cannot be transposed or permuted + GGML_ASSERT(nb0 == sizeof(float)); + // GGML_ASSERT(nb0 <= nb1); + // GGML_ASSERT(nb1 <= nb2); + // GGML_ASSERT(nb2 <= nb3); + + // nb01 >= nb00 - src0 is not transposed + // compute by src0 rows + + if (ith == 0) { + ggml_vec_set_f32(ne0*ne1*ne2*ne3, dst->data, 0); + } + ggml_barrier(params->threadpool); + + // dst[:,:,:,:] = 0 + // for i2,i3: + // for i1: + // for i01: + // for i0: + // dst[i0,i1,i2,i3] += src0[i0,i01,i2,i3] * src1[i1,i01,i2,i3] + + // parallelize by last three dimensions + + // total rows in dst + const int64_t nr = ne1*ne2*ne3; + + // rows per thread + const int64_t dr = (nr + nth - 1)/nth; + + // row range for this thread + const int64_t ir0 = dr*ith; + const int64_t ir1 = MIN(ir0 + dr, nr); + + // block-tiling attempt + const int64_t blck_0 = 
MAX(GGML_VEC_MAD_UNROLL, 32); + const int64_t blck_1 = 16; + + for (int64_t bir = ir0; bir < ir1; bir += blck_1) { + const int64_t bir1 = MIN(bir + blck_1, ir1); + for (int64_t bi01 = 0; bi01 < ne01; bi01 += blck_0) { + const int64_t bne01 = MIN(bi01 + blck_0, ne01); + for (int64_t ir = bir; ir < bir1; ++ir) { + // dst indices + const int64_t i3 = ir/(ne2*ne1); + const int64_t i2 = (ir - i3*ne2*ne1)/ne1; + const int64_t i1 = (ir - i3*ne2*ne1 - i2*ne1); + + const int64_t i02 = i2; + const int64_t i03 = i3; + + //const int64_t i10 = i1; + const int64_t i12 = i2; + const int64_t i13 = i3; + +#if GGML_VEC_MAD_UNROLL > 2 + const int64_t bne01_unroll = bne01 - (bne01 % GGML_VEC_MAD_UNROLL); + for (int64_t i01 = bi01; i01 < bne01_unroll; i01 += GGML_VEC_MAD_UNROLL) { + const int64_t i11 = i01; + + float * s0 = (float *) ((char *) src0->data + ( i01*nb01 + i02*nb02 + i03*nb03)); + float * s1 = (float *) ((char *) src1->data + (i1*nb10 + i11*nb11 + i12*nb12 + i13*nb13)); + float * d = (float *) ((char *) dst->data + ( i1*nb1 + i2*nb2 + i3*nb3)); + + ggml_vec_mad_f32_unroll(ne0, nb01, nb11, d, s0, s1); + } + for (int64_t i01 = bne01_unroll; i01 < bne01; ++i01) { + const int64_t i11 = i01; + + float * s0 = (float *) ((char *) src0->data + ( i01*nb01 + i02*nb02 + i03*nb03)); + float * s1 = (float *) ((char *) src1->data + (i1*nb10 + i11*nb11 + i12*nb12 + i13*nb13)); + float * d = (float *) ((char *) dst->data + ( i1*nb1 + i2*nb2 + i3*nb3)); + + ggml_vec_mad_f32(ne0, d, s0, *s1); + } +#else + for (int64_t i01 = bi01; i01 < bne01; ++i01) { + const int64_t i11 = i01; + + float * s0 = (float *) ((char *) src0->data + ( i01*nb01 + i02*nb02 + i03*nb03)); + float * s1 = (float *) ((char *) src1->data + (i1*nb10 + i11*nb11 + i12*nb12 + i13*nb13)); + float * d = (float *) ((char *) dst->data + ( i1*nb1 + i2*nb2 + i3*nb3)); + + ggml_vec_mad_f32(ne0, d, s0, *s1); + } +#endif + } + } + } +} + +static void ggml_compute_forward_out_prod_q_f32( + const struct ggml_compute_params * params, + 
struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_TENSOR_BINARY_OP_LOCALS; + + const int ith = params->ith; + const int nth = params->nth; + + const enum ggml_type type = src0->type; + ggml_to_float_t const dequantize_row_q = type_traits[type].to_float; + + GGML_ASSERT(ne02 == ne12); + GGML_ASSERT(ne03 == ne13); + GGML_ASSERT(ne2 == ne12); + GGML_ASSERT(ne3 == ne13); + + // we don't support permuted src0 dim0 + GGML_ASSERT(nb00 == ggml_type_size(type)); + + // dst dim0 cannot be transposed or permuted + GGML_ASSERT(nb0 == sizeof(float)); + // GGML_ASSERT(nb0 <= nb1); + // GGML_ASSERT(nb1 <= nb2); + // GGML_ASSERT(nb2 <= nb3); + + GGML_ASSERT(ne0 == ne00); + GGML_ASSERT(ne1 == ne10); + GGML_ASSERT(ne2 == ne02); + GGML_ASSERT(ne3 == ne03); + + // nb01 >= nb00 - src0 is not transposed + // compute by src0 rows + + if (ith == 0) { + ggml_vec_set_f32(ne0*ne1*ne2*ne3, dst->data, 0); + } + ggml_barrier(params->threadpool); + + // parallelize by last three dimensions + + // total rows in dst + const int64_t nr = ne1*ne2*ne3; + + // rows per thread + const int64_t dr = (nr + nth - 1)/nth; + + // row range for this thread + const int64_t ir0 = dr*ith; + const int64_t ir1 = MIN(ir0 + dr, nr); + + // dst[:,:,:,:] = 0 + // for i2,i3: + // for i1: + // for i01: + // for i0: + // dst[i0,i1,i2,i3] += src0[i0,i01,i2,i3] * src1[i1,i01,i2,i3] + + float * wdata = (float *) params->wdata + (ne0 + CACHE_LINE_SIZE_F32) * ith; + + for (int64_t ir = ir0; ir < ir1; ++ir) { + // dst indices + const int64_t i3 = ir/(ne2*ne1); + const int64_t i2 = (ir - i3*ne2*ne1)/ne1; + const int64_t i1 = (ir - i3*ne2*ne1 - i2*ne1); + + const int64_t i02 = i2; + const int64_t i03 = i3; + + //const int64_t i10 = i1; + const int64_t i12 = i2; + const int64_t i13 = i3; + + for (int64_t i01 = 0; i01 < ne01; ++i01) { + const int64_t i11 = i01; + + float * s0 = (float *) ((char *) src0->data + ( i01*nb01 + i02*nb02 + i03*nb03)); 
+ float * s1 = (float *) ((char *) src1->data + (i1*nb10 + i11*nb11 + i12*nb12 + i13*nb13)); + float * d = (float *) ((char *) dst->data + ( i1*nb1 + i2*nb2 + i3*nb3)); + + dequantize_row_q(s0, wdata, ne0); + ggml_vec_mad_f32(ne0, d, wdata, *s1); + } + } +} + +static void ggml_compute_forward_out_prod( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: + case GGML_TYPE_Q5_0: + case GGML_TYPE_Q5_1: + case GGML_TYPE_Q8_0: + case GGML_TYPE_Q2_K: + case GGML_TYPE_Q3_K: + case GGML_TYPE_Q4_K: + case GGML_TYPE_Q5_K: + case GGML_TYPE_Q6_K: + case GGML_TYPE_TQ1_0: + case GGML_TYPE_TQ2_0: + case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: + case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ1_M: + case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ4_XS: + case GGML_TYPE_IQ3_S: + case GGML_TYPE_IQ2_S: + case GGML_TYPE_Q4_0_4_4: + case GGML_TYPE_Q4_0_4_8: + case GGML_TYPE_Q4_0_8_8: + { + ggml_compute_forward_out_prod_q_f32(params, dst); + } break; + case GGML_TYPE_F16: + { + GGML_ABORT("fatal error"); // todo + // ggml_compute_forward_out_prod_f16_f32(params, dst); + } + case GGML_TYPE_F32: + { + ggml_compute_forward_out_prod_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_scale + +static void ggml_compute_forward_scale_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(ggml_is_contiguous(dst)); + GGML_ASSERT(ggml_are_same_shape(src0, dst)); + + // scale factor + float v; + memcpy(&v, dst->op_params, sizeof(float)); + + const int ith = params->ith; + const int nth = params->nth; + + const int nc = src0->ne[0]; + const int nr = ggml_nrows(src0); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this 
thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + const size_t nb01 = src0->nb[1]; + + const size_t nb1 = dst->nb[1]; + + for (int i1 = ir0; i1 < ir1; i1++) { + if (dst->data != src0->data) { + // src0 is same shape as dst => same indices + memcpy((char *)dst->data + i1*nb1, (char *)src0->data + i1*nb01, nc * sizeof(float)); + } + ggml_vec_scale_f32(nc, (float *) ((char *) dst->data + i1*nb1), v); + } +} + +static void ggml_compute_forward_scale( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_scale_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_set + +static void ggml_compute_forward_set_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_are_same_shape(src0, dst)); + GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0)); + + // view src0 and dst with these strides and data offset inbytes during set + // nb0 is implicitly element_size because src0 and dst are contiguous + size_t nb1 = ((int32_t *) dst->op_params)[0]; + size_t nb2 = ((int32_t *) dst->op_params)[1]; + size_t nb3 = ((int32_t *) dst->op_params)[2]; + size_t offset = ((int32_t *) dst->op_params)[3]; + bool inplace = (bool) ((int32_t *) dst->op_params)[4]; + + if (!inplace) { + if (params->ith == 0) { + // memcpy needs to be synchronized across threads to avoid race conditions. 
+ // => do it in INIT phase + memcpy( + ((char *) dst->data), + ((char *) src0->data), + ggml_nbytes(dst)); + } + ggml_barrier(params->threadpool); + } + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src1); + const int nc = src1->ne[0]; + + GGML_TENSOR_LOCALS(int64_t, ne1, src1, ne) + GGML_TENSOR_LOCALS(size_t, nb1, src1, nb) + + // src0 and dst as viewed during set + const size_t nb0 = ggml_element_size(src0); + + const int im0 = (ne10 == 0 ? 0 : ne10-1); + const int im1 = (ne11 == 0 ? 0 : ne11-1); + const int im2 = (ne12 == 0 ? 0 : ne12-1); + const int im3 = (ne13 == 0 ? 0 : ne13-1); + + GGML_ASSERT(offset + im0*nb0 + im1*nb1 + im2*nb2 + im3*nb3 <= ggml_nbytes(dst)); + + GGML_ASSERT(nb10 == sizeof(float)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + for (int ir = ir0; ir < ir1; ++ir) { + // src0 and dst are viewed with shape of src1 and offset + // => same indices + const int i3 = ir/(ne12*ne11); + const int i2 = (ir - i3*ne12*ne11)/ne11; + const int i1 = (ir - i3*ne12*ne11 - i2*ne11); + + ggml_vec_cpy_f32(nc, + (float *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + offset), + (float *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11)); + } +} + +static void ggml_compute_forward_set( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_set_f32(params, dst); + } break; + case GGML_TYPE_F16: + case GGML_TYPE_BF16: + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: + case GGML_TYPE_Q5_0: + case GGML_TYPE_Q5_1: + case GGML_TYPE_Q8_0: + case GGML_TYPE_Q8_1: + case GGML_TYPE_Q2_K: + case GGML_TYPE_Q3_K: + case GGML_TYPE_Q4_K: + case GGML_TYPE_Q5_K: + case GGML_TYPE_Q6_K: + case GGML_TYPE_TQ1_0: + case GGML_TYPE_TQ2_0: + case GGML_TYPE_IQ2_XXS: + case 
GGML_TYPE_IQ2_XS:
        case GGML_TYPE_IQ3_XXS:
        case GGML_TYPE_IQ1_S:
        case GGML_TYPE_IQ1_M:
        case GGML_TYPE_IQ4_NL:
        case GGML_TYPE_IQ4_XS:
        case GGML_TYPE_IQ3_S:
        case GGML_TYPE_IQ2_S:
        case GGML_TYPE_Q4_0_4_4:
        case GGML_TYPE_Q4_0_4_8:
        case GGML_TYPE_Q4_0_8_8:
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_cpy

// cpy is implemented as a dup into the (possibly differently-typed) dst.
static void ggml_compute_forward_cpy(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {
    ggml_compute_forward_dup(params, dst);
}

// ggml_compute_forward_cont

// cont materializes a view as contiguous data; same implementation as dup.
static void ggml_compute_forward_cont(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {
    ggml_compute_forward_dup(params, dst);
}

// ggml_compute_forward_reshape

// reshape only changes tensor metadata (done at graph-build time), so no work here.
static void ggml_compute_forward_reshape(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {
    // NOP
    UNUSED(params);
    UNUSED(dst);
}

// ggml_compute_forward_view

static void ggml_compute_forward_view(
        const struct ggml_compute_params * params,
        const struct ggml_tensor * dst) {
    // NOP
    UNUSED(params);
    UNUSED(dst);
}

// ggml_compute_forward_permute

static void ggml_compute_forward_permute(
        const struct ggml_compute_params * params,
        const struct ggml_tensor * dst) {
    // NOP
    UNUSED(params);
    UNUSED(dst);
}

// ggml_compute_forward_transpose

static void ggml_compute_forward_transpose(
        const struct ggml_compute_params * params,
        const struct ggml_tensor * dst) {
    // NOP
    UNUSED(params);
    UNUSED(dst);
}

// ggml_compute_forward_get_rows

// Gather rows of quantized src0 selected by int32 indices in src1,
// dequantizing each gathered row into f32 dst. Rows are split across threads.
static void ggml_compute_forward_get_rows_q(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * src1 = dst->src[1];

    GGML_TENSOR_BINARY_OP_LOCALS

    const int64_t nc = ne00;
    const int64_t nr = ggml_nelements(src1);

    const enum ggml_type type = src0->type;
    // per-type dequantization routine from the type traits table
    ggml_to_float_t const dequantize_row_q = type_traits[type].to_float;

    assert(ne0  == nc);
    assert(ne02 == ne11);
    assert(nb00 == ggml_type_size(type));
    assert(ggml_nrows(dst) == nr);

    const int ith = params->ith;
    const int nth = params->nth;

    // rows per thread
    const int dr = (nr + nth - 1)/nth;

    // row range for this thread
    const int ir0 = dr*ith;
    const int ir1 = MIN(ir0 + dr, nr);

    for (int64_t i = ir0; i < ir1; ++i) {
        // decompose flat index i into (i10, i11, i12) coordinates of src1
        const int64_t i12 = i/(ne11*ne10);
        const int64_t i11 = (i - i12*ne11*ne10)/ne10;
        const int64_t i10 = (i - i12*ne11*ne10 - i11*ne10);
        const int64_t i01 = *(int32_t *) ((char *) src1->data + i10*nb10 + i11*nb11 + i12*nb12);

        GGML_ASSERT(i01 >= 0 && i01 < ne01);

        dequantize_row_q(
                (const void *) ((char *) src0->data + i01*nb01 + i11*nb02 + i12*nb03),
                     (float *) ((char *)  dst->data + i10*nb1  + i11*nb2  + i12*nb3), nc);
    }
}

// Same gather as above for f16 src0, converting each row to f32.
static void ggml_compute_forward_get_rows_f16(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * src1 = dst->src[1];

    GGML_TENSOR_BINARY_OP_LOCALS

    const int64_t nc = ne00;
    const int64_t nr = ggml_nelements(src1);

    assert(ne0  == nc);
    assert(ne02 == ne11);
    assert(nb00 == sizeof(ggml_fp16_t));
    assert(ggml_nrows(dst) == nr);

    const int ith = params->ith;
    const int nth = params->nth;

    // rows per thread
    const int dr = (nr + nth - 1)/nth;

    // row range for this thread
    const int ir0 = dr*ith;
    const int ir1 = MIN(ir0 + dr, nr);

    for (int64_t i = ir0; i < ir1; ++i) {
        const int64_t i12 = i/(ne11*ne10);
        const int64_t i11 = (i - i12*ne11*ne10)/ne10;
        const int64_t i10 = (i - i12*ne11*ne10 - i11*ne10);
        const int64_t i01 = *(int32_t *) ((char *) src1->data + i10*nb10 + i11*nb11 + i12*nb12);

        GGML_ASSERT(i01 >= 0 && i01 < ne01);

        ggml_fp16_to_fp32_row(
                (const void *) ((char *) src0->data + i01*nb01 + i11*nb02 + i12*nb03),
                     (float *) ((char *)  dst->data + i10*nb1  + i11*nb2  + i12*nb3), nc);
    }
}

// Same gather for bf16 src0, converting each row to f32.
static void ggml_compute_forward_get_rows_bf16(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * src1 = dst->src[1];

    GGML_TENSOR_BINARY_OP_LOCALS

    const int64_t nc = ne00;
    const int64_t nr = ggml_nelements(src1);

    assert(ne0  == nc);
    assert(ne02 == ne11);
    assert(nb00 == sizeof(ggml_bf16_t));
    assert(ggml_nrows(dst) == nr);

    const int ith = params->ith;
    const int nth = params->nth;

    // rows per thread
    const int dr = (nr + nth - 1)/nth;

    // row range for this thread
    const int ir0 = dr*ith;
    const int ir1 = MIN(ir0 + dr, nr);

    for (int64_t i = ir0; i < ir1; ++i) {
        const int64_t i12 = i/(ne11*ne10);
        const int64_t i11 = (i - i12*ne11*ne10)/ne10;
        const int64_t i10 = (i - i12*ne11*ne10 - i11*ne10);
        const int64_t i01 = *(int32_t *) ((char *) src1->data + i10*nb10 + i11*nb11 + i12*nb12);

        GGML_ASSERT(i01 >= 0 && i01 < ne01);

        ggml_bf16_to_fp32_row(
                (const void *) ((char *) src0->data + i01*nb01 + i11*nb02 + i12*nb03),
                     (float *) ((char *)  dst->data + i10*nb1  + i11*nb2  + i12*nb3), nc);
    }
}

// Same gather for f32 src0: a plain vector copy per row.
static void ggml_compute_forward_get_rows_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * src1 = dst->src[1];

    GGML_TENSOR_BINARY_OP_LOCALS

    const int64_t nc = ne00;
    const int64_t nr = ggml_nelements(src1);

    assert(ne0  == nc);
    assert(ne02 == ne11);
    assert(nb00 == sizeof(float));
    assert(ggml_nrows(dst) == nr);

    const int ith = params->ith;
    const int nth = params->nth;

    // rows per thread
    const int dr = (nr + nth - 1)/nth;

    // row range for this thread
    const int ir0 = dr*ith;
    const int ir1 = MIN(ir0 + dr, nr);

    for (int64_t i = ir0; i < ir1; ++i) {
        const int64_t i12 = i/(ne11*ne10);
        const int64_t i11 = (i - i12*ne11*ne10)/ne10;
        const int64_t i10 = (i - i12*ne11*ne10 - i11*ne10);
        const int64_t i01 = *(int32_t *) ((char *) src1->data + i10*nb10 + i11*nb11 + i12*nb12);

        GGML_ASSERT(i01 >= 0 && i01 < ne01);

        ggml_vec_cpy_f32(nc,
                (float *) ((char *)  dst->data + i10*nb1  + i11*nb2  + i12*nb3),
                (float *) ((char *) src0->data + i01*nb01 + i11*nb02 + i12*nb03));
    }
}

// Dispatch get_rows by src0 element type.
static void ggml_compute_forward_get_rows(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_Q4_0:
        case GGML_TYPE_Q4_1:
        case GGML_TYPE_Q5_0:
        case GGML_TYPE_Q5_1:
        case GGML_TYPE_Q8_0:
        case GGML_TYPE_Q8_1:
        case GGML_TYPE_Q2_K:
        case GGML_TYPE_Q3_K:
        case GGML_TYPE_Q4_K:
        case GGML_TYPE_Q5_K:
        case GGML_TYPE_Q6_K:
        case GGML_TYPE_TQ1_0:
        case GGML_TYPE_TQ2_0:
        case GGML_TYPE_IQ2_XXS:
        case GGML_TYPE_IQ2_XS:
        case GGML_TYPE_IQ3_XXS:
        case GGML_TYPE_IQ1_S:
        case GGML_TYPE_IQ1_M:
        case GGML_TYPE_IQ4_NL:
        case GGML_TYPE_IQ4_XS:
        case GGML_TYPE_IQ3_S:
        case GGML_TYPE_IQ2_S:
        case GGML_TYPE_Q4_0_4_4:
        case GGML_TYPE_Q4_0_4_8:
        case GGML_TYPE_Q4_0_8_8:
            {
                ggml_compute_forward_get_rows_q(params, dst);
            } break;
        case GGML_TYPE_F16:
            {
                ggml_compute_forward_get_rows_f16(params, dst);
            } break;
        case GGML_TYPE_BF16:
            {
                ggml_compute_forward_get_rows_bf16(params, dst);
            } break;
        case GGML_TYPE_F32:
        case GGML_TYPE_I32:
            {
                // I32 rows are copied bitwise via the f32 path (same element size)
                ggml_compute_forward_get_rows_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }

    //static bool first = true;
    //printf("ne0 = %d, ne1 = %d, ne2 = %d\n", dst->ne[0], dst->ne[1], dst->ne[2]);
    //if (first) {
    //    first = false;
    //} else {
    //    for (int k = 0; k < dst->ne[1]; ++k) {
    //        for (int j = 0; j < dst->ne[0]/16; ++j) {
    //            for (int i = 0; i < 16; ++i) {
    //                printf("%8.4f ", ((float *) dst->data)[k*dst->ne[0] + j*16 + i]);
    //            }
    //            printf("\n");
    //        }
    //        printf("\n");
    //    }
    //    printf("\n");
    //    exit(0);
    //}
}
// ggml_compute_forward_get_rows_back

// Backward of get_rows: scatter-add f16 src0 rows into f32 dst at the row
// indices given by int32 src1. Runs on thread 0 only (dst rows can collide).
static void ggml_compute_forward_get_rows_back_f32_f16(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * src1 = dst->src[1];

    if (params->ith != 0) {
        return;
    }

    GGML_ASSERT(ggml_is_contiguous(dst));

    // ggml_compute_forward_dup_same_cont(params, opt0, dst);

    // dst accumulates, so it must start from zero
    memset(dst->data, 0, ggml_nbytes(dst));

    const int nc = src0->ne[0];
    const int nr = ggml_nelements(src1);

    GGML_ASSERT( dst->ne[0] == nc);
    GGML_ASSERT(src0->nb[0] == sizeof(ggml_fp16_t));

    for (int i = 0; i < nr; ++i) {
        const int r = ((int32_t *) src1->data)[i];

        for (int j = 0; j < nc; ++j) {
            ggml_fp16_t v = ((ggml_fp16_t *) ((char *) src0->data + i*src0->nb[1]))[j];
            ((float *) ((char *) dst->data + r*dst->nb[1]))[j] += GGML_FP16_TO_FP32(v);
        }
    }
}

// f32 variant of the scatter-add above, using the vectorized row add.
static void ggml_compute_forward_get_rows_back_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * src1 = dst->src[1];

    if (params->ith != 0) {
        return;
    }

    GGML_ASSERT(ggml_is_contiguous(dst));

    // ggml_compute_forward_dup_same_cont(params, opt0, dst);

    memset(dst->data, 0, ggml_nbytes(dst));

    const int nc = src0->ne[0];
    const int nr = ggml_nelements(src1);

    GGML_ASSERT( dst->ne[0] == nc);
    GGML_ASSERT(src0->nb[0] == sizeof(float));

    for (int i = 0; i < nr; ++i) {
        const int r = ((int32_t *) src1->data)[i];

        ggml_vec_add_f32(nc,
                (float *) ((char *)  dst->data + r*dst->nb[1]),
                (float *) ((char *)  dst->data + r*dst->nb[1]),
                (float *) ((char *) src0->data + i*src0->nb[1]));
    }
}

// Dispatch get_rows backward by src0 type.
static void ggml_compute_forward_get_rows_back(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F16:
            {
                ggml_compute_forward_get_rows_back_f32_f16(params, dst);
            } break;
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_get_rows_back_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }

    //static bool first = true;
    //printf("ne0 = %d, ne1 = %d, ne2 = %d\n", dst->ne[0], dst->ne[1], dst->ne[2]);
    //if (first) {
    //    first = false;
    //} else {
    //    for (int k = 0; k < dst->ne[1]; ++k) {
    //        for (int j = 0; j < dst->ne[0]/16; ++j) {
    //            for (int i = 0; i < 16; ++i) {
    //                printf("%8.4f ", ((float *) dst->data)[k*dst->ne[0] + j*16 + i]);
    //            }
    //            printf("\n");
    //        }
    //        printf("\n");
    //    }
    //    printf("\n");
    //    exit(0);
    //}
}

// ggml_compute_forward_diag

// Write the src0 row vector onto the main diagonal of dst (per i2/i3 slice),
// zero-filling everything off the diagonal. Thread 0 only.
static void ggml_compute_forward_diag_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    if (params->ith != 0) {
        return;
    }

    // TODO: handle transposed/permuted matrices

    GGML_TENSOR_UNARY_OP_LOCALS

    GGML_ASSERT(ne00 == ne0);
    GGML_ASSERT(ne00 == ne1);
    GGML_ASSERT(ne01 == 1);
    GGML_ASSERT(ne02 == ne2);
    GGML_ASSERT(ne03 == ne3);

    GGML_ASSERT(nb00 == sizeof(float));
    GGML_ASSERT(nb0  == sizeof(float));

    for (int i3 = 0; i3 < ne3; i3++) {
        for (int i2 = 0; i2 < ne2; i2++) {
            for (int i1 = 0; i1 < ne1; i1++) {
                float * d = (float *)((char *)  dst->data + i3*nb3  + i2*nb2 + i1*nb1);
                float * s = (float *)((char *) src0->data + i3*nb03 + i2*nb02);
                for (int i0 = 0; i0 < i1; i0++) {
                    d[i0] = 0;
                }
                d[i1] = s[i1];
                for (int i0 = i1+1; i0 < ne0; i0++) {
                    d[i0] = 0;
                }
            }
        }
    }
}

static void ggml_compute_forward_diag(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_diag_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_diag_mask_inf

// Set entries strictly above the (n_past-shifted) diagonal to `value`
// (-INF for causal masking, 0 for diag_mask_zero). Supports in-place and
// out-of-place operation; the out-of-place copy is done once by thread 0.
static void ggml_compute_forward_diag_mask_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst,
        const float value) {

    const struct ggml_tensor * src0 = dst->src[0];

    const int ith = params->ith;
    const int nth = params->nth;

    const int  n_past  = ((int32_t *) dst->op_params)[0];
    const bool inplace = src0->data == dst->data;

    GGML_ASSERT(n_past >= 0);

    if (!inplace) {
        if (ith == 0) {
            // memcpy needs to be synchronized across threads to avoid race conditions.
            // => do it in INIT phase
            GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0));
            GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0));
            memcpy(
                ((char *)  dst->data),
                ((char *) src0->data),
                ggml_nbytes(dst));
        }
        ggml_barrier(params->threadpool);
    }

    // TODO: handle transposed/permuted matrices

    const int n  = ggml_nrows(src0);
    const int nc = src0->ne[0];
    const int nr = src0->ne[1];
    const int nz = n/nr;

    GGML_ASSERT( dst->nb[0] == sizeof(float));
    GGML_ASSERT(src0->nb[0] == sizeof(float));

    for (int k = 0; k < nz; k++) {
        for (int j = ith; j < nr; j += nth) {
            for (int i = n_past; i < nc; i++) {
                if (i > n_past + j) {
                    *(float *)((char *) dst->data + k*dst->nb[2] + j*dst->nb[1] + i*dst->nb[0]) = value;
                }
            }
        }
    }
}

static void ggml_compute_forward_diag_mask_inf(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_diag_mask_f32(params, dst, -INFINITY);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

static void ggml_compute_forward_diag_mask_zero(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_diag_mask_f32(params, dst, 0);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}
// ggml_compute_forward_soft_max

// Row-wise softmax of src0 into dst. Optional additive mask src1 (f16 or f32)
// is scaled by a per-head ALiBi slope; `scale` is applied before the mask.
static void ggml_compute_forward_soft_max_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * src1 = dst->src[1];

    assert(ggml_is_contiguous(dst));
    assert(ggml_are_same_shape(src0, dst));

    // op_params holds [scale, max_bias] as two packed floats
    float scale    = 1.0f;
    float max_bias = 0.0f;

    memcpy(&scale,    (float *) dst->op_params + 0, sizeof(float));
    memcpy(&max_bias, (float *) dst->op_params + 1, sizeof(float));

    // TODO: handle transposed/permuted matrices

    const int ith = params->ith;
    const int nth = params->nth;

    GGML_TENSOR_UNARY_OP_LOCALS

    //const int64_t ne11 = src1 ? src1->ne[1] : 1;

    // TODO: is this supposed to be ceil instead of floor?
    //   https://huggingface.co/mosaicml/mpt-7b/blob/main/attention.py#L370
    const uint32_t n_head      = ne02;
    const uint32_t n_head_log2 = 1u << (uint32_t) floor(log2(n_head));

    // ALiBi slope bases for heads below/above the power-of-two boundary
    const float m0 = powf(2.0f, -(max_bias       ) / n_head_log2);
    const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_head_log2);

    const int nc = src0->ne[0];
    const int nr = ggml_nrows(src0);

    // rows per thread
    const int dr = (nr + nth - 1)/nth;

    // row range for this thread
    const int ir0 = dr*ith;
    const int ir1 = MIN(ir0 + dr, nr);

    // per-thread scratch row in wdata, padded by a cache line
    float * wp = (float *) params->wdata + (nc + CACHE_LINE_SIZE_F32) * ith;

    const bool use_f16 = (src1 && src1->type == GGML_TYPE_F16);

    for (int i1 = ir0; i1 < ir1; i1++) {
        // ALiBi
        const uint32_t h = (i1/ne01)%ne02; // head
        const float slope = (max_bias > 0.0f) ? h < n_head_log2 ? powf(m0, h + 1) : powf(m1, 2*(h - n_head_log2) + 1) : 1.0f;

        float * sp = (float *)((char *) src0->data + i1*src0->nb[1]);
        float * dp = (float *)((char *)  dst->data + i1*dst->nb[1]);

        // broadcast the mask across rows
        ggml_fp16_t * mp_f16 = src1 ? (ggml_fp16_t *)((char *) src1->data) + (i1%ne01)*ne00 : NULL;
        float       * mp_f32 = src1 ? (float       *)((char *) src1->data) + (i1%ne01)*ne00 : NULL;

        ggml_vec_cpy_f32  (nc, wp, sp);
        ggml_vec_scale_f32(nc, wp, scale);
        if (mp_f32) {
            if (use_f16) {
                for (int i = 0; i < nc; ++i) {
                    wp[i] += slope*GGML_FP16_TO_FP32(mp_f16[i]);
                }
            } else {
                for (int i = 0; i < nc; ++i) {
                    wp[i] += slope*mp_f32[i];
                }
            }
        }

#ifndef NDEBUG
        for (int i = 0; i < nc; ++i) {
            //printf("p[%d] = %f\n", i, p[i]);
            assert(!isnan(wp[i]));
        }
#endif

        // subtract the row max before exponentiating for numerical stability
        float max = -INFINITY;
        ggml_vec_max_f32(nc, &max, wp);

        ggml_float sum = ggml_vec_soft_max_f32(nc, dp, wp, max);
        assert(sum > 0.0);

        sum = 1.0/sum;
        ggml_vec_scale_f32(nc, dp, sum);

#ifndef NDEBUG
        for (int i = 0; i < nc; ++i) {
            assert(!isnan(dp[i]));
            assert(!isinf(dp[i]));
        }
#endif
    }
}

static void ggml_compute_forward_soft_max(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_soft_max_f32(params, dst);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}


// ggml_compute_forward_soft_max_back

// Backward of softmax: per row, dx = y * (dy - dot(y, dy)), where
// src0 = dy (upstream gradient) and src1 = y (forward softmax output).
static void ggml_compute_forward_soft_max_back_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * src1 = dst->src[1];

    GGML_ASSERT(ggml_is_contiguous(src0));
    GGML_ASSERT(ggml_is_contiguous(src1));
    GGML_ASSERT(ggml_is_contiguous(dst));
    GGML_ASSERT(ggml_are_same_shape(src0, dst));
    GGML_ASSERT(ggml_are_same_shape(src1, dst));

    // TODO: handle transposed/permuted matrices

    const int ith = params->ith;
    const int nth = params->nth;

    const int nc = src0->ne[0];
    const int nr = ggml_nrows(src0);

    // rows per thread
    const int dr = (nr + nth - 1)/nth;

    // row range for this thread
    const int ir0 = dr*ith;
    const int ir1 = MIN(ir0 + dr, nr);

    for (int i1 = ir0; i1 < ir1; i1++) {
        float *dy = (float *)((char *) src0->data + i1*src0->nb[1]);
*)((char *) src0->data + i1*src0->nb[1]); + float *y = (float *)((char *) src1->data + i1*src1->nb[1]); + float *dx = (float *)((char *) dst->data + i1*dst->nb[1]); + +#ifndef NDEBUG + for (int i = 0; i < nc; ++i) { + //printf("p[%d] = %f\n", i, p[i]); + assert(!isnan(dy[i])); + assert(!isnan(y[i])); + } +#endif + // Jii = yi - yi*yi + // Jij = -yi*yj + // J = diag(y)-y.T*y + // dx = J * dy + // dxk = sum_i(Jki * dyi) + // dxk = sum_i(-yk*yi * dyi) - (-yk*yk)*dyk + (yk - yk*yk)*dyk + // dxk = sum_i(-yk*yi * dyi) + yk*yk*dyk + yk*dyk - yk*yk*dyk + // dxk = sum_i(-yk*yi * dyi) + yk*dyk + // dxk = -yk * sum_i(yi * dyi) + yk*dyk + // dxk = -yk * dot(y, dy) + yk*dyk + // dxk = yk * (- dot(y, dy) + dyk) + // dxk = yk * (dyk - dot(y, dy)) + // + // post-order: + // dot_y_dy := dot(y, dy) + // dx := dy + // dx := dx - dot_y_dy + // dx := dx * y + + // linear runtime, no additional memory + float dot_y_dy = 0; + ggml_vec_dot_f32 (nc, &dot_y_dy, 0, y, 0, dy, 0, 1); + ggml_vec_cpy_f32 (nc, dx, dy); + ggml_vec_acc1_f32(nc, dx, -dot_y_dy); + ggml_vec_mul_f32 (nc, dx, dx, y); + +#ifndef NDEBUG + for (int i = 0; i < nc; ++i) { + assert(!isnan(dx[i])); + assert(!isinf(dx[i])); + } +#endif + } +} + +static void ggml_compute_forward_soft_max_back( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_soft_max_back_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_clamp + +static void ggml_compute_forward_clamp_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + if (params->ith != 0) { + return; + } + + float min; + float max; + memcpy(&min, (float *) dst->op_params + 0, sizeof(float)); + memcpy(&max, (float *) dst->op_params + 1, sizeof(float)); + + const int ith = params->ith; + const int nth = 
params->nth; + + const int n = ggml_nrows(src0); + const int nc = src0->ne[0]; + + const size_t nb00 = src0->nb[0]; + const size_t nb01 = src0->nb[1]; + + const size_t nb0 = dst->nb[0]; + const size_t nb1 = dst->nb[1]; + + GGML_ASSERT( nb0 == sizeof(float)); + GGML_ASSERT(nb00 == sizeof(float)); + + for (int j = ith; j < n; j += nth) { + float * dst_ptr = (float *) ((char *) dst->data + j*nb1); + float * src0_ptr = (float *) ((char *) src0->data + j*nb01); + + for (int i = 0; i < nc; i++) { + dst_ptr[i] = MAX(MIN(src0_ptr[i], max), min); + } + } +} + +static void ggml_compute_forward_clamp( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_clamp_f32(params, dst); + } break; + case GGML_TYPE_F16: + case GGML_TYPE_BF16: + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: + case GGML_TYPE_Q5_0: + case GGML_TYPE_Q5_1: + case GGML_TYPE_Q8_0: + case GGML_TYPE_Q8_1: + case GGML_TYPE_Q2_K: + case GGML_TYPE_Q3_K: + case GGML_TYPE_Q4_K: + case GGML_TYPE_Q5_K: + case GGML_TYPE_Q6_K: + case GGML_TYPE_TQ1_0: + case GGML_TYPE_TQ2_0: + case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: + case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ1_M: + case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ4_XS: + case GGML_TYPE_IQ3_S: + case GGML_TYPE_IQ2_S: + case GGML_TYPE_Q8_K: + case GGML_TYPE_Q4_0_4_4: + case GGML_TYPE_Q4_0_4_8: + case GGML_TYPE_Q4_0_8_8: + case GGML_TYPE_I8: + case GGML_TYPE_I16: + case GGML_TYPE_I32: + case GGML_TYPE_I64: + case GGML_TYPE_F64: + case GGML_TYPE_COUNT: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_rope + +static float rope_yarn_ramp(const float low, const float high, const int i0) { + const float y = (i0 / 2 - low) / MAX(0.001f, high - low); + return 1 - MIN(1, MAX(0, y)); +} + +// YaRN algorithm based on LlamaYaRNScaledRotaryEmbedding.py from https://github.com/jquesnelle/yarn 
// MIT licensed. Copyright (c) 2023 Jeffrey Quesnelle and Bowen Peng.
static void rope_yarn(
    float theta_extrap, float freq_scale, float corr_dims[2], int64_t i0, float ext_factor, float mscale,
    float * cos_theta, float * sin_theta) {
    // Get n-d rotational scaling corrected for extrapolation
    float theta_interp = freq_scale * theta_extrap;
    float theta = theta_interp;
    if (ext_factor != 0.0f) {
        // blend interpolated and extrapolated angles by the ramp position
        float ramp_mix = rope_yarn_ramp(corr_dims[0], corr_dims[1], i0) * ext_factor;
        theta = theta_interp * (1 - ramp_mix) + theta_extrap * ramp_mix;

        // Get n-d magnitude scaling corrected for interpolation
        mscale *= 1.0f + 0.1f * logf(1.0f / freq_scale);
    }
    *cos_theta = cosf(theta) * mscale;
    *sin_theta = sinf(theta) * mscale;
}

// Apparently solving `n_rot = 2pi * x * base^((2 * max_pos_emb) / n_dims)` for x, we get
// `corr_dim(n_rot) = n_dims * log(max_pos_emb / (n_rot * 2pi)) / (2 * log(base))`
static float ggml_rope_yarn_corr_dim(int n_dims, int n_ctx_orig, float n_rot, float base) {
    return n_dims * logf(n_ctx_orig / (n_rot * 2 * (float)M_PI)) / (2 * logf(base));
}

// Precompute the (cos, sin) rotation pair (scaled by mscale, sin flipped by
// sin_sign for backward) for every even dim of one row into `cache`.
static void ggml_rope_cache_init(
     float theta_base, float freq_scale, const float * freq_factors, float corr_dims[2], int64_t ne0, float ext_factor, float mscale,
     float * cache, float sin_sign, float theta_scale) {
    // ref: https://github.com/jquesnelle/yarn/blob/master/scaled_rope/LlamaYaRNScaledRotaryEmbedding.py
    float theta = theta_base;
    for (int64_t i0 = 0; i0 < ne0; i0 += 2) {
        const float ff = freq_factors ? freq_factors[i0/2] : 1.0f;
        rope_yarn(
            theta/ff, freq_scale, corr_dims, i0, ext_factor, mscale, &cache[i0 + 0], &cache[i0 + 1]
        );
        cache[i0 + 1] *= sin_sign;

        theta *= theta_scale;
    }
}

GGML_CALL void ggml_rope_yarn_corr_dims(
    int n_dims, int n_ctx_orig, float freq_base, float beta_fast, float beta_slow, float dims[2]
) {
    // start and end correction dims
    float start = floorf(ggml_rope_yarn_corr_dim(n_dims, n_ctx_orig, beta_fast, freq_base));
    float end   =  ceilf(ggml_rope_yarn_corr_dim(n_dims, n_ctx_orig, beta_slow, freq_base));
    dims[0] = MAX(0, start);
    dims[1] = MIN(n_dims - 1, end);
}

// Apply rotary position embedding to f32 src0 using positions in src1 and
// optional per-dim frequency factors in src2. `forward` flips the sin sign
// for the backward pass.
static void ggml_compute_forward_rope_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst,
        const bool forward) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * src1 = dst->src[1];
    const struct ggml_tensor * src2 = dst->src[2];

    float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow;

    // op_params layout: ints [0..4], then six packed floats at [5..10]
    //const int n_past     = ((int32_t *) dst->op_params)[0];
    const int n_dims     = ((int32_t *) dst->op_params)[1];
    const int mode       = ((int32_t *) dst->op_params)[2];
    //const int n_ctx      = ((int32_t *) dst->op_params)[3];
    const int n_ctx_orig = ((int32_t *) dst->op_params)[4];

    memcpy(&freq_base,   (int32_t *) dst->op_params +  5, sizeof(float));
    memcpy(&freq_scale,  (int32_t *) dst->op_params +  6, sizeof(float));
    memcpy(&ext_factor,  (int32_t *) dst->op_params +  7, sizeof(float));
    memcpy(&attn_factor, (int32_t *) dst->op_params +  8, sizeof(float));
    memcpy(&beta_fast,   (int32_t *) dst->op_params +  9, sizeof(float));
    memcpy(&beta_slow,   (int32_t *) dst->op_params + 10, sizeof(float));

    GGML_TENSOR_UNARY_OP_LOCALS

    //printf("ne0: %d, ne1: %d, ne2: %d, ne3: %d\n", ne0, ne1, ne2, ne3);
    //printf("n_past = %d, ne2 = %d\n", n_past, ne2);

    GGML_ASSERT(nb00 == sizeof(float));

    const int ith = params->ith;
    const int nth = params->nth;

    const int nr = ggml_nrows(dst);
    GGML_ASSERT(n_dims <= ne0);
    GGML_ASSERT(n_dims % 2 == 0);

    // rows per thread
    const int dr = (nr + nth - 1)/nth;

    // row range for this thread
    const int ir0 = dr*ith;
    const int ir1 = MIN(ir0 + dr, nr);

    // row index used to determine which thread to use
    int ir = 0;

    const float theta_scale = powf(freq_base, -2.0f/n_dims);

    float corr_dims[2];
    ggml_rope_yarn_corr_dims(n_dims, n_ctx_orig, freq_base, beta_fast, beta_slow, corr_dims);

    const bool is_neox = mode & GGML_ROPE_TYPE_NEOX;

    const float * freq_factors = NULL;
    if (src2 != NULL) {
        GGML_ASSERT(src2->type == GGML_TYPE_F32);
        GGML_ASSERT(src2->ne[0] >= n_dims / 2);
        freq_factors = (const float *) src2->data;
    }

    // backward process uses inverse rotation by cos and sin.
    // cos and sin build a rotation matrix, where the inverse is the transpose.
    // this essentially just switches the sign of sin.
    const float sin_sign = forward ? 1.0f : -1.0f;

    const int32_t * pos = (const int32_t *) src1->data;

    for (int64_t i3 = 0; i3 < ne3; i3++) {
        for (int64_t i2 = 0; i2 < ne2; i2++) {
            const int64_t p = pos[i2];

            // per-thread cos/sin cache for this position
            float * cache = (float *) params->wdata + (ne0 + CACHE_LINE_SIZE_F32)*ith;
            ggml_rope_cache_init(p, freq_scale, freq_factors, corr_dims, ne0, ext_factor, attn_factor, cache, sin_sign, theta_scale);

            for (int64_t i1 = 0; i1 < ne1; i1++) {
                if (ir++ < ir0) continue;
                if (ir   > ir1) break;

                if (!is_neox) {
                    // interleaved style: rotate adjacent pairs (i0, i0+1)
                    for (int64_t i0 = 0; i0 < n_dims; i0 += 2) {
                        const float cos_theta = cache[i0 + 0];
                        const float sin_theta = cache[i0 + 1];

                        const float * const src = (float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00);
                        float * dst_data        = (float *)((char *)  dst->data + i3*nb3  + i2*nb2  + i1*nb1  + i0*nb0);

                        const float x0 = src[0];
                        const float x1 = src[1];

                        dst_data[0] = x0*cos_theta - x1*sin_theta;
                        dst_data[1] = x0*sin_theta + x1*cos_theta;
                    }
                } else {
                    // NeoX style: rotate pairs split across halves (ic, ic + n_dims/2)
                    for (int64_t i0 = 0; i0 < n_dims; i0 += 2) {
                        const int64_t ic = i0/2;

                        const float cos_theta = cache[i0 + 0];
                        const float sin_theta = cache[i0 + 1];

                        const float * const src = (float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + ic*nb00);
                        float * dst_data        = (float *)((char *)  dst->data + i3*nb3  + i2*nb2  + i1*nb1  + ic*nb0);

                        const float x0 = src[0];
                        const float x1 = src[n_dims/2];

                        dst_data[0]        = x0*cos_theta - x1*sin_theta;
                        dst_data[n_dims/2] = x0*sin_theta + x1*cos_theta;
                    }
                }

                // dims beyond n_dims are copied through unrotated
                for (int64_t i0 = n_dims; i0 < ne0; i0 += 2) {
                    const float * const src = (float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00);
                    float * dst_data        = (float *)((char *)  dst->data + i3*nb3  + i2*nb2  + i1*nb1  + i0*nb0);

                    dst_data[0] = src[0];
                    dst_data[1] = src[1];
                }
            }
        }
    }
}

// TODO: deduplicate f16/f32 code
// f16 variant of RoPE; identical structure with fp16<->fp32 conversion.
static void ggml_compute_forward_rope_f16(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst,
        const bool forward) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * src1 = dst->src[1];
    const struct ggml_tensor * src2 = dst->src[2];

    float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow;

    //const int n_past     = ((int32_t *) dst->op_params)[0];
    const int n_dims     = ((int32_t *) dst->op_params)[1];
    const int mode       = ((int32_t *) dst->op_params)[2];
    //const int n_ctx      = ((int32_t *) dst->op_params)[3];
    const int n_ctx_orig = ((int32_t *) dst->op_params)[4];
    memcpy(&freq_base,   (int32_t *) dst->op_params +  5, sizeof(float));
    memcpy(&freq_scale,  (int32_t *) dst->op_params +  6, sizeof(float));
    memcpy(&ext_factor,  (int32_t *) dst->op_params +  7, sizeof(float));
    memcpy(&attn_factor, (int32_t *) dst->op_params +  8, sizeof(float));
    memcpy(&beta_fast,   (int32_t *) dst->op_params +  9, sizeof(float));
    memcpy(&beta_slow,   (int32_t *) dst->op_params + 10, sizeof(float));

    GGML_TENSOR_UNARY_OP_LOCALS

    //printf("ne0: %d, ne1: %d, ne2: %d, ne3: %d\n", ne0, ne1, ne2, ne3);
    //printf("n_past = %d, ne2 = %d\n", n_past, ne2);
    GGML_ASSERT(nb0 == sizeof(ggml_fp16_t));

    const int ith = params->ith;
    const int nth = params->nth;

    const int nr = ggml_nrows(dst);

    GGML_ASSERT(n_dims <= ne0);
    GGML_ASSERT(n_dims % 2 == 0);

    // rows per thread
    const int dr = (nr + nth - 1)/nth;

    // row range for this thread
    const int ir0 = dr*ith;
    const int ir1 = MIN(ir0 + dr, nr);

    // row index used to determine which thread to use
    int ir = 0;

    const float theta_scale = powf(freq_base, -2.0f/n_dims);

    float corr_dims[2];
    ggml_rope_yarn_corr_dims(n_dims, n_ctx_orig, freq_base, beta_fast, beta_slow, corr_dims);

    const bool is_neox = mode & GGML_ROPE_TYPE_NEOX;

    const float * freq_factors = NULL;
    if (src2 != NULL) {
        GGML_ASSERT(src2->type == GGML_TYPE_F32);
        GGML_ASSERT(src2->ne[0] >= n_dims / 2);
        freq_factors = (const float *) src2->data;
    }

    // backward process uses inverse rotation by cos and sin.
    // cos and sin build a rotation matrix, where the inverse is the transpose.
    // this essentially just switches the sign of sin.
    const float sin_sign = forward ? 1.0f : -1.0f;

    const int32_t * pos = (const int32_t *) src1->data;

    for (int64_t i3 = 0; i3 < ne3; i3++) {
        for (int64_t i2 = 0; i2 < ne2; i2++) {
            const int64_t p = pos[i2];

            // per-thread cos/sin cache for this position
            float * cache = (float *) params->wdata + (ne0 + CACHE_LINE_SIZE_F32)*ith;
            ggml_rope_cache_init(p, freq_scale, freq_factors, corr_dims, ne0, ext_factor, attn_factor, cache, sin_sign, theta_scale);

            for (int64_t i1 = 0; i1 < ne1; i1++) {
                if (ir++ < ir0) continue;
                if (ir   > ir1) break;

                if (!is_neox) {
                    // interleaved style: rotate adjacent pairs (i0, i0+1)
                    for (int64_t i0 = 0; i0 < n_dims; i0 += 2) {
                        const float cos_theta = cache[i0 + 0];
                        const float sin_theta = cache[i0 + 1];

                        const ggml_fp16_t * const src = (ggml_fp16_t *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00);
                        ggml_fp16_t * dst_data        = (ggml_fp16_t *)((char *)  dst->data + i3*nb3  + i2*nb2  + i1*nb1  + i0*nb0);

                        const float x0 = GGML_FP16_TO_FP32(src[0]);
                        const float x1 = GGML_FP16_TO_FP32(src[1]);

                        dst_data[0] = GGML_FP32_TO_FP16(x0*cos_theta - x1*sin_theta);
                        dst_data[1] = GGML_FP32_TO_FP16(x0*sin_theta + x1*cos_theta);
                    }
                } else {
                    // NeoX style: rotate pairs split across halves (ic, ic + n_dims/2)
                    for (int64_t i0 = 0; i0 < n_dims; i0 += 2) {
                        const int64_t ic = i0/2;

                        const float cos_theta = cache[i0 + 0];
                        const float sin_theta = cache[i0 + 1];

                        const ggml_fp16_t * const src = (ggml_fp16_t *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + ic*nb00);
                        ggml_fp16_t * dst_data        = (ggml_fp16_t *)((char *)  dst->data + i3*nb3  + i2*nb2  + i1*nb1  + ic*nb0);

                        const float x0 = GGML_FP16_TO_FP32(src[0]);
                        const float x1 = GGML_FP16_TO_FP32(src[n_dims/2]);

                        dst_data[0]        = GGML_FP32_TO_FP16(x0*cos_theta - x1*sin_theta);
                        dst_data[n_dims/2] = GGML_FP32_TO_FP16(x0*sin_theta + x1*cos_theta);
                    }
                }

                // dims beyond n_dims are copied through unrotated
                for (int64_t i0 = n_dims; i0 < ne0; i0 += 2) {
                    const ggml_fp16_t * const src = (ggml_fp16_t *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00);
                    ggml_fp16_t * dst_data        = (ggml_fp16_t *)((char *)  dst->data + i3*nb3  + i2*nb2  + i1*nb1  + i0*nb0);

                    dst_data[0] = src[0];
                    dst_data[1] = src[1];
                }
            }
        }
    }
}

// Dispatch forward RoPE (forward = true) by src0 type.
static void ggml_compute_forward_rope(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F16:
            {
                ggml_compute_forward_rope_f16(params, dst, true);
            } break;
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_rope_f32(params, dst, true);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_rope_back

// Dispatch backward RoPE (forward = false flips the sin sign) by src0 type.
static void ggml_compute_forward_rope_back(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];

    switch (src0->type) {
        case GGML_TYPE_F16:
            {
                ggml_compute_forward_rope_f16(params, dst, false);
            } break;
        case GGML_TYPE_F32:
            {
                ggml_compute_forward_rope_f32(params, dst, false);
            } break;
        default:
            {
                GGML_ABORT("fatal error");
            }
    }
}

// ggml_compute_forward_conv_transpose_1d

// Transposed 1-d convolution: f16 kernel (src0), f32 input (src1), f32 output.
// Thread 0 permutes kernel and input into wdata, then rows of dst are
// accumulated in parallel after the barrier.
static void ggml_compute_forward_conv_transpose_1d_f16_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * src1 = dst->src[1];

    GGML_ASSERT(src0->type == GGML_TYPE_F16);
    GGML_ASSERT(src1->type == GGML_TYPE_F32);
    GGML_ASSERT( dst->type == GGML_TYPE_F32);

    GGML_TENSOR_BINARY_OP_LOCALS

    const int ith = params->ith;
    const int nth = params->nth;

    const int nk = ne00*ne01*ne02;

    GGML_ASSERT(nb00 == sizeof(ggml_fp16_t));
    GGML_ASSERT(nb10 == sizeof(float));

    if (ith == 0) {
        memset(params->wdata, 0, params->wsize);

        // permute kernel data (src0) from (K x Cout x Cin) to (Cin x K x Cout)
        {
            ggml_fp16_t * const wdata = (ggml_fp16_t *) params->wdata + 0;

            for (int64_t i02 = 0; i02 < ne02; i02++) {
                for (int64_t i01 = 0; i01 < ne01; i01++) {
                    const ggml_fp16_t * const src = (ggml_fp16_t *)((char *) src0->data + i02*nb02 + i01*nb01);
                    ggml_fp16_t * dst_data = wdata + i01*ne00*ne02;
                    for (int64_t i00 = 0; i00 < ne00; i00++) {
                        dst_data[i00*ne02 + i02] = src[i00];
                    }
                }
            }
        }

        // permute source data (src1) from (L x Cin) to (Cin x L)
        {
            ggml_fp16_t * const wdata = (ggml_fp16_t *) params->wdata + nk;
            ggml_fp16_t * dst_data = wdata;

            for (int64_t i11 = 0; i11 < ne11; i11++) {
                const float * const src = (float *)((char *) src1->data + i11*nb11);
                for (int64_t i10 = 0; i10 < ne10; i10++) {
                    dst_data[i10*ne11 + i11] = GGML_FP32_TO_FP16(src[i10]);
                }
            }
        }

        // need to zero dst since we are accumulating into it
        memset(dst->data, 0, ggml_nbytes(dst));
    }
    ggml_barrier(params->threadpool);

    // stride of the transposed convolution
    const int32_t s0 = ((const int32_t*)(dst->op_params))[0];

    // total rows in dst
    const int nr = ne1;

    // rows per thread
    const int dr = (nr + nth - 1)/nth;

    // row range for this thread
    const int ir0 = dr*ith;
    const int ir1 = MIN(ir0 + dr, nr);

    ggml_fp16_t * const wdata     = (ggml_fp16_t *) params->wdata + 0;
    ggml_fp16_t * const wdata_src = wdata + nk;

    for (int i1 = ir0; i1 < ir1; i1++) {
        float * dst_data = (float *)((char *) dst->data + i1*nb1);
        ggml_fp16_t * wdata_kernel = wdata + i1*ne02*ne00;
        for (int i10 = 0; i10 < ne10; i10++) {
            const int i1n = i10*ne11;
            for (int i00 = 0; i00 < ne00; i00++) {
                float v = 0;
                // dot over input channels between permuted source and kernel
                ggml_vec_dot_f16(ne02, &v, 0,
                        (ggml_fp16_t *) wdata_src + i1n, 0,
                        (ggml_fp16_t *) wdata_kernel + i00*ne02, 0, 1);
                dst_data[i10*s0 + i00] += v;
            }
        }
    }
}

// f32-kernel variant of transposed 1-d convolution; same structure as above.
static void ggml_compute_forward_conv_transpose_1d_f32(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];
    const struct ggml_tensor * src1 = dst->src[1];

    GGML_ASSERT(src0->type == GGML_TYPE_F32);
    GGML_ASSERT(src1->type == GGML_TYPE_F32);
    GGML_ASSERT( dst->type == GGML_TYPE_F32);

    GGML_TENSOR_BINARY_OP_LOCALS

    const int ith = params->ith;
    const int nth = params->nth;

    const int nk = ne00*ne01*ne02;

    GGML_ASSERT(nb00 == sizeof(float));
    GGML_ASSERT(nb10 == sizeof(float));

    if (ith == 0) {
        memset(params->wdata, 0, params->wsize);

        // prepare kernel data (src0) from (K x Cout x Cin) to (Cin x K x Cout)
        {
            float * const wdata = (float *) params->wdata + 0;

            for (int64_t i02 = 0; i02 < ne02; i02++) {
                for (int64_t i01 = 0; i01 < ne01; i01++) {
                    const float * const src = (float *)((char *) src0->data + i02*nb02 + i01*nb01);
                    float * dst_data = wdata + i01*ne00*ne02;
                    for (int64_t i00 = 0; i00 < ne00; i00++) {
                        dst_data[i00*ne02 + i02] = src[i00];
                    }
                }
            }
        }

        // prepare source data (src1)
        {
            float * const wdata = (float *) params->wdata + nk;
            float * dst_data = wdata;

            for (int64_t i11 = 0; i11 < ne11; i11++) {
                const float * const src = (float *)((char *) src1->data + i11*nb11);
                for (int64_t i10 = 0; i10 < ne10; i10++) {
                    dst_data[i10*ne11 + i11] = src[i10];
                }
            }
        }

        // need to zero dst since we are accumulating into it
        memset(dst->data, 0, ggml_nbytes(dst));
    }
    ggml_barrier(params->threadpool);

    const int32_t s0 = ((const int32_t*)(dst->op_params))[0];

    // total rows in dst
    const int nr = ne1;

    // rows per thread
    const int dr = (nr + nth - 1)/nth;

    // row range for this thread
    const int ir0 = dr*ith;
    const int ir1 = MIN(ir0 + dr, nr);

    float * const wdata     = (float *) params->wdata + 0;
    float * const wdata_src = wdata + nk;

    for (int i1 = ir0; i1 < ir1; i1++) {
        float * dst_data = (float *)((char *) dst->data + i1*nb1);
        float * wdata_kernel = wdata + i1*ne02*ne00;
        for (int i10 = 0; i10 < ne10; i10++) {
            const int i1n = i10*ne11;
            for (int i00 = 0; i00 < ne00; i00++) {
                float v = 0;
                ggml_vec_dot_f32(ne02, &v, 0,
                        wdata_src + i1n, 0,
                        wdata_kernel + i00*ne02, 0, 1);
                dst_data[i10*s0 + i00] += v;
            }
        }
    }
}

// Dispatch conv_transpose_1d by kernel (src0) type.
static void ggml_compute_forward_conv_transpose_1d(
        const struct ggml_compute_params * params,
        struct ggml_tensor * dst) {

    const struct ggml_tensor * src0 = dst->src[0];
dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F16: + { + ggml_compute_forward_conv_transpose_1d_f16_f32(params, dst); + } break; + case GGML_TYPE_F32: + { + ggml_compute_forward_conv_transpose_1d_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_im2col_f32 +// src0: kernel [OC, IC, KH, KW] +// src1: image [N, IC, IH, IW] +// dst: result [N, OH, OW, IC*KH*KW] +static void ggml_compute_forward_im2col_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + GGML_TENSOR_BINARY_OP_LOCALS; + + const int32_t s0 = ((const int32_t *)(dst->op_params))[0]; + const int32_t s1 = ((const int32_t *)(dst->op_params))[1]; + const int32_t p0 = ((const int32_t *)(dst->op_params))[2]; + const int32_t p1 = ((const int32_t *)(dst->op_params))[3]; + const int32_t d0 = ((const int32_t *)(dst->op_params))[4]; + const int32_t d1 = ((const int32_t *)(dst->op_params))[5]; + const bool is_2D = ((const int32_t *)(dst->op_params))[6] == 1; + + const int ith = params->ith; + const int nth = params->nth; + + const int64_t N = is_2D ? ne13 : ne12; + const int64_t IC = is_2D ? ne12 : ne11; + const int64_t IH = is_2D ? ne11 : 1; + const int64_t IW = ne10; + + const int64_t KH = is_2D ? ne01 : 1; + const int64_t KW = ne00; + + const int64_t OH = is_2D ? ne2 : 1; + const int64_t OW = ne1; + + int ofs0 = is_2D ? nb13 : nb12; + int ofs1 = is_2D ? 
nb12 : nb11; + + GGML_ASSERT(nb10 == sizeof(float)); + + // im2col: [N, IC, IH, IW] => [N, OH, OW, IC*KH*KW] + { + float * const wdata = (float *) dst->data; + + for (int64_t in = 0; in < N; in++) { + for (int64_t ioh = 0; ioh < OH; ioh++) { // 1 + for (int64_t iow = 0; iow < OW; iow++) { + for (int64_t iic = ith; iic < IC; iic += nth) { + + // micro kernel + float * dst_data = wdata + (in*OH*OW + ioh*OW + iow)*(IC*KH*KW); // [IC, KH, KW] + const float * const src_data = (float *)((char *) src1->data + in*ofs0 + iic*ofs1); // [IH, IW] + + for (int64_t ikh = 0; ikh < KH; ikh++) { // 1 + for (int64_t ikw = 0; ikw < KW; ikw++) { + const int64_t iiw = iow*s0 + ikw*d0 - p0; + const int64_t iih = ioh*s1 + ikh*d1 - p1; + + if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) { + dst_data[iic*(KH*KW) + ikh*KW + ikw] = 0; + } else { + dst_data[iic*(KH*KW) + ikh*KW + ikw] = (src_data[iih*IW + iiw]); + } + } + } + } + } + } + } + } +} + + +// ggml_compute_forward_im2col_f16 +// src0: kernel [OC, IC, KH, KW] +// src1: image [N, IC, IH, IW] +// dst: result [N, OH, OW, IC*KH*KW] +static void ggml_compute_forward_im2col_f16( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(src0->type == GGML_TYPE_F16); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F16); + + GGML_TENSOR_BINARY_OP_LOCALS; + + const int32_t s0 = ((const int32_t *)(dst->op_params))[0]; + const int32_t s1 = ((const int32_t *)(dst->op_params))[1]; + const int32_t p0 = ((const int32_t *)(dst->op_params))[2]; + const int32_t p1 = ((const int32_t *)(dst->op_params))[3]; + const int32_t d0 = ((const int32_t *)(dst->op_params))[4]; + const int32_t d1 = ((const int32_t *)(dst->op_params))[5]; + const bool is_2D = ((const int32_t *)(dst->op_params))[6] == 1; + + const int ith = params->ith; + const int nth = params->nth; + + const int64_t N = is_2D ? 
ne13 : ne12; + const int64_t IC = is_2D ? ne12 : ne11; + const int64_t IH = is_2D ? ne11 : 1; + const int64_t IW = ne10; + + const int64_t KH = is_2D ? ne01 : 1; + const int64_t KW = ne00; + + const int64_t OH = is_2D ? ne2 : 1; + const int64_t OW = ne1; + + int ofs0 = is_2D ? nb13 : nb12; + int ofs1 = is_2D ? nb12 : nb11; + + GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); + GGML_ASSERT(nb10 == sizeof(float)); + + // im2col: [N, IC, IH, IW] => [N, OH, OW, IC*KH*KW] + { + ggml_fp16_t * const wdata = (ggml_fp16_t *) dst->data; + + for (int64_t in = 0; in < N; in++) { + for (int64_t ioh = 0; ioh < OH; ioh++) { // 1 + for (int64_t iow = 0; iow < OW; iow++) { + for (int64_t iic = ith; iic < IC; iic += nth) { + + // micro kernel + ggml_fp16_t * dst_data = wdata + (in*OH*OW + ioh*OW + iow)*(IC*KH*KW); // [IC, KH, KW] + const float * const src_data = (float *)((char *) src1->data + in*ofs0 + iic*ofs1); // [IH, IW] + + for (int64_t ikh = 0; ikh < KH; ikh++) { // 1 + for (int64_t ikw = 0; ikw < KW; ikw++) { + const int64_t iiw = iow*s0 + ikw*d0 - p0; + const int64_t iih = ioh*s1 + ikh*d1 - p1; + + if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) { + dst_data[iic*(KH*KW) + ikh*KW + ikw] = 0; + } else { + dst_data[iic*(KH*KW) + ikh*KW + ikw] = GGML_FP32_TO_FP16(src_data[iih*IW + iiw]); + } + } + } + } + } + } + } + } +} + +static void ggml_compute_forward_im2col( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + switch (dst->type) { + case GGML_TYPE_F16: + { + ggml_compute_forward_im2col_f16(params, dst); + } break; + case GGML_TYPE_F32: + { + ggml_compute_forward_im2col_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_im2col_back_f32 + +static void ggml_compute_forward_im2col_back_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(src1->type == 
GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + GGML_TENSOR_BINARY_OP_LOCALS; + + const int32_t s0 = ((const int32_t *)(dst->op_params))[0]; + const int32_t s1 = ((const int32_t *)(dst->op_params))[1]; + const int32_t p0 = ((const int32_t *)(dst->op_params))[2]; + const int32_t p1 = ((const int32_t *)(dst->op_params))[3]; + const int32_t d0 = ((const int32_t *)(dst->op_params))[4]; + const int32_t d1 = ((const int32_t *)(dst->op_params))[5]; + const bool is_2D = ((const int32_t *)(dst->op_params))[6] == 1; + + const int ith = params->ith; + const int nth = params->nth; + + const int64_t N = is_2D ? ne3 : ne2; + const int64_t IC = is_2D ? ne2 : ne1; + const int64_t IH = is_2D ? ne1 : 1; + const int64_t IW = ne0; + + const int64_t KH = is_2D ? ne01 : 1; + const int64_t KW = ne00; + + const int64_t OH = is_2D ? ne12 : 1; + const int64_t OW = ne11; + + int ofs0 = is_2D ? nb3 : nb2; + int ofs1 = is_2D ? nb2 : nb1; + + GGML_ASSERT(nb0 == sizeof(float)); + + // im2col: [N, IC, IH, IW] => [N, OH, OW, IC*KH*KW] + { + float * const wdata = (float *) dst->data; + + for (int64_t in = 0; in < N; in++) { + for (int64_t iic = ith; iic < IC; iic += nth) { + for (int64_t iih = 0; iih < IH; iih++) { + for (int64_t iiw = 0; iiw < IW; iiw++) { + + // micro kernel + float grad = 0.0f; + for (int64_t ikh = 0; ikh < KH; ikh++) { + for (int64_t ikw = 0; ikw < KW; ikw++) { + // For s0 > 1 some values were skipped over in the forward pass. + // These values have tmpw % s0 != 0 and need to be skipped in the backwards pass as well. + const int64_t tmpw = (iiw + p0 - ikw*d0); + if (tmpw % s0 != 0) { + continue; + } + const int64_t iow = tmpw / s0; + + // Equivalent logic as above except for s1. 
+ int64_t ioh; + if (is_2D) { + const int64_t tmph = iih + p1 - ikh*d1; + + if (tmph % s1 != 0) { + continue; + } + + ioh = tmph / s1; + } else { + ioh = 0; + } + + if (iow < 0 || iow >= OW || ioh < 0 || ioh >= OH) { + continue; + } + + const float * const src_data = (const float *) src1->data + + (in*OH*OW + ioh*OW + iow)*(IC*KH*KW); // [IC, KH, KW] + grad += src_data[iic*(KH*KW) + ikh*KW + ikw]; + } + } + float * dst_data = (float *)((char *) wdata + (in*ofs0 + iic*ofs1)); // [IH, IW] + dst_data[iih*IW + iiw] = grad; + } + } + } + } + } +} + +// ggml_compute_forward_conv_transpose_2d + +static void ggml_compute_forward_conv_transpose_2d( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(src0->type == GGML_TYPE_F16); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + GGML_TENSOR_BINARY_OP_LOCALS + + const int ith = params->ith; + const int nth = params->nth; + + const int nk = ne00*ne01*ne02*ne03; + + GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); + GGML_ASSERT(nb10 == sizeof(float)); + + if (ith == 0) { + memset(params->wdata, 0, params->wsize); + + // permute kernel data (src0) from (Kw x Kh x Cout x Cin) to (Cin x Kw x Kh x Cout) + { + ggml_fp16_t * const wdata = (ggml_fp16_t *) params->wdata + 0; + + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + const ggml_fp16_t * const src = (ggml_fp16_t *)((char *) src0->data + i03*nb03 + i02*nb02); + ggml_fp16_t * dst_data = wdata + i02*ne01*ne00*ne03; + for (int64_t i01 = 0; i01 < ne01; i01++) { + for (int64_t i00 = 0; i00 < ne00; i00++) { + dst_data[i01*ne00*ne03 + i00*ne03 + i03] = src[i01 * ne00 + i00]; + } + } + } + } + } + + // permute source data (src1) from (Sw x Sh x Cin) to (Cin x Sw x Sh) + { + ggml_fp16_t * const wdata = (ggml_fp16_t *) params->wdata + nk; + for (int i12 = 0; i12 < ne12; i12++) 
{ + for (int i11 = 0; i11 < ne11; i11++) { + const float * const src = (float *)((char *) src1->data + i12*nb12 + i11*nb11); + ggml_fp16_t * dst_data = wdata + i11*ne10*ne12; + for (int i10 = 0; i10 < ne10; i10++) { + dst_data[i10*ne12 + i12] = GGML_FP32_TO_FP16(src[i10]); + } + } + } + } + + memset(dst->data, 0, ggml_nbytes(dst)); + } + ggml_barrier(params->threadpool); + + const int32_t stride = ggml_get_op_params_i32(dst, 0); + + // total patches in dst + const int np = ne2; + + // patches per thread + const int dp = (np + nth - 1)/nth; + + // patch range for this thread + const int ip0 = dp*ith; + const int ip1 = MIN(ip0 + dp, np); + + ggml_fp16_t * const wdata = (ggml_fp16_t *) params->wdata + 0; + ggml_fp16_t * const wdata_src = wdata + nk; + + for (int i2 = ip0; i2 < ip1; i2++) { // Cout + float * dst_data = (float *)((char *) dst->data + i2*nb2); + ggml_fp16_t * wdata_kernel = wdata + i2*ne01*ne00*ne03; + for (int i11 = 0; i11 < ne11; i11++) { + for (int i10 = 0; i10 < ne10; i10++) { + const int i1n = i11*ne10*ne12 + i10*ne12; + for (int i01 = 0; i01 < ne01; i01++) { + for (int i00 = 0; i00 < ne00; i00++) { + float v = 0; + ggml_vec_dot_f16(ne03, &v, 0, + wdata_src + i1n, 0, + wdata_kernel + i01*ne00*ne03 + i00*ne03, 0, 1); + dst_data[(i11*stride + i01)*ne0 + i10*stride + i00] += v; + } + } + } + } + } +} + +// ggml_compute_forward_pool_1d_sk_p0 + +static void ggml_compute_forward_pool_1d_sk_p0( + const struct ggml_compute_params * params, + const enum ggml_op_pool op, + const int k, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src = dst->src[0]; + + assert(src->type == GGML_TYPE_F32 || src->type == GGML_TYPE_F16); + + if (params->ith != 0) { + return; + } + + const char * cdata = (const char *)src->data; + const char * const data_end = cdata + ggml_nbytes(src); + float * drow = (float *)dst->data; + + const int64_t rs = dst->ne[0]; + + while (cdata < data_end) { + const void * srow = (const void *)cdata; + int j = 0; + for (int64_t i = 0; i 
< rs; ++i) { + switch (op) { + case GGML_OP_POOL_AVG: drow[i] = 0; break; + case GGML_OP_POOL_MAX: drow[i] = -FLT_MAX; break; + case GGML_OP_POOL_COUNT: GGML_ABORT("fatal error"); + } + for (int ki = 0; ki < k; ++ki) { + const float srow_j = (src->type == GGML_TYPE_F32) ? ((const float*)srow)[j] : GGML_FP16_TO_FP32(((const ggml_fp16_t*)srow)[j]); + switch (op) { + case GGML_OP_POOL_AVG: drow[i] += srow_j; break; + case GGML_OP_POOL_MAX: if (srow_j > drow[i]) drow[i] = srow_j; break; + case GGML_OP_POOL_COUNT: GGML_ABORT("fatal error"); + } + ++j; + } + switch (op) { + case GGML_OP_POOL_AVG: drow[i] /= k; break; + case GGML_OP_POOL_MAX: break; + case GGML_OP_POOL_COUNT: GGML_ABORT("fatal error"); + } + } + + cdata += src->nb[1]; + drow += rs; + } +} + +// ggml_compute_forward_pool_1d + +static void ggml_compute_forward_pool_1d( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const int32_t * opts = (const int32_t *)dst->op_params; + enum ggml_op_pool op = opts[0]; + const int k0 = opts[1]; + const int s0 = opts[2]; + const int p0 = opts[3]; + GGML_ASSERT(p0 == 0); // padding not supported + GGML_ASSERT(k0 == s0); // only s = k supported + + ggml_compute_forward_pool_1d_sk_p0(params, op, k0, dst); +} + +// ggml_compute_forward_pool_2d + +static void ggml_compute_forward_pool_2d( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src = dst->src[0]; + + assert(src->type == GGML_TYPE_F32 || src->type == GGML_TYPE_F16); + + if (params->ith != 0) { + return; + } + + const int32_t * opts = (const int32_t *)dst->op_params; + enum ggml_op_pool op = opts[0]; + const int k0 = opts[1]; + const int k1 = opts[2]; + const int s0 = opts[3]; + const int s1 = opts[4]; + const int p0 = opts[5]; + const int p1 = opts[6]; + const char * cdata = (const char*)src->data; + const char * const data_end = cdata + ggml_nbytes(src); + + const int64_t px = dst->ne[0]; + const int64_t py = dst->ne[1]; + const 
int64_t pa = px * py; + + float * dplane = (float *)dst->data; + + const int ka = k0 * k1; + const int offset0 = -p0; + const int offset1 = -p1; + + while (cdata < data_end) { + for (int oy = 0; oy < py; ++oy) { + float * const drow = dplane + oy * px; + for (int ox = 0; ox < px; ++ox) { + float * const out = drow + ox; + switch (op) { + case GGML_OP_POOL_AVG: *out = 0; break; + case GGML_OP_POOL_MAX: *out = -FLT_MAX; break; + case GGML_OP_POOL_COUNT: GGML_ABORT("fatal error"); + } + + const int ix = offset0 + ox * s0; + const int iy = offset1 + oy * s1; + + for (int ky = 0; ky < k1; ++ky) { + if (iy + ky < 0 || iy + ky >= src->ne[1]) continue; + const void * srow = (const void *)(cdata + src->nb[1] * (iy + ky)); + for (int kx = 0; kx < k0; ++kx) { + int j = ix + kx; + if (j < 0 || j >= src->ne[0]) continue; + const float srow_j = (src->type == GGML_TYPE_F32) ? ((const float*)srow)[j] : GGML_FP16_TO_FP32(((const ggml_fp16_t*)srow)[j]); + switch (op) { + case GGML_OP_POOL_AVG: *out += srow_j; break; + case GGML_OP_POOL_MAX: if (srow_j > *out) *out = srow_j; break; + case GGML_OP_POOL_COUNT: GGML_ABORT("fatal error"); + } + } + } + switch (op) { + case GGML_OP_POOL_AVG: *out /= ka; break; + case GGML_OP_POOL_MAX: break; + case GGML_OP_POOL_COUNT: GGML_ABORT("fatal error"); + } + } + } + + cdata += src->nb[2]; + dplane += pa; + } +} + +// ggml_compute_forward_pool_2d_back + +static void ggml_compute_forward_pool_2d_back( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src = dst->src[0]; + const struct ggml_tensor * dstf = dst->src[1]; // forward tensor of dst + + assert(dst->type == GGML_TYPE_F32 || dst->type == GGML_TYPE_F16); + + if (params->ith != 0) { + return; + } + + const int32_t * opts = (const int32_t *)dst->op_params; + enum ggml_op_pool op = opts[0]; + const int k0 = opts[1]; + const int k1 = opts[2]; + const int s0 = opts[3]; + const int s1 = opts[4]; + const int p0 = opts[5]; + const int p1 = 
opts[6]; + + char * cdata = (char *) dst->data; + const char * cdataf = (const char *) dstf->data; + const char * const data_end = cdata + ggml_nbytes(dst); + + GGML_ASSERT(params->ith == 0); + memset(cdata, 0, ggml_nbytes(dst)); + + const int64_t px = src->ne[0]; + const int64_t py = src->ne[1]; + const int64_t pa = px * py; + + const float * splane = (const float *) src->data; + + const int ka = k0 * k1; + const int offset0 = -p0; + const int offset1 = -p1; + + while (cdata < data_end) { + for (int oy = 0; oy < py; ++oy) { + const float * const srow = splane + oy * px; + for (int ox = 0; ox < px; ++ox) { + const float grad0 = srow[ox]; + + const int ix = offset0 + ox * s0; + const int iy = offset1 + oy * s1; + + if (op == GGML_OP_POOL_MAX) { + float maxval = -FLT_MAX; + int kxmax = -1; + int kymax = -1; + + for (int ky = 0; ky < k1; ++ky) { + if (iy + ky < 0 || iy + ky >= dst->ne[1]) { + continue; + } + const void * drowf = (const void *)(cdataf + dst->nb[1] * (iy + ky)); + for (int kx = 0; kx < k0; ++kx) { + int j = ix + kx; + if (j < 0 || j >= dst->ne[0]) { + continue; + } + + const float val = dst->type == GGML_TYPE_F32 ? 
+ ((const float *) drowf)[j] : GGML_FP16_TO_FP32(((const ggml_fp16_t *) drowf)[j]); + if (val <= maxval) { + continue; + } + + maxval = val; + kxmax = kx; + kymax = ky; + } + } + + if (kxmax == -1 || kymax == -1) { + continue; + } + + void * drow = (void *)(cdata + dst->nb[1] * (iy + kymax)); + const int j = ix + kxmax; + if (dst->type == GGML_TYPE_F32) { + ((float *) drow)[j] += grad0; + } else { + ((ggml_fp16_t *) drow)[j] = GGML_FP32_TO_FP16(grad0 + GGML_FP16_TO_FP32(((const ggml_fp16_t *) drow)[j])); + } + } else if (op == GGML_OP_POOL_AVG) { + const float grad = grad0 / ka; + + for (int ky = 0; ky < k1; ++ky) { + if (iy + ky < 0 || iy + ky >= dst->ne[1]) { + continue; + } + void * drow = (void *)(cdata + dst->nb[1] * (iy + ky)); + for (int kx = 0; kx < k0; ++kx) { + int j = ix + kx; + if (j < 0 || j >= dst->ne[0]) { + continue; + } + + if (dst->type == GGML_TYPE_F32) { + ((float *) drow)[j] += grad; + } else { + ((ggml_fp16_t *) drow)[j] += GGML_FP32_TO_FP16(grad); + } + } + } + } else { + GGML_ASSERT(false); + } + } + } + + cdata += dst->nb[2]; + cdataf += dst->nb[2]; + splane += pa; + } +} + +// ggml_compute_forward_upscale + +static void ggml_compute_forward_upscale_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + const int ith = params->ith; + const int nth = params->nth; + + GGML_TENSOR_UNARY_OP_LOCALS + + const float sf0 = (float)ne0/src0->ne[0]; + const float sf1 = (float)ne1/src0->ne[1]; + const float sf2 = (float)ne2/src0->ne[2]; + const float sf3 = (float)ne3/src0->ne[3]; + + // TODO: optimize + + for (int64_t i3 = 0; i3 < ne3; i3++) { + const int64_t i03 = i3 / sf3; + for (int64_t i2 = ith; i2 < ne2; i2 += nth) { + const int64_t i02 = i2 / sf2; + for (int64_t i1 = 0; i1 < ne1; i1++) { + const int64_t i01 = i1 / sf1; + for (int64_t i0 = 0; i0 < ne0; i0++) { + const int64_t i00 = i0 / sf0; + + const float * x = (float 
*)((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + float * y = (float *)((char *) dst->data + i0*nb0 + i1*nb1 + i2*nb2 + i3*nb3); + + *y = *x; + } + } + } + } +} + +static void ggml_compute_forward_upscale( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_upscale_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + + +// ggml_compute_forward_pad + +static void ggml_compute_forward_pad_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + GGML_ASSERT(src0->nb[0] == sizeof(float)); + GGML_ASSERT( dst->nb[0] == sizeof(float)); + + const int ith = params->ith; + const int nth = params->nth; + + GGML_TENSOR_UNARY_OP_LOCALS + + float * dst_ptr = (float *) dst->data; + + // TODO: optimize + + for (int64_t i2 = 0; i2 < ne2; ++i2) { + for (int64_t i1 = ith; i1 < ne1; i1 += nth) { + for (int64_t i0 = 0; i0 < ne0; ++i0) { + for (int64_t i3 = 0; i3 < ne3; ++i3) { + const int64_t dst_idx = i3*(ne0*ne1*ne2) + i2*(ne0*ne1) + i1*ne0 + i0; + + const float * src_ptr = (const float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); + + if (i0 < ne00 && i1 < ne01 && i2 < ne02 && i3 < ne03) { + dst_ptr[dst_idx] = *src_ptr; + } else { + dst_ptr[dst_idx] = 0; + } + } + } + } + } +} + +static void ggml_compute_forward_pad( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_pad_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +static void ggml_compute_forward_unpad_f32( + const struct ggml_compute_params *params, + struct ggml_tensor *dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + 
GGML_ASSERT(src0->nb[0] == sizeof(float)); + GGML_ASSERT( dst->nb[0] == sizeof(float)); + + const int ith = params->ith; + const int nth = params->nth; + + GGML_TENSOR_UNARY_OP_LOCALS + + float * dst_ptr = (float *) dst->data; + + // TODO: optimize + + for (int64_t i2 = 0; i2 < ne2; ++i2) { + for (int64_t i1 = ith; i1 < ne1; i1 += nth) { + for (int64_t i0 = 0; i0 < ne0; ++i0) { + for (int64_t i3 = 0; i3 < ne3; ++i3) { + const int64_t dst_idx = i3*(ne0*ne1*ne2) + i2*(ne0*ne1) + i1*ne0 + i0; + + const float * src_ptr = (const float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); + + if (i0 < ne00 && i1 < ne01 && i2 < ne02 && i3 < ne03) { + dst_ptr[dst_idx] = *src_ptr; + } + } + } + } + } +} + +static void ggml_compute_forward_unpad( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_unpad_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_arange + +static void ggml_compute_forward_arange_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + GGML_ASSERT(dst->nb[0] == sizeof(float)); + + const int ith = params->ith; + const int nth = params->nth; + + const float start = ggml_get_op_params_f32(dst, 0); + const float stop = ggml_get_op_params_f32(dst, 1); + const float step = ggml_get_op_params_f32(dst, 2); + + const int64_t steps = (int64_t) ceilf((stop - start) / step); + + GGML_ASSERT(ggml_nelements(dst) == steps); + + for (int64_t i = ith; i < steps; i+= nth) { + float value = start + step * i; + ((float *)dst->data)[i] = value; + } +} + +static void ggml_compute_forward_arange( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + switch (dst->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_arange_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + 
+static void ggml_compute_forward_timestep_embedding_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + GGML_ASSERT(src0->nb[0] == sizeof(float)); + + const int ith = params->ith; + const int nth = params->nth; + + GGML_TENSOR_UNARY_OP_LOCALS + + const int dim = ggml_get_op_params_i32(dst, 0); + const int max_period = ggml_get_op_params_i32(dst, 1); + + int half = dim / 2; + + for (int64_t i = 0; i < ne00; i++) { + float * embed_data = (float *)((char *) dst->data + i*nb1); + for (int64_t j = ith; j < half; j += nth) { + float timestep = ((float *)src0->data)[i]; + float freq = (float)expf(-logf(max_period) * j / half); + float arg = timestep * freq; + embed_data[j] = cosf(arg); + embed_data[j + half] = sinf(arg); + } + if (dim % 2 != 0 && ith == 0) { + embed_data[dim] = 0.f; + } + } +} + +static void ggml_compute_forward_timestep_embedding( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_timestep_embedding_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_argsort + +static void ggml_compute_forward_argsort_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + GGML_TENSOR_UNARY_OP_LOCALS + + GGML_ASSERT(nb0 == sizeof(float)); + + const int ith = params->ith; + const int nth = params->nth; + + const int64_t nr = ggml_nrows(src0); + + enum ggml_sort_order order = (enum ggml_sort_order) ggml_get_op_params_i32(dst, 0); + + for (int64_t i = ith; i < nr; i += nth) { + int32_t * dst_data = (int32_t *)((char *) dst->data + i*nb1); + const float * src_data = (float *)((char *) src0->data + i*nb01); + + for (int64_t j = 0; j < ne0; j++) { + dst_data[j] = j; + } + + // C doesn't have a functional sort, 
so we do a bubble sort instead + for (int64_t j = 0; j < ne0; j++) { + for (int64_t k = j + 1; k < ne0; k++) { + if ((order == GGML_SORT_ORDER_ASC && src_data[dst_data[j]] > src_data[dst_data[k]]) || + (order == GGML_SORT_ORDER_DESC && src_data[dst_data[j]] < src_data[dst_data[k]])) { + int32_t tmp = dst_data[j]; + dst_data[j] = dst_data[k]; + dst_data[k] = tmp; + } + } + } + } +} + +static void ggml_compute_forward_argsort( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_argsort_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_flash_attn_ext + +static void ggml_compute_forward_flash_attn_ext_f16( + const struct ggml_compute_params * params, + const struct ggml_tensor * q, + const struct ggml_tensor * k, + const struct ggml_tensor * v, + const struct ggml_tensor * mask, + struct ggml_tensor * dst) { + + GGML_TENSOR_LOCALS(int64_t, neq, q, ne) + GGML_TENSOR_LOCALS(size_t, nbq, q, nb) + GGML_TENSOR_LOCALS(int64_t, nek, k, ne) + GGML_TENSOR_LOCALS(size_t, nbk, k, nb) + GGML_TENSOR_LOCALS(int64_t, nev, v, ne) + GGML_TENSOR_LOCALS(size_t, nbv, v, nb) + GGML_TENSOR_LOCALS(int64_t, ne, dst, ne) + GGML_TENSOR_LOCALS(size_t, nb, dst, nb) + + const int ith = params->ith; + const int nth = params->nth; + + const int64_t D = neq0; + const int64_t N = neq1; + + GGML_ASSERT(ne0 == D); + GGML_ASSERT(ne2 == N); + + // input tensor rows must be contiguous + GGML_ASSERT(nbq0 == ggml_type_size(q->type)); + GGML_ASSERT(nbk0 == ggml_type_size(k->type)); + GGML_ASSERT(nbv0 == ggml_type_size(v->type)); + + GGML_ASSERT(neq0 == D); + GGML_ASSERT(nek0 == D); + GGML_ASSERT(nev0 == D); + + GGML_ASSERT(neq1 == N); + GGML_ASSERT(nev0 == D); + + // dst cannot be transposed or permuted + GGML_ASSERT(nb0 == sizeof(float)); + GGML_ASSERT(nb0 <= nb1); + GGML_ASSERT(nb1 <= nb2); + 
GGML_ASSERT(nb2 <= nb3); + + // broadcast factors + const int64_t rk2 = neq2/nek2; + const int64_t rk3 = neq3/nek3; + + const int64_t rv2 = neq2/nev2; + const int64_t rv3 = neq3/nev3; + + // parallelize by q rows using ggml_vec_dot_f32 + + // total rows in q + const int nr = neq1*neq2*neq3; + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + float scale = 1.0f; + float max_bias = 0.0f; + float logit_softcap = 0.0f; + + memcpy(&scale, (float *) dst->op_params + 0, sizeof(float)); + memcpy(&max_bias, (float *) dst->op_params + 1, sizeof(float)); + memcpy(&logit_softcap, (float *) dst->op_params + 2, sizeof(float)); + + if (logit_softcap != 0) { + scale /= logit_softcap; + } + + const uint32_t n_head = neq2; + const uint32_t n_head_log2 = 1u << (uint32_t) floor(log2(n_head)); + + const float m0 = powf(2.0f, -(max_bias ) / n_head_log2); + const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_head_log2); + + enum ggml_type const k_vec_dot_type = type_traits[k->type].vec_dot_type; + ggml_from_float_t const q_to_vec_dot = type_traits[k_vec_dot_type].from_float; + ggml_vec_dot_t const kq_vec_dot = type_traits[k->type].vec_dot; + ggml_to_float_t const v_to_float = type_traits[v->type].to_float; + + // loop over n_batch and n_head + for (int ir = ir0; ir < ir1; ++ir) { + // q indices + const int iq3 = ir/(neq2*neq1); + const int iq2 = (ir - iq3*neq2*neq1)/neq1; + const int iq1 = (ir - iq3*neq2*neq1 - iq2*neq1); + + const uint32_t h = iq2; // head index + const float slope = (max_bias > 0.0f) ? h < n_head_log2 ? 
powf(m0, h + 1) : powf(m1, 2*(h - n_head_log2) + 1) : 1.0f; + + float S = 0.0f; // sum + float M = -INFINITY; // maximum KQ value + + float * VKQ32 = (float *) params->wdata + ith*(3*D + CACHE_LINE_SIZE_F32); // FP32 VKQ accumulator + float * V32 = (VKQ32 + 1*D); // (temporary) FP32 V buffer + ggml_fp16_t * VKQ16 = (ggml_fp16_t *) (VKQ32 + 1*D); // (temporary) FP16 VKQ accumulator + ggml_fp16_t * Q_q = (ggml_fp16_t *) (VKQ32 + 2*D); // (temporary) buffer for Q converted to quantized/FP16 + + if (v->type == GGML_TYPE_F16) { + memset(VKQ16, 0, D*sizeof(ggml_fp16_t)); + } else { + memset(VKQ32, 0, D*sizeof(float)); + } + + const ggml_fp16_t * mp = mask ? (ggml_fp16_t *)((char *) mask->data + iq1*mask->nb[1]) : NULL; + + // k indices + const int ik3 = iq3 / rk3; + const int ik2 = iq2 / rk2; + + // v indices + const int iv3 = iq3 / rv3; + const int iv2 = iq2 / rv2; + + const float * pq = (const float *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3)); + q_to_vec_dot(pq, Q_q, D); + + // online softmax / attention + // loop over n_kv and n_head_kv + // ref: https://arxiv.org/pdf/2112.05682.pdf + for (int64_t ic = 0; ic < nek1; ++ic) { + const float mv = mp ? 
slope*GGML_FP16_TO_FP32(mp[ic]) : 0.0f; + if (mv == -INFINITY) { + continue; + } + + float s; // KQ value + + const char * k_data = (const char *) k->data + ( ic*nbk1 + ik2*nbk2 + ik3*nbk3); + kq_vec_dot(D, &s, 0, k_data, 0, Q_q, 0, 1); + + s = s*scale; // scale KQ value + + if (logit_softcap != 0.0f) { + s = logit_softcap*tanhf(s); + } + + s += mv; // apply mask + + const float Mold = M; + + float ms = 1.0f; // upon new higher max val, scale VKQ and KQ sum with this value + float vs = 1.0f; // post-softmax KQ value, expf(s - M) + + const char * v_data = ((const char *) v->data + (ic*nbv1 + iv2*nbv2 + iv3*nbv3)); + + if (v->type == GGML_TYPE_F16) { + if (s > M) { + // s is new maximum, ms < 1.0f, vs == expf(s - s) == 1.0f + M = s; + ms = expf(Mold - M); + + // V = V*expf(Mold - M) + ggml_vec_scale_f16(D, VKQ16, ms); + } else { + // no new maximum, ms == 1.0f, vs != 1.0f + vs = expf(s - M); + } + + // V += v*expf(s - M) + ggml_vec_mad_f16(D, VKQ16, (const ggml_fp16_t *) v_data, vs); + } else { + if (s > M) { + // s is new maximum, ms < 1.0f, vs == expf(s - s) == 1.0f + M = s; + ms = expf(Mold - M); + + // V = V*expf(Mold - M) + ggml_vec_scale_f32(D, VKQ32, ms); + } else { + // no new maximum, ms == 1.0f, vs != 1.0f + vs = expf(s - M); + } + + v_to_float(v_data, V32, D); + + // V += v*expf(s - M) + ggml_vec_mad_f32(D, VKQ32, V32, vs); + } + + S = S*ms + vs; // scale and increment sum with partial sum + } + + if (v->type == GGML_TYPE_F16) { + for (int64_t d = 0; d < D; ++d) { + VKQ32[d] = GGML_FP16_TO_FP32(VKQ16[d]); + } + } + + // V /= S + const float S_inv = 1.0f/S; + ggml_vec_scale_f32(D, VKQ32, S_inv); + + // dst indices + const int i1 = iq1; + const int i2 = iq2; + const int i3 = iq3; + + // original + //memcpy((char *) dst->data + (i1*nb1 + i2*nb2 + i3*nb3), V, nev0*sizeof(float)); + + // permute(0, 2, 1, 3) + memcpy((char *) dst->data + (i3*ne2*ne1 + i2 + i1*ne1)*nb1, VKQ32, nb1); + } +} + +static void ggml_compute_forward_flash_attn_ext( + const struct 
ggml_compute_params * params, + const struct ggml_tensor * q, + const struct ggml_tensor * k, + const struct ggml_tensor * v, + const struct ggml_tensor * mask, + struct ggml_tensor * dst) { + switch (dst->op_params[3]) { + case GGML_PREC_DEFAULT: + case GGML_PREC_F32: + { + // uses F32 accumulators + ggml_compute_forward_flash_attn_ext_f16(params, q, k, v, mask, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_flash_attn_back + +static void ggml_compute_forward_flash_attn_back_f32( + const struct ggml_compute_params * params, + const bool masked, + struct ggml_tensor * dst) { + + const struct ggml_tensor * q = dst->src[0]; + const struct ggml_tensor * k = dst->src[1]; + const struct ggml_tensor * v = dst->src[2]; + const struct ggml_tensor * d = dst->src[3]; + + GGML_TENSOR_LOCALS(int64_t, neq, q, ne) + GGML_TENSOR_LOCALS(size_t, nbq, q, nb) + GGML_TENSOR_LOCALS(int64_t, nek, k, ne) + GGML_TENSOR_LOCALS(size_t, nbk, k, nb) + GGML_TENSOR_LOCALS(int64_t, nev, v, ne) + GGML_TENSOR_LOCALS(size_t, nbv, v, nb) + GGML_TENSOR_LOCALS(int64_t, ned, d, ne) + GGML_TENSOR_LOCALS(size_t, nbd, d, nb) + GGML_TENSOR_LOCALS(int64_t, ne, dst, ne) + GGML_TENSOR_LOCALS(size_t, nb, dst, nb) + + const int ith = params->ith; + const int nth = params->nth; + + const int64_t D = neq0; + const int64_t N = neq1; + const int64_t P = nek1 - N; + const int64_t M = P + N; + + const int Mup = ggml_up(M, GGML_SOFT_MAX_UNROLL); + const int mxDM = MAX(D, Mup); + + // GGML_ASSERT(ne0 == D); + // GGML_ASSERT(ne1 == N); + GGML_ASSERT(P >= 0); + + GGML_ASSERT(nbq0 == sizeof(float)); + GGML_ASSERT(nbk0 == sizeof(float)); + GGML_ASSERT(nbv0 == sizeof(float)); + + GGML_ASSERT(neq0 == D); + GGML_ASSERT(nek0 == D); + GGML_ASSERT(nev1 == D); + GGML_ASSERT(ned0 == D); + + GGML_ASSERT(neq1 == N); + GGML_ASSERT(nek1 == N + P); + GGML_ASSERT(nev1 == D); + GGML_ASSERT(ned1 == N); + + // dst cannot be transposed or permuted + GGML_ASSERT(nb0 == sizeof(float)); + 
GGML_ASSERT(nb0 <= nb1); + GGML_ASSERT(nb1 <= nb2); + GGML_ASSERT(nb2 <= nb3); + + if (ith == 0) { + memset(dst->data, 0, nb0*ne0*ne1*ne2*ne3); + } + ggml_barrier(params->threadpool); + + const int64_t elem_q = ggml_nelements(q); + const int64_t elem_k = ggml_nelements(k); + + enum ggml_type result_type = dst->type; + GGML_ASSERT(ggml_blck_size(result_type) == 1); + const size_t tsize = ggml_type_size(result_type); + + const size_t offs_q = 0; + const size_t offs_k = offs_q + GGML_PAD(elem_q * tsize, GGML_MEM_ALIGN); + const size_t offs_v = offs_k + GGML_PAD(elem_k * tsize, GGML_MEM_ALIGN); + + void * grad_q = (char *) dst->data; + void * grad_k = (char *) dst->data + offs_k; + void * grad_v = (char *) dst->data + offs_v; + + const size_t nbgq1 = nb0*neq0; + const size_t nbgq2 = nb0*neq0*neq1; + const size_t nbgq3 = nb0*neq0*neq1*neq2; + + const size_t nbgk1 = nb0*nek0; + const size_t nbgk2 = nb0*nek0*nek1; + const size_t nbgk3 = nb0*nek0*nek1*neq2; + + const size_t nbgv1 = nb0*nev0; + const size_t nbgv2 = nb0*nev0*nev1; + const size_t nbgv3 = nb0*nev0*nev1*neq2; + + // parallelize by k rows using ggml_vec_dot_f32 + + // total rows in k + const int nr = nek2*nek3; + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + const float scale = 1.0f/sqrtf(D); + + //printf("P=%d N=%d D=%d ir0=%d ir1=%d scale = %f\n", P, N, D, ir0, ir1, scale); + + // how often k2 (and v2) is repeated in q2 + int nrep = neq2/nek2; + + for (int ir = ir0; ir < ir1; ++ir) { + // q indices + const int ik3 = ir/(nek2); + const int ik2 = ir - ik3*nek2; + + const int iq3 = ik3; + const int id3 = ik3; + const int iv3 = ik3; + const int iv2 = ik2; + + for (int irep = 0; irep < nrep; ++irep) { + const int iq2 = ik2 + irep*nek2; + const int id2 = iq2; + + // (ik2 + irep*nek2) % nek2 == ik2 + for (int iq1 = 0; iq1 < neq1; ++iq1) { + const int id1 = iq1; + + // not sure about CACHE_LINE_SIZE_F32.. 
+ // - maybe it must not be multiplied by 2 and excluded from .. in SM 1*(..) offset? + float * S = (float *) params->wdata + ith*2*(mxDM + CACHE_LINE_SIZE_F32) + 0*(mxDM+CACHE_LINE_SIZE_F32); + float * SM = (float *) params->wdata + ith*2*(mxDM + CACHE_LINE_SIZE_F32) + 1*(mxDM+CACHE_LINE_SIZE_F32); + + for (int i = M; i < Mup; ++i) { + S[i] = -INFINITY; + } + + const int64_t masked_begin = masked ? (P + iq1 + 1) : M; + for (int64_t ic = 0; ic < masked_begin; ++ic) { + // k indices + const int ik1 = ic; + + // S indices + const int i1 = ik1; + + ggml_vec_dot_f32(neq0, + S + i1, 0, + (float *) ((char *) k->data + (ik1*nbk1 + ik2*nbk2 + ik3*nbk3)), 0, + (float *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3)), 0, 1); + } + + // scale + ggml_vec_scale_f32(masked_begin, S, scale); + + for (int64_t i = masked_begin; i < M; i++) { + S[i] = -INFINITY; + } + + // softmax + // exclude known -INF S[..] values from max and loop + // dont forget to set their SM values to zero + { + float max = -INFINITY; + ggml_vec_max_f32(masked_begin, &max, S); + + ggml_float sum = 0.0; + { +#ifdef GGML_SOFT_MAX_ACCELERATE + max = -max; + vDSP_vsadd(SM, 1, &max, SM, 1, Mup); + vvexpf(SM, SM, &Mup); + ggml_vec_sum_f32(Mup, &sum, SM); +#else + sum = ggml_vec_soft_max_f32(Mup, SM, S, max); +#endif + } + + assert(sum > 0.0); + + sum = 1.0/sum; + ggml_vec_scale_f32(masked_begin, SM, sum); + + } + + // step-by-step explanation + { + // forward-process shape grads from backward process + // parallel_for ik2,ik3: + // for irep: + // iq2 = ik2 + irep*nek2 + // k[:D,:M,:,:] [D,M,:,:] grad[k][:D,:M,ik2,ik3] += grad[kcur] + // q[:D,:N,:,:] [D,N,:,:] grad[q][:D,iq1,iq2,iq3] += grad[qcur] + // v[:M,:D,:,:] [M,D,:,:] grad[v][:M,:D,iv2,iv3] += grad[vcur] + // for iq1: + // kcur = k[:D,:M,ik2,ik3] [D,M,1,1] grad[kcur] = grad[S1].T @ qcur + // qcur = q[:D,iq1,iq2,iq3] [D,1,1,1] grad[qcur] = grad[S1] @ kcur + // vcur = v[:M,:D,iv2,iv3] [M,D,1,1] grad[vcur] = grad[S5].T @ S4 + // S0 = -Inf [D,1,1,1] + // 
~S1[i] = dot(kcur[:D,i], qcur) + // S1 = qcur @ kcur.T [M,1,1,1] grad[S1] = grad[S2] * scale + // S2 = S1 * scale [M,1,1,1] grad[S2] = diag_mask_zero(grad[S3], P) + // S3 = diag_mask_inf(S2, P) [M,1,1,1] grad[S3] = S4 * (grad[S4] - dot(S4, grad[S4])) + // S4 = softmax(S3) [M,1,1,1] grad[S4] = grad[S5] @ vcur + // ~S5[i] = dot(vcur[:,i], S4) + // S5 = S4 @ vcur.T [D,1,1,1] grad[S5] = d[:D,id1,id2,id3] + // ~dst[i,iq1,iq2,iq3] = S5[i] ^ + // dst[:D,iq1,iq2,iq3] = S5 | grad[dst[:D,iq1,iq2,iq3]] = d[:D,id1,id2,id3] + // dst backward-/ grad[dst] = d + // + // output gradients with their dependencies: + // + // grad[kcur] = grad[S1].T @ qcur + // grad[S1] = diag_mask_zero(grad[S3], P) * scale + // grad[S3] = S4 * (grad[S4] - dot(S4, grad[S4])) + // grad[S4] = grad[S5] @ vcur + // grad[S4] = d[:D,id1,id2,id3] @ vcur + // grad[qcur] = grad[S1] @ kcur + // grad[vcur] = grad[S5].T @ S4 + // grad[vcur] = d[:D,id1,id2,id3].T @ S4 + // + // in post-order: + // + // S1 = qcur @ kcur.T + // S2 = S1 * scale + // S3 = diag_mask_inf(S2, P) + // S4 = softmax(S3) + // grad[S4] = d[:D,id1,id2,id3] @ vcur + // grad[S3] = S4 * (grad[S4] - dot(S4, grad[S4])) + // grad[S1] = diag_mask_zero(grad[S3], P) * scale + // grad[qcur] = grad[S1] @ kcur + // grad[kcur] = grad[S1].T @ qcur + // grad[vcur] = d[:D,id1,id2,id3].T @ S4 + // + // using less variables (SM=S4): + // + // S = diag_mask_inf(qcur @ kcur.T * scale, P) + // SM = softmax(S) + // S = d[:D,iq1,iq2,iq3] @ vcur + // dot_SM_gradSM = dot(SM, S) + // S = SM * (S - dot(SM, S)) + // S = diag_mask_zero(S, P) * scale + // + // grad[q][:D,iq1,iq2,iq3] += S @ kcur + // grad[k][:D,:M,ik2,ik3] += S.T @ qcur + // grad[v][:M,:D,iv2,iv3] += d[:D,id1,id2,id3].T @ SM + } + + // S = gradSM = d[:D,id1,id2,id3] @ vcur[:,:,iv2,iv3] + // S = d[:D,id1,id2,id3] @ vcur[:,:,iv2,iv3] + // for ic: + // S[:M] += vcur[:M,ic,iv2,iv3] * d[ic,id1,id2,id3] + // exclude known future zero S[..] 
values from operation + ggml_vec_set_f32(masked_begin, S, 0); + for (int64_t ic = 0; ic < D; ++ic) { + ggml_vec_mad_f32(masked_begin, + S, + (float *) ((char *) v->data + ( ic*nbv1 + iv2*nbv2 + iv3*nbv3)), + *(float *) ((char *) d->data + (ic*nbd0 + id1*nbd1 + id2*nbd2 + id3*nbd3))); + } + + // S = SM * (S - dot(SM, S)) + float dot_SM_gradSM = 0; + ggml_vec_dot_f32 (masked_begin, &dot_SM_gradSM, 0, SM, 0, S, 0, 1); + ggml_vec_acc1_f32(M, S, -dot_SM_gradSM); + ggml_vec_mul_f32 (masked_begin, S, S, SM); + + // S = diag_mask_zero(S, P) * scale + // already done by above ggml_vec_set_f32 + + // exclude known zero S[..] values from operation + ggml_vec_scale_f32(masked_begin, S, scale); + + // S shape [M,1] + // SM shape [M,1] + // kcur shape [D,M] + // qcur shape [D,1] + // vcur shape [M,D] + + // grad[q][:D,iq1,iq2,iq3] += S @ kcur + // grad[q][:D,iq1,iq2,iq3] += shape[M,1] @ shape[D,M] + // for ic: + // grad[q][:D,iq1,iq2,iq3] += S[ic] * kcur[:D,ic,ik2,ik3] + // exclude known zero S[..] values from loop + for (int64_t ic = 0; ic < masked_begin; ++ic) { + ggml_vec_mad_f32(D, + (float *) ((char *) grad_q + (iq1*nbgq1 + iq2*nbgq2 + iq3*nbgq3)), + (float *) ((char *) k->data + (ic*nbk1 + ik2*nbk2 + ik3*nbk3)), + S[ic]); + } + + // grad[k][:D,:M,iq2,iq3] += S.T @ qcur + // for ic: + // grad[k][:D,ic,iq2,iq3] += S.T[0,ic] * qcur[:D,0] + // grad[k][:D,ic,iq2,iq3] += S[ic] * qcur[:D,0] + // exclude known zero S[..] values from loop + for (int64_t ic = 0; ic < masked_begin; ++ic) { + ggml_vec_mad_f32(D, + (float *) ((char *) grad_k + (ic*nbgk1 + ik2*nbgk2 + ik3*nbgk3)), + (float *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3)), + S[ic]); + } + + // grad[v][:M,:D,iv2,iv3] += d[:D,id1,id2,id3].T @ SM + // for ic: + // grad[v][:M,ic,iv2,iv3] += d[:D,id1,id2,id3].T[0,ic] * SM[:M] + // grad[v][:M,ic,iv2,iv3] += d[ic,id1,id2,id3] * SM[:M] + // exclude known zero SM[..] 
values from mad + for (int64_t ic = 0; ic < D; ++ic) { + ggml_vec_mad_f32(masked_begin, + (float *) ((char *) grad_v + ( ic*nbgv1 + iv2*nbgv2 + iv3*nbgv3)), + SM, + *(float *) ((char *) d->data + (ic*nbd0 + id1*nbd1 + id2*nbd2 + id3*nbd3))); + } + } + } + } +} + +static void ggml_compute_forward_flash_attn_back( + const struct ggml_compute_params * params, + const bool masked, + struct ggml_tensor * dst) { + + const struct ggml_tensor * q = dst->src[0]; + + switch (q->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_flash_attn_back_f32(params, masked, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_ssm_conv + +static void ggml_compute_forward_ssm_conv_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; // conv_x + const struct ggml_tensor * src1 = dst->src[1]; // conv1d.weight + + const int ith = params->ith; + const int nth = params->nth; + + const int nc = src1->ne[0]; // d_conv + const int ncs = src0->ne[0]; // d_conv - 1 + n_t + const int nr = src0->ne[1]; // d_inner + const int n_t = dst->ne[1]; // tokens per sequence + const int n_s = dst->ne[2]; // number of sequences in the batch + + GGML_ASSERT( dst->ne[0] == nr); + GGML_ASSERT(src0->nb[0] == sizeof(float)); + GGML_ASSERT(src1->nb[0] == sizeof(float)); + GGML_ASSERT(src0->nb[1] == src0->ne[0]*sizeof(float)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + const int ir = ir1 - ir0; + + for (int i3 = 0; i3 < n_s; ++i3) { + for (int i2 = 0; i2 < n_t; ++i2) { + // {d_conv - 1 + n_t, d_inner, n_seqs} + // sliding window + const float * s = (const float *) ((const char *) src0->data + ir0*(src0->nb[1]) + i2*(src0->nb[0]) + i3*(src0->nb[2])); // {d_conv, d_inner, n_s} + const float * c = (const float *) ((const char *) src1->data + ir0*(src1->nb[1])); // {d_conv, d_inner} 
+ float * x = (float *) ((char *) dst->data + ir0*(dst->nb[0]) + i2*(dst->nb[1]) + i3*(dst->nb[2])); // {d_inner, n_t, n_s} + + // TODO: transpose the output for smaller strides for big batches? + // d_inner + for (int i1 = 0; i1 < ir; ++i1) { + // rowwise dot product + // NOTE: not using ggml_vec_dot_f32, because its sum is in double precision + float sumf = 0.0f; + + // d_conv + for (int i0 = 0; i0 < nc; ++i0) { + sumf += s[i0 + i1*ncs] * c[i0 + i1*nc]; + } + x[i1] = sumf; + } + } + } +} + +static void ggml_compute_forward_ssm_conv( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + switch (dst->src[0]->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_ssm_conv_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_ssm_scan + +static void ggml_compute_forward_ssm_scan_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; // s + const struct ggml_tensor * src1 = dst->src[1]; // x + const struct ggml_tensor * src2 = dst->src[2]; // dt + const struct ggml_tensor * src3 = dst->src[3]; // A + const struct ggml_tensor * src4 = dst->src[4]; // B + const struct ggml_tensor * src5 = dst->src[5]; // C + + const int ith = params->ith; + const int nth = params->nth; + + const int64_t nc = src0->ne[0]; // d_state + const int64_t nr = src0->ne[1]; // d_inner + const int64_t n_t = src1->ne[1]; // number of tokens per sequence + const int64_t n_s = src0->ne[2]; // number of sequences in the batch + + GGML_ASSERT(ggml_nelements(src1) + ggml_nelements(src0) == ggml_nelements(dst)); + GGML_ASSERT(src0->nb[0] == sizeof(float)); + GGML_ASSERT(src1->nb[0] == sizeof(float)); + GGML_ASSERT(src2->nb[0] == sizeof(float)); + GGML_ASSERT(src3->nb[0] == sizeof(float)); + GGML_ASSERT(src4->nb[0] == sizeof(float)); + GGML_ASSERT(src5->nb[0] == sizeof(float)); + // required for the dot product between s and C + 
GGML_ASSERT(src0->nb[1] == src0->ne[0]*sizeof(float)); + // required for per-sequence offsets for states + GGML_ASSERT(src0->nb[2] == src0->ne[0]*src0->ne[1]*sizeof(float)); + // required to get correct offset for state destination (i.e. src1->nb[3]) + GGML_ASSERT(src1->nb[3] == src1->ne[0]*src1->ne[1]*src1->ne[2]*sizeof(float)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + const int ir = ir1 - ir0; + + for (int i3 = 0; i3 < n_s; ++i3) { + for (int i2 = 0; i2 < n_t; ++i2) { + const float * s0 = (const float *) ((const char *) src0->data + ir0*(src0->nb[1]) + i3*(src0->nb[2])); // {d_state, d_inner, n_s} + const float * x = (const float *) ((const char *) src1->data + ir0*(src1->nb[0]) + i2*(src1->nb[1]) + i3*(src1->nb[2])); // {d_inner, n_t, n_s} + const float * dt = (const float *) ((const char *) src2->data + ir0*(src2->nb[0]) + i2*(src2->nb[1]) + i3*(src2->nb[2])); // {d_inner, n_t, n_s} + const float * A = (const float *) ((const char *) src3->data + ir0*(src3->nb[1])); // {d_state, d_inner} + const float * B = (const float *) ((const char *) src4->data + i2*(src4->nb[1]) + i3*(src4->nb[2])); // {d_state, n_t, n_s} + const float * C = (const float *) ((const char *) src5->data + i2*(src5->nb[1]) + i3*(src5->nb[2])); // {d_state, n_t, n_s} + float * y = ( float *) (( char *) dst->data + ir0*(src1->nb[0]) + i2*(src1->nb[1]) + i3*(src1->nb[2])); // {d_inner, n_t, n_s} + float * s = ( float *) (( char *) dst->data + ir0*(src0->nb[1]) + i3*(src0->nb[2]) + src1->nb[3]); // {d_state, d_inner, n_s} + + // use the output as the source for the next token-wise iterations + if (i2 > 0) { s0 = s; } + + // d_inner + for (int i1 = 0; i1 < ir; ++i1) { + // ref: https://github.com/state-spaces/mamba/blob/34076d664838588a3c97727b263478ab9f621a07/mamba_ssm/ops/triton/selective_state_update.py#L78 + float dt_soft_plus = dt[i1] <= 20.0f ? 
log1pf(expf(dt[i1])) : dt[i1]; + float x_dt = x[i1] * dt_soft_plus; + float sumf = 0.0f; + // d_state + for (int i0 = 0; i0 < nc; ++i0) { + int i = i0 + i1*nc; + // state = prev_state * dA + dB * x + float state = (s0[i] * expf(dt_soft_plus * A[i])) + (B[i0] * x_dt); + // y = rowwise_dotprod(state, C) + sumf += state * C[i0]; + s[i] = state; + } + y[i1] = sumf; + } + } + } +} + +static void ggml_compute_forward_ssm_scan( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + switch (dst->src[0]->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_ssm_scan_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_win_part + +static void ggml_compute_forward_win_part_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + UNUSED(params); + + const struct ggml_tensor * src0 = dst->src[0]; + + GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne) + GGML_TENSOR_LOCALS(int64_t, ne, dst, ne) + + const int32_t nep0 = ((const int32_t *)(dst->op_params))[0]; + const int32_t nep1 = ((const int32_t *)(dst->op_params))[1]; + const int32_t w = ((const int32_t *)(dst->op_params))[2]; + + assert(ne00 == ne0); + assert(ne3 == nep0*nep1); + + // TODO: optimize / multi-thread + for (int py = 0; py < nep1; ++py) { + for (int px = 0; px < nep0; ++px) { + const int64_t i3 = py*nep0 + px; + for (int64_t i2 = 0; i2 < ne2; ++i2) { + for (int64_t i1 = 0; i1 < ne1; ++i1) { + for (int64_t i0 = 0; i0 < ne0; ++i0) { + const int64_t i02 = py*w + i2; + const int64_t i01 = px*w + i1; + const int64_t i00 = i0; + + const int64_t i = i3*ne2*ne1*ne0 + i2*ne1*ne0 + i1*ne0 + i0; + const int64_t j = i02*ne01*ne00 + i01*ne00 + i00; + + if (py*w + i2 >= ne02 || px*w + i1 >= ne01) { + ((float *) dst->data)[i] = 0.0f; + } else { + ((float *) dst->data)[i] = ((float *) src0->data)[j]; + } + } + } + } + } + } +} + +static void ggml_compute_forward_win_part( + const struct ggml_compute_params * params, + struct 
ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_win_part_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_win_unpart + +static void ggml_compute_forward_win_unpart_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + UNUSED(params); + + const struct ggml_tensor * src0 = dst->src[0]; + + GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne) + GGML_TENSOR_LOCALS(int64_t, ne, dst, ne) + + const int32_t w = ((const int32_t *)(dst->op_params))[0]; + + // padding + const int px = (w - ne1%w)%w; + //const int py = (w - ne2%w)%w; + + const int npx = (px + ne1)/w; + //const int npy = (py + ne2)/w; + + assert(ne0 == ne00); + + // TODO: optimize / multi-thread + for (int64_t i2 = 0; i2 < ne2; ++i2) { + for (int64_t i1 = 0; i1 < ne1; ++i1) { + for (int64_t i0 = 0; i0 < ne0; ++i0) { + const int ip2 = i2/w; + const int ip1 = i1/w; + + const int64_t i02 = i2%w; + const int64_t i01 = i1%w; + const int64_t i00 = i0; + + const int64_t i = (ip2*npx + ip1)*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00 + i00; + const int64_t j = i2*ne1*ne0 + i1*ne0 + i0; + + ((float *) dst->data)[j] = ((float *) src0->data)[i]; + } + } + } +} + +static void ggml_compute_forward_win_unpart( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_win_unpart_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_unary + +static void ggml_compute_forward_unary( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const enum ggml_unary_op op = ggml_get_unary_op(dst); + + switch (op) { + case GGML_UNARY_OP_ABS: + { + ggml_compute_forward_abs(params, dst); + } break; + case GGML_UNARY_OP_SGN: + { + 
ggml_compute_forward_sgn(params, dst); + } break; + case GGML_UNARY_OP_NEG: + { + ggml_compute_forward_neg(params, dst); + } break; + case GGML_UNARY_OP_STEP: + { + ggml_compute_forward_step(params, dst); + } break; + case GGML_UNARY_OP_TANH: + { + ggml_compute_forward_tanh(params, dst); + } break; + case GGML_UNARY_OP_ELU: + { + ggml_compute_forward_elu(params, dst); + } break; + case GGML_UNARY_OP_RELU: + { + ggml_compute_forward_relu(params, dst); + } break; + case GGML_UNARY_OP_SIGMOID: + { + ggml_compute_forward_sigmoid(params, dst); + } break; + case GGML_UNARY_OP_GELU: + { + ggml_compute_forward_gelu(params, dst); + } break; + case GGML_UNARY_OP_GELU_QUICK: + { + ggml_compute_forward_gelu_quick(params, dst); + } break; + case GGML_UNARY_OP_SILU: + { + ggml_compute_forward_silu(params, dst); + } break; + case GGML_UNARY_OP_HARDSWISH: + { + ggml_compute_forward_hardswish(params, dst); + } break; + case GGML_UNARY_OP_HARDSIGMOID: + { + ggml_compute_forward_hardsigmoid(params, dst); + } break; + case GGML_UNARY_OP_EXP: + { + ggml_compute_forward_exp(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_get_rel_pos + +static void ggml_compute_forward_get_rel_pos_f16( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + UNUSED(params); + + const struct ggml_tensor * src0 = dst->src[0]; + + // ref: https://github.com/facebookresearch/segment-anything/blob/main/segment_anything/modeling/image_encoder.py#L292-L322 + + GGML_TENSOR_UNARY_OP_LOCALS + + const int64_t w = ne1; + + ggml_fp16_t * src0_data = (ggml_fp16_t *) src0->data; + ggml_fp16_t * dst_data = (ggml_fp16_t *) dst->data; + + for (int64_t i2 = 0; i2 < ne2; ++i2) { + for (int64_t i1 = 0; i1 < ne1; ++i1) { + const int64_t pos = (w - i1 - 1) + i2; + for (int64_t i0 = 0; i0 < ne0; ++i0) { + dst_data[i2*ne1*ne0 + i1*ne0 + i0] = src0_data[pos*ne00 + i0]; + } + } + } +} + +static void ggml_compute_forward_get_rel_pos( + const struct 
ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F16: + case GGML_TYPE_BF16: + { + ggml_compute_forward_get_rel_pos_f16(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_add_rel_pos + +static void ggml_compute_forward_add_rel_pos_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + const struct ggml_tensor * src2 = dst->src[2]; + + const bool inplace = (bool) ((int32_t *) dst->op_params)[0]; + if (!inplace) { + if (params->ith == 0) { + memcpy((char *) dst->data, (char *) src0->data, ggml_nbytes(dst)); + } + ggml_barrier(params->threadpool); + } + // ref: https://github.com/facebookresearch/segment-anything/blob/main/segment_anything/modeling/image_encoder.py#L357-L359 + + float * src1_data = (float *) src1->data; + float * src2_data = (float *) src2->data; + float * dst_data = (float *) dst->data; + + const int64_t ne10 = src1->ne[0]; + const int64_t ne11 = src1->ne[1]; + const int64_t ne12 = src1->ne[2]; + const int64_t ne13 = src1->ne[3]; + + const int ith = params->ith; + const int nth = params->nth; + + // total patches in dst + const int np = ne13; + + // patches per thread + const int dp = (np + nth - 1)/nth; + + // patch range for this thread + const int ip0 = dp*ith; + const int ip1 = MIN(ip0 + dp, np); + + for (int64_t i13 = ip0; i13 < ip1; ++i13) { + for (int64_t i12 = 0; i12 < ne12; ++i12) { + for (int64_t i11 = 0; i11 < ne11; ++i11) { + const int64_t jp1 = i13*ne12*ne11*ne10 + i12*ne11*ne10 + i11*ne10; + for (int64_t i10 = 0; i10 < ne10; ++i10) { + const int64_t jp0 = jp1 + i10; + const float src1_e = src1_data[jp0]; + const float src2_e = src2_data[jp0]; + + const int64_t jdh = jp0 * ne10; + const int64_t jdw = jdh - (ne10 - 1) * i10; + + for (int64_t j = 0; j < 
ne10; ++j) { + dst_data[jdh + j ] += src2_e; + dst_data[jdw + j*ne10] += src1_e; + } + } + } + } + } +} + +static void ggml_compute_forward_add_rel_pos( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_add_rel_pos_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_rwkv_wkv + +static void ggml_compute_forward_rwkv_wkv_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + const size_t T = dst->src[1]->ne[3]; + const size_t C = dst->ne[0]; + const size_t H = dst->src[1]->ne[2]; + const size_t n_seqs = dst->src[5]->ne[1]; + + float * dst_data = (float *) dst->data; + float * state = ((float *) dst->data) + C * T; + + if (params->ith != 0) { + return; + } + + memset(dst_data, 0, T * C * sizeof(float)); + + float * k = (float *) dst->src[0]->data; + float * v = (float *) dst->src[1]->data; + float * r = (float *) dst->src[2]->data; + float * time_faaaa = (float *) dst->src[3]->data; + float * time_decay = (float *) dst->src[4]->data; + + size_t t_stride = H * (C / H); + + size_t h_stride = C / H; + size_t h_stride_2d = (C / H) * (C / H); + + // basically fused operations: + // dst = r @ (time_faaaa * (k @ v) + state), + // state = time_decay * state + (k @ v), + // recursive through each token + for (size_t t = 0; t < T; t++) { + size_t t_offset = t * t_stride; + size_t state_offset = (C / H) * C * (t / (T / n_seqs)); + float * state_cur = state + state_offset; + float * state_prev = t % (T / n_seqs) ? 
state_cur : (float*)dst->src[5]->data + state_offset; + + for (size_t h = 0; h < H; h++) { + size_t h_offset = h * h_stride; + size_t t_h_offset = t_offset + h_offset; + size_t h_2d_offset = h * h_stride_2d; + + for (size_t i = 0; i < C / H; i++) { + size_t t_h_i_offset = t_h_offset + i; + size_t h_i_offset = h_offset + i; + size_t h_2d_i_offset = h_2d_offset + i * h_stride; + + float k_val = k[t_h_i_offset]; + float r_val = r[t_h_i_offset]; + float time_faaaa_val = time_faaaa[h_i_offset]; + // RWKV v6: different time_decay for each token. + float time_decay_val = time_decay[t_h_i_offset]; + + for (size_t j = 0; j < C / H; j ++) { + size_t t_h_j_offset = t_h_offset + j; + size_t h_2d_i_j_offset = h_2d_i_offset + j; + + float v_val = v[t_h_j_offset]; + float kv_val = v_val * k_val; + float prev_state_val = state_prev[h_2d_i_j_offset]; + float temp_val = kv_val * time_faaaa_val + prev_state_val; + dst_data[t_h_j_offset] += temp_val * r_val; + state_cur[h_2d_i_j_offset] = prev_state_val * time_decay_val + kv_val; + } + } + } + } +} + +static void ggml_compute_forward_rwkv_wkv( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_rwkv_wkv_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_map_unary + +static void ggml_compute_forward_map_unary_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst, + const ggml_unary_op_f32_t fun) { + + const struct ggml_tensor * src0 = dst->src[0]; + + if (params->ith != 0) { + return; + } + + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(dst)); + assert(ggml_are_same_shape(src0, dst)); + + const int n = ggml_nrows(src0); + const int nc = src0->ne[0]; + + for (int i = 0; i < n; i++) { + fun(nc, + (float *) ((char *) dst->data + i*( dst->nb[1])), + (float *) ((char *) src0->data + 
i*(src0->nb[1]))); + } +} + +static void ggml_compute_forward_map_unary( + const struct ggml_compute_params * params, + struct ggml_tensor * dst, + const ggml_unary_op_f32_t fun) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_map_unary_f32(params, dst, fun); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_map_binary + +static void ggml_compute_forward_map_binary_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst, + const ggml_binary_op_f32_t fun) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + if (params->ith != 0) { + return; + } + + assert(ggml_is_contiguous_1(src0)); + assert(ggml_is_contiguous_1(src1)); + assert(ggml_is_contiguous_1(dst)); + assert(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); + + const int n = ggml_nrows(src0); + const int nc = src0->ne[0]; + + for (int i = 0; i < n; i++) { + fun(nc, + (float *) ((char *) dst->data + i*( dst->nb[1])), + (float *) ((char *) src0->data + i*(src0->nb[1])), + (float *) ((char *) src1->data + i*(src1->nb[1]))); + } +} + +static void ggml_compute_forward_map_binary( + const struct ggml_compute_params * params, + struct ggml_tensor * dst, + const ggml_binary_op_f32_t fun) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_map_binary_f32(params, dst, fun); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_map_custom1 + +static void ggml_compute_forward_map_custom1_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst, + const ggml_custom1_op_f32_t fun) { + + const struct ggml_tensor * a = dst->src[0]; + + if (params->ith != 0) { + return; + } + + fun(dst, a); +} + +// ggml_compute_forward_map_custom2 + +static void 
ggml_compute_forward_map_custom2_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst, + const ggml_custom2_op_f32_t fun) { + + const struct ggml_tensor * a = dst->src[0]; + const struct ggml_tensor * b = dst->src[1]; + + if (params->ith != 0) { + return; + } + + fun(dst, a, b); +} + +// ggml_compute_forward_map_custom3 + +static void ggml_compute_forward_map_custom3_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst, + const ggml_custom3_op_f32_t fun) { + + const struct ggml_tensor * a = dst->src[0]; + const struct ggml_tensor * b = dst->src[1]; + const struct ggml_tensor * c = dst->src[2]; /* fix: was dst->src[1] (copy-paste bug) -- custom3 takes three distinct sources, cf. ggml_compute_forward_map_custom3 below */ + + if (params->ith != 0) { + return; + } + + fun(dst, a, b, c); +} + +// ggml_compute_forward_map_custom1 + +static void ggml_compute_forward_map_custom1( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * a = dst->src[0]; + + struct ggml_map_custom1_op_params p; + memcpy(&p, dst->op_params, sizeof(p)); + + p.fun(dst, a, params->ith, params->nth, p.userdata); +} + +// ggml_compute_forward_map_custom2 + +static void ggml_compute_forward_map_custom2( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * a = dst->src[0]; + const struct ggml_tensor * b = dst->src[1]; + + struct ggml_map_custom2_op_params p; + memcpy(&p, dst->op_params, sizeof(p)); + + p.fun(dst, a, b, params->ith, params->nth, p.userdata); +} + +// ggml_compute_forward_map_custom3 + +static void ggml_compute_forward_map_custom3( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * a = dst->src[0]; + const struct ggml_tensor * b = dst->src[1]; + const struct ggml_tensor * c = dst->src[2]; + + struct ggml_map_custom3_op_params p; + memcpy(&p, dst->op_params, sizeof(p)); + + p.fun(dst, a, b, c, params->ith, params->nth, p.userdata); +} + +// ggml_compute_forward_cross_entropy_loss + +static void 
ggml_compute_forward_cross_entropy_loss_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(ggml_is_contiguous(src1)); + GGML_ASSERT(ggml_is_scalar(dst)); + GGML_ASSERT(ggml_are_same_shape(src0, src1)); + + const int ith = params->ith; + const int nth = params->nth; + + float * sums = (float *) params->wdata; + + // TODO: handle transposed/permuted matrices + const int nc = src0->ne[0]; + const int nr = ggml_nrows(src0); + + GGML_ASSERT(params->wsize >= sizeof(float) * (nth + nth * nc)); + + if (ith == 0) { + memset(sums, 0, sizeof(float) * (nth + nth * nc)); + } + ggml_barrier(params->threadpool); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + for (int i1 = ir0; i1 < ir1; i1++) { + float * s0 = (float *)((char *) src0->data + i1*src0->nb[1]); + float * s1 = (float *)((char *) src1->data + i1*src1->nb[1]); + float * st = ((float *) params->wdata) + nth + ith*nc; + +#ifndef NDEBUG + for (int i = 0; i < nc; ++i) { + //printf("p[%d] = %f\n", i, p[i]); + assert(!isnan(s0[i])); + assert(!isnan(s1[i])); + } +#endif + + float max = -INFINITY; + ggml_vec_max_f32(nc, &max, s0); + ggml_float sum = ggml_vec_log_soft_max_f32(nc, st, s0, max); + assert(sum >= 0.0); + + ggml_vec_add1_f32(nc, st, st, -sum); + ggml_vec_mul_f32(nc, st, st, s1); + + float st_sum = 0.0f; + ggml_vec_sum_f32(nc, &st_sum, st); + sums[ith] += st_sum; + +#ifndef NDEBUG + for (int i = 0; i < nc; ++i) { + assert(!isnan(st[i])); + assert(!isinf(st[i])); + } +#endif + } + ggml_barrier(params->threadpool); + + if (ith == 0) { + float * dp = (float *) dst->data; + ggml_vec_sum_f32(nth, dp, sums); + dp[0] *= -1.0f / (float) nr; + } +} + +static void ggml_compute_forward_cross_entropy_loss( + const struct 
ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_cross_entropy_loss_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +// ggml_compute_forward_cross_entropy_loss_back + +static void ggml_compute_forward_cross_entropy_loss_back_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + const struct ggml_tensor * opt0 = dst->src[2]; + + GGML_ASSERT(ggml_is_contiguous(dst)); + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(ggml_is_contiguous(src1)); + GGML_ASSERT(ggml_is_contiguous(opt0)); + GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); + + const int64_t ith = params->ith; + const int64_t nth = params->nth; + + // TODO: handle transposed/permuted matrices + const int64_t nc = src0->ne[0]; + const int64_t nr = ggml_nrows(src0); + + // rows per thread + const int64_t dr = (nr + nth - 1)/nth; + + // row range for this thread + const int64_t ir0 = dr*ith; + const int64_t ir1 = MIN(ir0 + dr, nr); + + const float d_by_nr = ((const float *) opt0->data)[0] / (float) nr; + + for (int64_t i1 = ir0; i1 < ir1; i1++) { + float * ds0 = (float *)((char *) dst->data + i1*dst->nb[1]); + float * s0 = (float *)((char *) src0->data + i1*src0->nb[1]); + float * s1 = (float *)((char *) src1->data + i1*src1->nb[1]); + +#ifndef NDEBUG + for (int i = 0; i < nc; ++i) { + //printf("p[%d] = %f\n", i, p[i]); + assert(!isnan(s0[i])); + assert(!isnan(s1[i])); + } +#endif + + // soft_max + float max = -INFINITY; + ggml_vec_max_f32(nc, &max, s0); + ggml_float sum = ggml_vec_soft_max_f32(nc, ds0, s0, max); + assert(sum > 0.0); + ggml_vec_scale_f32(nc, ds0, 1.0/sum); + + // grad(src0) = (softmax(src0) - src1) * grad(cross_entropy_loss(src0, src1)) / nr + 
ggml_vec_sub_f32(nc, ds0, ds0, s1); + ggml_vec_scale_f32(nc, ds0, d_by_nr); + +#ifndef NDEBUG + for (int i = 0; i < nc; ++i) { + assert(!isnan(ds0[i])); + assert(!isinf(ds0[i])); + } +#endif + } +} + +static void ggml_compute_forward_cross_entropy_loss_back( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_cross_entropy_loss_back_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} + +static void ggml_compute_forward_opt_step_adamw_f32( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src0_grad = dst->src[1]; + const struct ggml_tensor * src0_grad_m = dst->src[2]; + const struct ggml_tensor * src0_grad_v = dst->src[3]; + GGML_ASSERT(ggml_are_same_shape(src0, src0_grad)); + + const int ith = params->ith; + const int nth = params->nth; + + const int nr = ggml_nrows(src0); + + GGML_TENSOR_UNARY_OP_LOCALS + GGML_ASSERT(nb00 == sizeof(float)); + + // rows per thread + const int dr = (nr + nth - 1)/nth; + + // row range for this thread + const int ir0 = dr*ith; + const int ir1 = MIN(ir0 + dr, nr); + + /* const float gnorm = 1.0f; */ + int64_t iter; memcpy(&iter, &dst->op_params[0], sizeof(int64_t)); + const float alpha = ggml_get_op_params_f32(dst, 2); + const float beta1 = ggml_get_op_params_f32(dst, 3); + const float beta2 = ggml_get_op_params_f32(dst, 4); + const float eps = ggml_get_op_params_f32(dst, 5); + const float wd = ggml_get_op_params_f32(dst, 6); + + const float beta1h = alpha/(1.0f - powf(beta1, iter)); + const float beta2h = 1.0f/(1.0f - powf(beta2, iter)); + + for (int ir = ir0; ir < ir1; ++ir) { + const int64_t i03 = ir/(ne02*ne01); + const int64_t i02 = (ir - i03*ne02*ne01)/ne01; + const int64_t i01 = (ir - i03*ne02*ne01 - i02*ne01); + + const size_t offset = i03*nb03 + 
i02*nb02 + i01*nb01; + + float * w = (float *) ((char *) src0->data + offset); // weight + const float * g = (const float *) ((const char *) src0_grad->data + offset); // grad + float * m = (float *) ((char *) src0_grad_m->data + offset); + float * v = (float *) ((char *) src0_grad_v->data + offset); + + for (int i00 = 0; i00 < ne00; ++i00) { + m[i00] = m[i00]*beta1 + g[i00]*(1.0f - beta1); + v[i00] = v[i00]*beta2 + g[i00]*g[i00]*(1.0f - beta2); + + const float mh = m[i00]*beta1h; + const float vh = sqrtf(v[i00]*beta2h) + eps; + + // The weight decay is applied independently of the Adam momenta m and v. + // This is NOT equivalent to l2 regularization that adds w[i00]*w[i00] to the loss. + // See: https://arxiv.org/pdf/1711.05101v3.pdf + w[i00] = w[i00]*(1.0f - alpha*wd) - mh/vh; + } + } + + ggml_barrier(params->threadpool); + if (ith != 0) { + return; + } + + iter++; + memcpy(&dst->op_params[0], &iter, sizeof(int64_t)); +} + +static void ggml_compute_forward_opt_step_adamw( + const struct ggml_compute_params * params, + struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_opt_step_adamw_f32(params, dst); + } break; + default: + { + GGML_ABORT("fatal error"); + } + } +} +///////////////////////////////// + +static void ggml_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor) { + GGML_ASSERT(params); + + if (tensor->op == GGML_OP_NONE || ggml_is_empty(tensor)) { + return; + } + + switch (tensor->op) { + case GGML_OP_DUP: + { + ggml_compute_forward_dup(params, tensor); + } break; + case GGML_OP_ADD: + { + ggml_compute_forward_add(params, tensor); + } break; + case GGML_OP_ADD1: + { + ggml_compute_forward_add1(params, tensor); + } break; + case GGML_OP_ACC: + { + ggml_compute_forward_acc(params, tensor); + } break; + case GGML_OP_SUB: + { + ggml_compute_forward_sub(params, tensor); + } break; + case GGML_OP_MUL: + { + 
ggml_compute_forward_mul(params, tensor); + } break; + case GGML_OP_DIV: + { + ggml_compute_forward_div(params, tensor); + } break; + case GGML_OP_SQR: + { + ggml_compute_forward_sqr(params, tensor); + } break; + case GGML_OP_SQRT: + { + ggml_compute_forward_sqrt(params, tensor); + } break; + case GGML_OP_LOG: + { + ggml_compute_forward_log(params, tensor); + } break; + case GGML_OP_SIN: + { + ggml_compute_forward_sin(params, tensor); + } break; + case GGML_OP_COS: + { + ggml_compute_forward_cos(params, tensor); + } break; + case GGML_OP_SUM: + { + ggml_compute_forward_sum(params, tensor); + } break; + case GGML_OP_SUM_ROWS: + { + ggml_compute_forward_sum_rows(params, tensor); + } break; + case GGML_OP_MEAN: + { + ggml_compute_forward_mean(params, tensor); + } break; + case GGML_OP_ARGMAX: + { + ggml_compute_forward_argmax(params, tensor); + } break; + case GGML_OP_REPEAT: + { + ggml_compute_forward_repeat(params, tensor); + } break; + case GGML_OP_REPEAT_BACK: + { + ggml_compute_forward_repeat_back(params, tensor); + } break; + case GGML_OP_CONCAT: + { + ggml_compute_forward_concat(params, tensor); + } break; + case GGML_OP_SILU_BACK: + { + ggml_compute_forward_silu_back(params, tensor); + } break; + case GGML_OP_NORM: + { + ggml_compute_forward_norm(params, tensor); + } break; + case GGML_OP_RMS_NORM: + { + ggml_compute_forward_rms_norm(params, tensor); + } break; + case GGML_OP_RMS_NORM_BACK: + { + ggml_compute_forward_rms_norm_back(params, tensor); + } break; + case GGML_OP_GROUP_NORM: + { + ggml_compute_forward_group_norm(params, tensor); + } break; + case GGML_OP_MUL_MAT: + { + ggml_compute_forward_mul_mat(params, tensor); + } break; + case GGML_OP_MUL_MAT_ID: + { + ggml_compute_forward_mul_mat_id(params, tensor); + } break; + case GGML_OP_OUT_PROD: + { + ggml_compute_forward_out_prod(params, tensor); + } break; + case GGML_OP_SCALE: + { + ggml_compute_forward_scale(params, tensor); + } break; + case GGML_OP_SET: + { + ggml_compute_forward_set(params, 
tensor); + } break; + case GGML_OP_CPY: + { + ggml_compute_forward_cpy(params, tensor); + } break; + case GGML_OP_CONT: + { + ggml_compute_forward_cont(params, tensor); + } break; + case GGML_OP_RESHAPE: + { + ggml_compute_forward_reshape(params, tensor); + } break; + case GGML_OP_VIEW: + { + ggml_compute_forward_view(params, tensor); + } break; + case GGML_OP_PERMUTE: + { + ggml_compute_forward_permute(params, tensor); + } break; + case GGML_OP_TRANSPOSE: + { + ggml_compute_forward_transpose(params, tensor); + } break; + case GGML_OP_GET_ROWS: + { + ggml_compute_forward_get_rows(params, tensor); + } break; + case GGML_OP_GET_ROWS_BACK: + { + ggml_compute_forward_get_rows_back(params, tensor); + } break; + case GGML_OP_DIAG: + { + ggml_compute_forward_diag(params, tensor); + } break; + case GGML_OP_DIAG_MASK_INF: + { + ggml_compute_forward_diag_mask_inf(params, tensor); + } break; + case GGML_OP_DIAG_MASK_ZERO: + { + ggml_compute_forward_diag_mask_zero(params, tensor); + } break; + case GGML_OP_SOFT_MAX: + { + ggml_compute_forward_soft_max(params, tensor); + } break; + case GGML_OP_SOFT_MAX_BACK: + { + ggml_compute_forward_soft_max_back(params, tensor); + } break; + case GGML_OP_ROPE: + { + ggml_compute_forward_rope(params, tensor); + } break; + case GGML_OP_ROPE_BACK: + { + ggml_compute_forward_rope_back(params, tensor); + } break; + case GGML_OP_CLAMP: + { + ggml_compute_forward_clamp(params, tensor); + } break; + case GGML_OP_CONV_TRANSPOSE_1D: + { + ggml_compute_forward_conv_transpose_1d(params, tensor); + } break; + case GGML_OP_IM2COL: + { + ggml_compute_forward_im2col(params, tensor); + } break; + case GGML_OP_IM2COL_BACK: + { + ggml_compute_forward_im2col_back_f32(params, tensor); + } break; + case GGML_OP_CONV_TRANSPOSE_2D: + { + ggml_compute_forward_conv_transpose_2d(params, tensor); + } break; + case GGML_OP_POOL_1D: + { + ggml_compute_forward_pool_1d(params, tensor); + } break; + case GGML_OP_POOL_2D: + { + ggml_compute_forward_pool_2d(params, tensor); 
+ } break; + case GGML_OP_POOL_2D_BACK: + { + ggml_compute_forward_pool_2d_back(params, tensor); + } break; + case GGML_OP_UPSCALE: + { + ggml_compute_forward_upscale(params, tensor); + } break; + case GGML_OP_PAD: + { + ggml_compute_forward_pad(params, tensor); + } break; + case GGML_OP_UNPAD: + { + ggml_compute_forward_unpad(params, tensor); + } break; + case GGML_OP_ARANGE: + { + ggml_compute_forward_arange(params, tensor); + } break; + case GGML_OP_TIMESTEP_EMBEDDING: + { + ggml_compute_forward_timestep_embedding(params, tensor); + } break; + case GGML_OP_ARGSORT: + { + ggml_compute_forward_argsort(params, tensor); + } break; + case GGML_OP_LEAKY_RELU: + { + ggml_compute_forward_leaky_relu(params, tensor); + } break; + case GGML_OP_FLASH_ATTN_EXT: + { + ggml_compute_forward_flash_attn_ext(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor->src[3], tensor); + } break; + case GGML_OP_FLASH_ATTN_BACK: + { + int32_t t = ggml_get_op_params_i32(tensor, 0); + GGML_ASSERT(t == 0 || t == 1); + bool masked = t != 0; + ggml_compute_forward_flash_attn_back(params, masked, tensor); + } break; + case GGML_OP_SSM_CONV: + { + ggml_compute_forward_ssm_conv(params, tensor); + } break; + case GGML_OP_SSM_SCAN: + { + ggml_compute_forward_ssm_scan(params, tensor); + } break; + case GGML_OP_WIN_PART: + { + ggml_compute_forward_win_part(params, tensor); + } break; + case GGML_OP_WIN_UNPART: + { + ggml_compute_forward_win_unpart(params, tensor); + } break; + case GGML_OP_UNARY: + { + ggml_compute_forward_unary(params, tensor); + } break; + case GGML_OP_GET_REL_POS: + { + ggml_compute_forward_get_rel_pos(params, tensor); + } break; + case GGML_OP_ADD_REL_POS: + { + ggml_compute_forward_add_rel_pos(params, tensor); + } break; + case GGML_OP_RWKV_WKV: + { + ggml_compute_forward_rwkv_wkv(params, tensor); + } break; + case GGML_OP_MAP_UNARY: + { + ggml_unary_op_f32_t fun; + memcpy(&fun, tensor->op_params, sizeof(fun)); + ggml_compute_forward_map_unary(params, tensor, fun); + } 
+ break; + case GGML_OP_MAP_BINARY: + { + ggml_binary_op_f32_t fun; + memcpy(&fun, tensor->op_params, sizeof(fun)); + ggml_compute_forward_map_binary(params, tensor, fun); + } + break; + case GGML_OP_MAP_CUSTOM1_F32: + { + ggml_custom1_op_f32_t fun; + memcpy(&fun, tensor->op_params, sizeof(fun)); + ggml_compute_forward_map_custom1_f32(params, tensor, fun); + } + break; + case GGML_OP_MAP_CUSTOM2_F32: + { + ggml_custom2_op_f32_t fun; + memcpy(&fun, tensor->op_params, sizeof(fun)); + ggml_compute_forward_map_custom2_f32(params, tensor, fun); + } + break; + case GGML_OP_MAP_CUSTOM3_F32: + { + ggml_custom3_op_f32_t fun; + memcpy(&fun, tensor->op_params, sizeof(fun)); + ggml_compute_forward_map_custom3_f32(params, tensor, fun); + } + break; + case GGML_OP_MAP_CUSTOM1: + { + ggml_compute_forward_map_custom1(params, tensor); + } + break; + case GGML_OP_MAP_CUSTOM2: + { + ggml_compute_forward_map_custom2(params, tensor); + } + break; + case GGML_OP_MAP_CUSTOM3: + { + ggml_compute_forward_map_custom3(params, tensor); + } + break; + case GGML_OP_CROSS_ENTROPY_LOSS: + { + ggml_compute_forward_cross_entropy_loss(params, tensor); + } + break; + case GGML_OP_CROSS_ENTROPY_LOSS_BACK: + { + ggml_compute_forward_cross_entropy_loss_back(params, tensor); + } + break; + case GGML_OP_OPT_STEP_ADAMW: + { + ggml_compute_forward_opt_step_adamw(params, tensor); + } + break; + case GGML_OP_NONE: + { + // nop + } break; + case GGML_OP_COUNT: + { + GGML_ABORT("fatal error"); + } + } +} + +//////////////////////////////////////////////////////////////////////////////// + +struct ggml_hash_set ggml_hash_set_new(size_t size) { + size = ggml_hash_size(size); + struct ggml_hash_set result; + result.size = size; + result.keys = GGML_MALLOC(sizeof(struct ggml_tensor *) * size); + result.used = GGML_CALLOC(ggml_bitset_size(size), sizeof(ggml_bitset_t)); + return result; +} + +void ggml_hash_set_reset(struct ggml_hash_set * hash_set) { + memset(hash_set->used, 0, sizeof(ggml_bitset_t) * 
ggml_bitset_size(hash_set->size)); +} + +void ggml_hash_set_free(struct ggml_hash_set * hash_set) { + GGML_FREE(hash_set->used); + GGML_FREE(hash_set->keys); +} + +size_t ggml_hash_size(size_t min_sz) { + // next primes after powers of two + static const size_t primes[] = { + 2, 3, 5, 11, 17, 37, 67, 131, 257, 521, 1031, + 2053, 4099, 8209, 16411, 32771, 65537, 131101, + 262147, 524309, 1048583, 2097169, 4194319, 8388617, + 16777259, 33554467, 67108879, 134217757, 268435459, + 536870923, 1073741827, 2147483659 + }; + static const size_t n_primes = sizeof(primes)/sizeof(primes[0]); + + // find the smallest prime that is larger or equal than min_sz + size_t l = 0; + size_t r = n_primes; + while (l < r) { + size_t m = (l + r)/2; + if (primes[m] < min_sz) { + l = m + 1; + } else { + r = m; + } + } + size_t sz = l < n_primes ? primes[l] : min_sz | 1; + return sz; +} + +struct hash_map { + struct ggml_hash_set set; + struct ggml_tensor ** vals; +}; + +static struct hash_map * ggml_new_hash_map(size_t size) { + struct hash_map * result = GGML_MALLOC(sizeof(struct hash_map)); + result->set = ggml_hash_set_new(size); + result->vals = GGML_CALLOC(result->set.size, sizeof(struct ggml_tensor *)); + return result; +} + +static void ggml_hash_map_free(struct hash_map * map) { + ggml_hash_set_free(&map->set); + GGML_FREE(map->vals); + GGML_FREE(map); +} + +// gradient checkpointing + +static struct ggml_tensor * ggml_recompute_graph_node( + struct ggml_context * ctx, + struct ggml_cgraph * graph, + struct hash_map * replacements, + struct ggml_tensor * node) { + + if (node == NULL) { + return NULL; + } + + if (node->flags & GGML_TENSOR_FLAG_PARAM) { + return node; + } + + if (!ggml_hash_contains(&graph->visited_hash_set, node)) { + return node; + } + + int count_children = 0; + for (int k = 0; k < GGML_MAX_SRC; ++k) { + if (node->src[k]) { + ++count_children; + } + } + + if (count_children == 0) { + return node; + } + + size_t i = ggml_hash_find(&replacements->set, node); + 
GGML_ASSERT(i != GGML_HASHSET_FULL); // assert that not full + if (replacements->set.keys[i] == node) { + return replacements->vals[i]; + } + + struct ggml_tensor * clone = ggml_new_tensor(ctx, node->type, GGML_MAX_DIMS, node->ne); + + // insert clone into replacements + GGML_ASSERT(replacements->set.keys[i] == NULL); // assert that we don't overwrite + replacements->set.keys[i] = node; + replacements->vals[i] = clone; + + clone->op = node->op; + clone->grad = node->grad; + clone->flags = node->flags; + clone->extra = node->extra; + for (int k = 0; k < GGML_MAX_DIMS; ++k) { + clone->nb[k] = node->nb[k]; + } + for (int k = 0; k < GGML_MAX_SRC; ++k) { + clone->src[k] = ggml_recompute_graph_node(ctx, graph, replacements, node->src[k]); + } + if (node->view_src != NULL) { + clone->data = (node->view_src->data == NULL) + ? NULL // view_src not yet allocated + : (char *) node->view_src->data // view_src already allocated + + node->view_offs; + clone->view_src = node->view_src; + clone->view_offs = node->view_offs; + } + + GGML_ASSERT(sizeof(node->op_params) == sizeof(int32_t) * (GGML_MAX_OP_PARAMS / sizeof(int32_t))); + GGML_ASSERT(sizeof(node->name) == GGML_MAX_NAME); + memcpy(clone->op_params, node->op_params, sizeof(node->op_params)); + ggml_format_name(clone, "%s (clone)", ggml_get_name(node)); + + return clone; +} + +void ggml_build_backward_gradient_checkpointing( + struct ggml_context * ctx, + struct ggml_cgraph * gf, + struct ggml_cgraph * gb, + struct ggml_cgraph * gb_tmp, + struct ggml_tensor * * checkpoints, + int n_checkpoints) { + ggml_graph_cpy(gf, gb_tmp); + ggml_build_backward_expand(ctx, gf, gb_tmp, false); + + if (n_checkpoints <= 0) { + ggml_graph_cpy(gb_tmp, gb); + return; + } + + struct hash_map * replacements = ggml_new_hash_map(gf->n_nodes + gf->n_leafs + n_checkpoints); + + // insert checkpoints in replacements + for (int i = 0; i < n_checkpoints; ++i) { + size_t k = ggml_hash_find(&replacements->set, checkpoints[i]); + GGML_ASSERT(k != 
GGML_HASHSET_FULL); // assert that not full + GGML_ASSERT(replacements->set.keys[k] == NULL); // assert that we don't overwrite + replacements->set.keys[k] = checkpoints[i]; + replacements->vals[k] = checkpoints[i]; + } + + ggml_graph_cpy(gf, gb); + // rewrite gb_tmp->nodes[gf->n_nodes:gb_tmp->n_nodes], + // replacing references to gb_tmp->nodes[0:gf->n_nodes] ( == gf->nodes[0:gf->n_nodes]), + // by recomputing them from checkpoints + for (int i = gf->n_nodes; i < gb_tmp->n_nodes; ++i) { // fix: condition was garbled to "in_nodes" (the "< gb_tmp->" span was lost in transit); bound must be gb_tmp->n_nodes, the array indexed below + struct ggml_tensor * node = gb_tmp->nodes[i]; + for (int k = 0; k < GGML_MAX_SRC; ++k) { + // insert new tensors recomputing src, reusing already made replacements, + // remember replacements: remember new tensors with mapping from corresponding gf nodes + // recurse for input tensors, + // unless (i.e. terminating when) input tensors are replacements (like checkpoints) + node->src[k] = ggml_recompute_graph_node(ctx, gf, replacements, node->src[k]); + } + // insert rewritten backward node with replacements made into resulting backward graph gb + ggml_build_forward_expand(gb, node); + } + + ggml_hash_map_free(replacements); +} + +// utility functions to change gradients +// if a is in acc_table, modify gradients in-place and mark result as gradient accumulator +// else if a is in zero_table, replace a +// else, just add/subtract/etc.
the gradients + +static struct ggml_tensor * ggml_add_or_set( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_hash_set * zero_table, + struct ggml_hash_set * acc_table) { + if (ggml_hash_contains(acc_table, a)) { + struct ggml_tensor * ret = ggml_add_impl(ctx, a, b, true); + const size_t insert_result = ggml_hash_insert(acc_table, ret); + GGML_ASSERT(insert_result != GGML_HASHSET_FULL); + GGML_ASSERT(insert_result != GGML_HASHSET_ALREADY_EXISTS); + return ret; + } + if (ggml_hash_contains(zero_table, a)) { + return b; + } + return ggml_add_impl(ctx, a, b, false); +} + +static struct ggml_tensor * ggml_acc_or_set( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + const size_t nb1, + const size_t nb2, + const size_t nb3, + const size_t offset, + struct ggml_hash_set * zero_table, + struct ggml_hash_set * acc_table) { + if (ggml_hash_contains(acc_table, a)) { + struct ggml_tensor * ret = ggml_acc_impl(ctx, a, b, nb1, nb2, nb3, offset, true); + const size_t insert_result = ggml_hash_insert(acc_table, ret); + GGML_ASSERT(insert_result != GGML_HASHSET_FULL); + GGML_ASSERT(insert_result != GGML_HASHSET_ALREADY_EXISTS); + return ret; + } + if (ggml_hash_contains(zero_table, a)) { + struct ggml_tensor * a_zero = ggml_scale(ctx, a, 0.0f); // FIXME this is going to produce NaN if a contains inf/NaN + return ggml_acc_impl(ctx, a_zero, b, nb1, nb2, nb3, offset, false); + } + return ggml_acc_impl(ctx, a, b, nb1, nb2, nb3, offset, false); +} + +static struct ggml_tensor * ggml_add1_or_set( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_hash_set * zero_table, + struct ggml_hash_set * acc_table) { + if (ggml_hash_contains(acc_table, a)) { + struct ggml_tensor * ret = ggml_add1_impl(ctx, a, b, true); + const size_t insert_result = ggml_hash_insert(acc_table, ret); + GGML_ASSERT(insert_result != GGML_HASHSET_FULL); + GGML_ASSERT(insert_result != 
GGML_HASHSET_ALREADY_EXISTS); + return ret; + } + if (ggml_hash_contains(zero_table, a)) { + return ggml_repeat(ctx, b, a); + } + return ggml_add1_impl(ctx, a, b, false); +} + +static struct ggml_tensor * ggml_sub_or_set( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_hash_set * zero_table, + struct ggml_hash_set * acc_table) { + if (ggml_hash_contains(acc_table, a)) { + struct ggml_tensor * ret = ggml_sub_impl(ctx, a, b, true); + const size_t insert_result = ggml_hash_insert(acc_table, ret); + GGML_ASSERT(insert_result != GGML_HASHSET_FULL); + GGML_ASSERT(insert_result != GGML_HASHSET_ALREADY_EXISTS); + return ret; + } + if (ggml_hash_contains(zero_table, a)) { + return ggml_neg(ctx, b); + } + return ggml_sub_impl(ctx, a, b, false); +} + +static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor * tensor, struct ggml_hash_set * zero_table, struct ggml_hash_set * acc_table) { + struct ggml_tensor * src0 = tensor->src[0]; + struct ggml_tensor * src1 = tensor->src[1]; + struct ggml_tensor * src2 = tensor->src[2]; + + switch (tensor->op) { + case GGML_OP_DUP: + { + if (src0->grad) { + src0->grad = ggml_add_or_set(ctx, src0->grad, tensor->grad, zero_table, acc_table); + } + } break; + case GGML_OP_ADD: + { + if (src0->grad) { + src0->grad = ggml_add_or_set(ctx, src0->grad, tensor->grad, zero_table, acc_table); + } + if (src1->grad) { + if (ggml_are_same_shape(src0, src1)) { + src1->grad = ggml_add_or_set(ctx, src1->grad, tensor->grad, zero_table, acc_table); + } else { + src1->grad = ggml_add_or_set(ctx, src1->grad, ggml_repeat_back(ctx, tensor->grad, src1), zero_table, acc_table); + } + } + } break; + case GGML_OP_ADD1: + { + if (src0->grad) { + src0->grad = ggml_add_or_set(ctx, src0->grad, tensor->grad, zero_table, acc_table); + } + if (src1->grad) { + src1->grad = ggml_add_or_set(ctx, + src1->grad, + ggml_mean(ctx, tensor->grad), // TODO: should probably be sum instead of mean + zero_table, 
acc_table); + } + } break; + case GGML_OP_ACC: + { + if (src0->grad) { + src0->grad = ggml_add_or_set(ctx, src0->grad, tensor->grad, zero_table, acc_table); + } + if (src1->grad) { + const size_t nb1 = ((int32_t *) tensor->op_params)[0]; + const size_t nb2 = ((int32_t *) tensor->op_params)[1]; + const size_t nb3 = ((int32_t *) tensor->op_params)[2]; + const size_t offset = ((int32_t *) tensor->op_params)[3]; + + struct ggml_tensor * tensor_grad_view = ggml_view_4d(ctx, + tensor->grad, + src1->grad->ne[0], + src1->grad->ne[1], + src1->grad->ne[2], + src1->grad->ne[3], + nb1, nb2, nb3, offset); + + src1->grad = + ggml_add_or_set(ctx, + src1->grad, + ggml_reshape(ctx, + ggml_cont(ctx, tensor_grad_view), + src1->grad), + zero_table, acc_table); + } + } break; + case GGML_OP_SUB: + { + if (src0->grad) { + src0->grad = ggml_add_or_set(ctx, src0->grad, tensor->grad, zero_table, acc_table); + } + if (src1->grad) { + src1->grad = ggml_sub_or_set(ctx, src1->grad, tensor->grad, zero_table, acc_table); + } + } break; + case GGML_OP_MUL: + { + if (src0->grad) { + src0->grad = + ggml_add_or_set(ctx, + src0->grad, + ggml_mul(ctx, src1, tensor->grad), + zero_table, acc_table); + } + if (src1->grad) { + src1->grad = + ggml_add_or_set(ctx, + src1->grad, + ggml_mul(ctx, src0, tensor->grad), + zero_table, acc_table); + } + } break; + case GGML_OP_DIV: + { + if (src0->grad) { + src0->grad = + ggml_add_or_set(ctx, + src0->grad, + ggml_div(ctx, tensor->grad, src1), + zero_table, acc_table); + } + if (src1->grad) { + src1->grad = + ggml_sub_or_set(ctx, + src1->grad, + ggml_mul(ctx, + tensor->grad, + ggml_div(ctx, tensor, src1)), + zero_table, acc_table); + } + } break; + case GGML_OP_SQR: + { + if (src0->grad) { + src0->grad = + ggml_add_or_set(ctx, + src0->grad, + ggml_scale(ctx, + ggml_mul(ctx, src0, tensor->grad), + 2.0f), + zero_table, acc_table); + } + } break; + case GGML_OP_SQRT: + { + if (src0->grad) { + src0->grad = + ggml_add_or_set(ctx, + src0->grad, + ggml_scale(ctx, + 
ggml_div(ctx, + tensor->grad, + tensor), + 0.5f), + zero_table, acc_table); + } + } break; + case GGML_OP_LOG: + { + if (src0->grad) { + src0->grad = + ggml_add_or_set(ctx, + src0->grad, + ggml_div(ctx, + tensor->grad, + src0), + zero_table, acc_table); + } + } break; + case GGML_OP_SIN: + { + if (src0->grad) { + src0->grad = + ggml_add_or_set(ctx, + src0->grad, + ggml_mul(ctx, + tensor->grad, + ggml_cos(ctx, src0)), + zero_table, acc_table); + } + } break; + case GGML_OP_COS: + { + if (src0->grad) { + src0->grad = + ggml_sub_or_set(ctx, + src0->grad, + ggml_mul(ctx, + tensor->grad, + ggml_sin(ctx, src0)), + zero_table, acc_table); + } + } break; + case GGML_OP_SUM: + { + if (src0->grad) { + src0->grad = + ggml_add1_or_set(ctx, + src0->grad, + tensor->grad, + zero_table, acc_table); + } + } break; + case GGML_OP_SUM_ROWS: + { + if (src0->grad) { + src0->grad = + ggml_add_or_set(ctx, + src0->grad, + ggml_repeat(ctx, + tensor->grad, + src0->grad), + zero_table, acc_table); + } + } break; + case GGML_OP_MEAN: + case GGML_OP_ARGMAX: + { + GGML_ABORT("fatal error"); // TODO: implement + } + case GGML_OP_REPEAT: + { + // necessary for llama + if (src0->grad) { + src0->grad = ggml_add_or_set(ctx, + src0->grad, + ggml_repeat_back(ctx, tensor->grad, src0->grad), + zero_table, acc_table); + } + } break; + case GGML_OP_REPEAT_BACK: + { + if (src0->grad) { + // TODO: test this + src0->grad = ggml_add_or_set(ctx, + src0->grad, + ggml_repeat(ctx, tensor->grad, src0->grad), + zero_table, acc_table); + } + } break; + case GGML_OP_CONCAT: + { + GGML_ABORT("fatal error"); // TODO: implement + } + case GGML_OP_SILU_BACK: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_NORM: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_RMS_NORM: + { + // necessary for llama + if (src0->grad) { + float eps; + memcpy(&eps, tensor->op_params, sizeof(float)); + + src0->grad = ggml_add_or_set(ctx, + src0->grad, + ggml_rms_norm_back(ctx, src0, 
tensor->grad, eps), + zero_table, acc_table); + } + } break; + case GGML_OP_RMS_NORM_BACK: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_GROUP_NORM: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_MUL_MAT: + { + // https://cs231n.github.io/optimization-2/#staged + // # forward pass + // s0 = np.random.randn(5, 10) + // s1 = np.random.randn(10, 3) + // t = s0.dot(s1) + + // # now suppose we had the gradient on t from above in the circuit + // dt = np.random.randn(*t.shape) # same shape as t + // ds0 = dt.dot(s1.T) #.T gives the transpose of the matrix + // ds1 = t.T.dot(dt) + + // tensor.shape [m,p,qq,rr] + // src0.shape [n,m,q1,r1] + // src1.shape [n,p,qq,rr] + + // necessary for llama + if (src0->grad) { + struct ggml_tensor * s1_tg = + ggml_out_prod(ctx, // [n,m,qq,rr] + src1, // [n,p,qq,rr] + tensor->grad); // [m,p,qq,rr] + const int64_t qq = s1_tg->ne[2]; + const int64_t rr = s1_tg->ne[3]; + const int64_t q1 = src0->ne[2]; + const int64_t r1 = src0->ne[3]; + const bool ne2_broadcasted = qq > q1; + const bool ne3_broadcasted = rr > r1; + if (ne2_broadcasted || ne3_broadcasted) { + // sum broadcast repetitions of s1_tg into shape of src0 + s1_tg = ggml_repeat_back(ctx, s1_tg, src0); + } + src0->grad = + ggml_add_or_set(ctx, + src0->grad, // [n,m,q1,r1] + s1_tg, // [n,m,q1,r1] + zero_table, acc_table); + } + if (src1->grad) { + src1->grad = + ggml_add_or_set(ctx, + src1->grad, // [n,p,qq,rr] + // ggml_mul_mat(ctx, // [n,p,qq,rr] + // ggml_cont(ctx, // [m,n,q1,r1] + // ggml_transpose(ctx, src0)), // [m,n,q1,r1] + // tensor->grad), // [m,p,qq,rr] + + // // when src0 is bigger than tensor->grad (this is mostly the case in llama), + // // avoid transpose of src0, rather transpose smaller tensor->grad + // // and then use ggml_out_prod + ggml_out_prod(ctx, // [n,p,qq,rr] + src0, // [n,m,q1,r1] + ggml_transpose(ctx, // [p,m,qq,rr] + tensor->grad)), // [m,p,qq,rr] + zero_table, acc_table); + } + } break; + case 
GGML_OP_MUL_MAT_ID: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_OUT_PROD: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_SCALE: + { + // necessary for llama + if (src0->grad) { + float s; + memcpy(&s, tensor->op_params, sizeof(float)); + + src0->grad = + ggml_add_or_set(ctx, + src0->grad, + ggml_scale_impl(ctx, tensor->grad, s, false), + zero_table, acc_table); + } + } break; + case GGML_OP_SET: + { + const size_t nb1 = ((int32_t *) tensor->op_params)[0]; + const size_t nb2 = ((int32_t *) tensor->op_params)[1]; + const size_t nb3 = ((int32_t *) tensor->op_params)[2]; + const size_t offset = ((int32_t *) tensor->op_params)[3]; + + struct ggml_tensor * tensor_grad_view = NULL; + + if (src0->grad || src1->grad) { + GGML_ASSERT(src0->type == tensor->type); + GGML_ASSERT(tensor->grad->type == tensor->type); + GGML_ASSERT(!src1->grad || src1->grad->type == tensor->grad->type); + + tensor_grad_view = ggml_view_4d(ctx, + tensor->grad, src1->ne[0], src1->ne[1], src1->ne[2], src1->ne[3], + nb1, nb2, nb3, offset); + } + + if (src0->grad) { + src0->grad = ggml_add_or_set(ctx, + src0->grad, + ggml_acc_impl(ctx, + tensor->grad, + ggml_neg(ctx, tensor_grad_view), + nb1, nb2, nb3, offset, false), + zero_table, acc_table); + } + + if (src1->grad) { + src1->grad = + ggml_add_or_set(ctx, + src1->grad, + ggml_reshape(ctx, + ggml_cont(ctx, tensor_grad_view), + src1->grad), + zero_table, acc_table); + } + } break; + case GGML_OP_CPY: + { + // necessary for llama + // cpy overwrites value of src1 by src0 and returns view(src1) + // the overwriting is mathematically equivalent to: + // tensor = src0 * 1 + src1 * 0 + if (src0->grad) { + // dsrc0 = dtensor * 1 + src0->grad = ggml_add_or_set(ctx, src0->grad, tensor->grad, zero_table, acc_table); + } + if (src1->grad) { + // dsrc1 = dtensor * 0 -> noop + } + } break; + case GGML_OP_CONT: + { + // same as cpy + if (src0->grad) { + GGML_ASSERT(ggml_is_contiguous(src0->grad)); + 
GGML_ASSERT(ggml_is_contiguous(tensor->grad)); + src0->grad = ggml_add_or_set(ctx, src0->grad, tensor->grad, zero_table, acc_table); + } + } break; + case GGML_OP_RESHAPE: + { + // necessary for llama + if (src0->grad) { + src0->grad = + ggml_add_or_set(ctx, src0->grad, + ggml_reshape(ctx, + ggml_is_contiguous(tensor->grad) + ? tensor->grad + : ggml_cont(ctx, tensor->grad), + src0->grad), + zero_table, acc_table); + } + } break; + case GGML_OP_VIEW: + { + // necessary for llama + if (src0->grad) { + size_t offset; + + memcpy(&offset, tensor->op_params, sizeof(offset)); + + size_t nb1 = tensor->nb[1]; + size_t nb2 = tensor->nb[2]; + size_t nb3 = tensor->nb[3]; + + if (src0->type != src0->grad->type) { + // gradient is typically F32, but src0 could be other type + size_t ng = ggml_element_size(src0->grad); + size_t n0 = ggml_element_size(src0); + GGML_ASSERT(offset % n0 == 0); + GGML_ASSERT(nb1 % n0 == 0); + GGML_ASSERT(nb2 % n0 == 0); + GGML_ASSERT(nb3 % n0 == 0); + offset = (offset / n0) * ng; + nb1 = (nb1 / n0) * ng; + nb2 = (nb2 / n0) * ng; + nb3 = (nb3 / n0) * ng; + } + + src0->grad = ggml_acc_or_set(ctx, src0->grad, tensor->grad, nb1, nb2, nb3, offset, zero_table, acc_table); + } + } break; + case GGML_OP_PERMUTE: + { + // necessary for llama + if (src0->grad) { + int32_t * axes = (int32_t *) tensor->op_params; + int axis0 = axes[0] & 0x3; + int axis1 = axes[1] & 0x3; + int axis2 = axes[2] & 0x3; + int axis3 = axes[3] & 0x3; + int axes_backward[4] = {0,0,0,0}; + axes_backward[axis0] = 0; + axes_backward[axis1] = 1; + axes_backward[axis2] = 2; + axes_backward[axis3] = 3; + src0->grad = + ggml_add_or_set(ctx, src0->grad, + ggml_permute(ctx, + tensor->grad, + axes_backward[0], + axes_backward[1], + axes_backward[2], + axes_backward[3]), + zero_table, acc_table); + } + } break; + case GGML_OP_TRANSPOSE: + { + // necessary for llama + if (src0->grad) { + src0->grad = + ggml_add_or_set(ctx, src0->grad, + ggml_transpose(ctx, tensor->grad), + zero_table, acc_table); + 
} + } break; + case GGML_OP_GET_ROWS: + { + // necessary for llama (only for tokenizer) + if (src0->grad) { + src0->grad = + ggml_add_or_set(ctx, src0->grad, + // last ggml_get_rows_back argument src0->grad is only + // necessary to setup correct output shape + ggml_get_rows_back(ctx, tensor->grad, src1, src0->grad), + zero_table, acc_table); + } + if (src1->grad) { + // noop + } + } break; + case GGML_OP_GET_ROWS_BACK: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_DIAG: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_DIAG_MASK_INF: + { + // necessary for llama + if (src0->grad) { + const int n_past = ((int32_t *) tensor->op_params)[0]; + src0->grad = + ggml_add_or_set(ctx, src0->grad, + /* ggml_diag_mask_inf_impl() shouldn't be here */ + /* ref: https://github.com/ggerganov/llama.cpp/pull/4203#discussion_r1412377992 */ + ggml_diag_mask_zero_impl(ctx, tensor->grad, n_past, false), + zero_table, acc_table); + } + } break; + case GGML_OP_DIAG_MASK_ZERO: + { + // necessary for llama + if (src0->grad) { + const int n_past = ((int32_t *) tensor->op_params)[0]; + src0->grad = + ggml_add_or_set(ctx, src0->grad, + ggml_diag_mask_zero_impl(ctx, tensor->grad, n_past, false), + zero_table, acc_table); + } + } break; + case GGML_OP_SOFT_MAX: + { + // necessary for llama + if (src0->grad) { + src0->grad = + ggml_add_or_set(ctx, src0->grad, + ggml_soft_max_back(ctx, tensor->grad, tensor), + zero_table, acc_table); + } + GGML_ASSERT((!src1 || !src1->grad) && "backward pass for softmax mask not implemented"); + } break; + case GGML_OP_SOFT_MAX_BACK: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_ROPE: + { + // necessary for llama + if (src0->grad) { + //const int n_past = ((int32_t *) tensor->op_params)[0]; + const int n_dims = ((int32_t *) tensor->op_params)[1]; + const int mode = ((int32_t *) tensor->op_params)[2]; + //const int n_ctx = ((int32_t *) tensor->op_params)[3]; + const int 
n_ctx_orig = ((int32_t *) tensor->op_params)[4]; + float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow; + + memcpy(&freq_base, (int32_t *) tensor->op_params + 5, sizeof(float)); + memcpy(&freq_scale, (int32_t *) tensor->op_params + 6, sizeof(float)); + memcpy(&ext_factor, (int32_t *) tensor->op_params + 7, sizeof(float)); + memcpy(&attn_factor, (int32_t *) tensor->op_params + 8, sizeof(float)); + memcpy(&beta_fast, (int32_t *) tensor->op_params + 9, sizeof(float)); + memcpy(&beta_slow, (int32_t *) tensor->op_params + 10, sizeof(float)); + + src0->grad = ggml_add_or_set(ctx, + src0->grad, + ggml_rope_back(ctx, + tensor->grad, + src1, + src2, + n_dims, + mode, + n_ctx_orig, + freq_base, + freq_scale, + ext_factor, + attn_factor, + beta_fast, + beta_slow), + zero_table, acc_table); + } + GGML_ASSERT((!src2 || !src2->grad) && "gradients for freq factors not implemented"); + } break; + case GGML_OP_ROPE_BACK: + { + if (src0->grad) { + //const int n_past = ((int32_t *) tensor->op_params)[0]; + const int n_dims = ((int32_t *) tensor->op_params)[1]; + const int mode = ((int32_t *) tensor->op_params)[2]; + //const int n_ctx = ((int32_t *) tensor->op_params)[3]; + const int n_ctx_orig = ((int32_t *) tensor->op_params)[4]; + float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow; + + memcpy(&freq_base, (int32_t *) tensor->op_params + 5, sizeof(float)); + memcpy(&freq_scale, (int32_t *) tensor->op_params + 6, sizeof(float)); + memcpy(&ext_factor, (int32_t *) tensor->op_params + 7, sizeof(float)); + memcpy(&attn_factor, (int32_t *) tensor->op_params + 8, sizeof(float)); + memcpy(&beta_fast, (int32_t *) tensor->op_params + 9, sizeof(float)); + memcpy(&beta_slow, (int32_t *) tensor->op_params + 10, sizeof(float)); + + src0->grad = ggml_add_or_set(ctx, + src0->grad, + ggml_rope_impl(ctx, + tensor->grad, + src1, + src2, + n_dims, + mode, + n_ctx_orig, + freq_base, + freq_scale, + ext_factor, + attn_factor, + beta_fast, + beta_slow, + 
false), + zero_table, acc_table); + } + } break; + case GGML_OP_CLAMP: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_CONV_TRANSPOSE_1D: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_IM2COL: + { + if (src1->grad) { + const int32_t s0 = ggml_get_op_params_i32(tensor, 0); + const int32_t s1 = ggml_get_op_params_i32(tensor, 1); + const int32_t p0 = ggml_get_op_params_i32(tensor, 2); + const int32_t p1 = ggml_get_op_params_i32(tensor, 3); + const int32_t d0 = ggml_get_op_params_i32(tensor, 4); + const int32_t d1 = ggml_get_op_params_i32(tensor, 5); + const bool is_2D = ggml_get_op_params_i32(tensor, 6) == 1; + + src1->grad = ggml_add_or_set(ctx, + src1->grad, + ggml_im2col_back(ctx, src0, tensor->grad, src1->ne, s0, s1, p0, p1, d0, d1, is_2D), + zero_table, acc_table); + } + } break; + case GGML_OP_IM2COL_BACK: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_CONV_TRANSPOSE_2D: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_POOL_1D: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_POOL_2D: + { + if (src0->grad) { + const enum ggml_op_pool op = ggml_get_op_params_i32(tensor, 0); + const int32_t k0 = ggml_get_op_params_i32(tensor, 1); + const int32_t k1 = ggml_get_op_params_i32(tensor, 2); + const int32_t s0 = ggml_get_op_params_i32(tensor, 3); + const int32_t s1 = ggml_get_op_params_i32(tensor, 4); + const int32_t p0 = ggml_get_op_params_i32(tensor, 5); + const int32_t p1 = ggml_get_op_params_i32(tensor, 6); + + src0->grad = ggml_add_or_set(ctx, + src0->grad, + ggml_pool_2d_back(ctx, tensor->grad, src0, op, k0, k1, s0, s1, p0, p1), + zero_table, acc_table); + } + } break; + case GGML_OP_POOL_2D_BACK: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_UPSCALE: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_PAD: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } 
+ case GGML_OP_UNPAD: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_ARANGE: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_TIMESTEP_EMBEDDING: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_ARGSORT: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_LEAKY_RELU: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_FLASH_ATTN_EXT: + { + GGML_ABORT("FA backward pass not adapted after rework"); + struct ggml_tensor * flash_grad = NULL; + if (src0->grad || src1->grad || tensor->src[2]->grad) { + int32_t t = ggml_get_op_params_i32(tensor, 0); + GGML_ASSERT(t == 0 || t == 1); + bool masked = t != 0; + flash_grad = + ggml_flash_attn_back(ctx, + src0, + src1, + tensor->src[2], + tensor->grad, + masked); + } + + const int64_t elem_q = ggml_nelements(src0); + const int64_t elem_k = ggml_nelements(src1); + const int64_t elem_v = ggml_nelements(src2); + + enum ggml_type result_type = flash_grad->type; + GGML_ASSERT(ggml_blck_size(result_type) == 1); + const size_t tsize = ggml_type_size(result_type); + + const size_t offs_q = 0; + const size_t offs_k = offs_q + GGML_PAD(elem_q * tsize, GGML_MEM_ALIGN); + const size_t offs_v = offs_k + GGML_PAD(elem_k * tsize, GGML_MEM_ALIGN); + + if (src0->grad) { + struct ggml_tensor * view_q = ggml_view_1d(ctx, flash_grad, elem_q, offs_q); + struct ggml_tensor * grad_q = ggml_reshape(ctx, view_q, src0); + src0->grad = ggml_add_or_set(ctx, + src0->grad, + grad_q, + zero_table, acc_table); + } + if (src1->grad) { + struct ggml_tensor * view_k = ggml_view_1d(ctx, flash_grad, elem_k, offs_k); + struct ggml_tensor * grad_k = ggml_reshape(ctx, view_k, src1); + src1->grad = ggml_add_or_set(ctx, + src1->grad, + grad_k, + zero_table, acc_table); + } + if (src2->grad) { + struct ggml_tensor * view_v = ggml_view_1d(ctx, flash_grad, elem_v, offs_v); + struct ggml_tensor * grad_v = ggml_reshape(ctx, view_v, src2); + 
src2->grad = ggml_add_or_set(ctx, + src2->grad, + grad_v, + zero_table, acc_table); + } + } break; + case GGML_OP_FLASH_ATTN_BACK: + { + GGML_ABORT("fatal error"); // not supported + } + case GGML_OP_SSM_CONV: + case GGML_OP_SSM_SCAN: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_OP_WIN_PART: + case GGML_OP_WIN_UNPART: + case GGML_OP_UNARY: + { + switch (ggml_get_unary_op(tensor)) { + case GGML_UNARY_OP_ABS: + { + if (src0->grad) { + src0->grad = + ggml_add_or_set(ctx, + src0->grad, + ggml_mul(ctx, + ggml_sgn(ctx, src0), + tensor->grad), + zero_table, acc_table); + } + } break; + case GGML_UNARY_OP_SGN: + { + if (src0->grad) { + // noop + } + } break; + case GGML_UNARY_OP_NEG: + { + if (src0->grad) { + src0->grad = ggml_sub_or_set(ctx, src0->grad, tensor->grad, zero_table, acc_table); + } + } break; + case GGML_UNARY_OP_STEP: + { + if (src0->grad) { + // noop + } + } break; + case GGML_UNARY_OP_TANH: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_UNARY_OP_ELU: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_UNARY_OP_RELU: + { + if (src0->grad) { + src0->grad = ggml_add_or_set(ctx, + src0->grad, + ggml_mul(ctx, + ggml_step(ctx, src0), + tensor->grad), + zero_table, acc_table); + } + } break; + case GGML_UNARY_OP_SIGMOID: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_UNARY_OP_GELU: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_UNARY_OP_GELU_QUICK: + { + GGML_ABORT("fatal error"); // TODO: not implemented + } + case GGML_UNARY_OP_SILU: + { + // necessary for llama + if (src0->grad) { + src0->grad = ggml_add_or_set(ctx, + src0->grad, + ggml_silu_back(ctx, src0, tensor->grad), + zero_table, acc_table); + } + } break; + case GGML_UNARY_OP_EXP: + { + if (src0->grad) { + src0->grad = ggml_add_or_set(ctx, + src0->grad, + ggml_mul(ctx, tensor, tensor->grad), + zero_table, acc_table); + } + } break; + default: + GGML_ABORT("fatal error"); + 
} + } break; + case GGML_OP_GET_REL_POS: + case GGML_OP_ADD_REL_POS: + case GGML_OP_RWKV_WKV: + case GGML_OP_MAP_UNARY: + case GGML_OP_MAP_BINARY: + case GGML_OP_MAP_CUSTOM1_F32: + case GGML_OP_MAP_CUSTOM2_F32: + case GGML_OP_MAP_CUSTOM3_F32: + case GGML_OP_MAP_CUSTOM1: + case GGML_OP_MAP_CUSTOM2: + case GGML_OP_MAP_CUSTOM3: + { + GGML_ABORT("fatal error"); // not supported + } + case GGML_OP_CROSS_ENTROPY_LOSS: + { + if (src0->grad) { + src0->grad = ggml_add_or_set(ctx, + src0->grad, + ggml_cross_entropy_loss_back(ctx, + src0, + src1, + tensor->grad), + zero_table, acc_table); + } + GGML_ASSERT(!src1->grad && "backward pass for labels not implemented"); + } break; + case GGML_OP_CROSS_ENTROPY_LOSS_BACK: + { + GGML_ABORT("fatal error"); // not supported + } + case GGML_OP_OPT_STEP_ADAMW: + { + GGML_ABORT("fatal error"); // not supported + } + case GGML_OP_NONE: + { + // nop + } break; + case GGML_OP_COUNT: + { + GGML_ABORT("fatal error"); + } + } + + for (int i = 0; i < GGML_MAX_SRC; ++i) { + if (tensor->src[i] && tensor->src[i]->grad) { + GGML_ASSERT(ggml_are_same_shape(tensor->src[i], tensor->src[i]->grad)); + } + } +} + +static void ggml_visit_parents(struct ggml_cgraph * cgraph, struct ggml_tensor * node) { + if (node->grad == NULL) { + // this usually happens when we generate intermediate nodes from constants in the backward pass + // it can also happen during forward pass, if the user performs computations with constants + if (node->op != GGML_OP_NONE) { + //GGML_PRINT_DEBUG("%s: warning: node %p has no grad, but op %d\n", __func__, (void *) node, node->op); + } + } + + // check if already visited + if (ggml_hash_insert(&cgraph->visited_hash_set, node) == GGML_HASHSET_ALREADY_EXISTS) { + return; + } + + for (int i = 0; i < GGML_MAX_SRC; ++i) { + const int k = + (cgraph->order == GGML_CGRAPH_EVAL_ORDER_LEFT_TO_RIGHT) ? i : + (cgraph->order == GGML_CGRAPH_EVAL_ORDER_RIGHT_TO_LEFT) ? 
(GGML_MAX_SRC-1-i) : + /* unknown order, just fall back to using i*/ i; + if (node->src[k]) { + ggml_visit_parents(cgraph, node->src[k]); + } + } + + if (node->op == GGML_OP_NONE && !(node->flags & GGML_TENSOR_FLAG_PARAM)) { + // reached a leaf node, not part of the gradient graph (e.g. a constant) + GGML_ASSERT(cgraph->n_leafs < cgraph->size); + + if (strlen(node->name) == 0) { + ggml_format_name(node, "leaf_%d", cgraph->n_leafs); + } + + cgraph->leafs[cgraph->n_leafs] = node; + cgraph->n_leafs++; + } else { + GGML_ASSERT(cgraph->n_nodes < cgraph->size); + + if (strlen(node->name) == 0) { + ggml_format_name(node, "node_%d", cgraph->n_nodes); + } + + cgraph->nodes[cgraph->n_nodes] = node; + cgraph->n_nodes++; + } +} + +static void ggml_build_forward_impl(struct ggml_cgraph * cgraph, struct ggml_tensor * tensor, bool expand) { + if (!expand) { + // TODO: this branch isn't accessible anymore, maybe move this to ggml_build_forward_expand + ggml_graph_clear(cgraph); + } + + const int n0 = cgraph->n_nodes; + + ggml_visit_parents(cgraph, tensor); + + const int n_new = cgraph->n_nodes - n0; + GGML_PRINT_DEBUG("%s: visited %d new nodes\n", __func__, n_new); + + if (n_new > 0) { + // the last added node should always be starting point + GGML_ASSERT(cgraph->nodes[cgraph->n_nodes - 1] == tensor); + } +} + +void ggml_build_forward_expand(struct ggml_cgraph * cgraph, struct ggml_tensor * tensor) { + ggml_build_forward_impl(cgraph, tensor, true); +} + +void ggml_build_backward_expand(struct ggml_context * ctx, struct ggml_cgraph * gf, struct ggml_cgraph * gb, bool accumulate) { + GGML_ASSERT(gf->n_nodes > 0); + GGML_ASSERT(gf->grads); + + for (int i = 0; i < gf->n_nodes; ++i) { + struct ggml_tensor * node = gf->nodes[i]; + + bool needs_grad = node->flags & GGML_TENSOR_FLAG_PARAM; + bool ignore_src[GGML_MAX_SRC] = {false}; + switch (node->op) { + // gradients in node->src[0] for one reason or another have no effect on output gradients + case GGML_OP_IM2COL: // only used for its 
shape + case GGML_OP_IM2COL_BACK: // same as IM2COL + ignore_src[0] = true; + break; + case GGML_OP_UNARY: { + const enum ggml_unary_op uop = ggml_get_unary_op(node); + // SGN and STEP unary ops are piecewise constant + if (uop == GGML_UNARY_OP_SGN || uop == GGML_UNARY_OP_STEP) { + ignore_src[0] = true; + } + } break; + + // gradients in node->src[1] for one reason or another have no effect on output gradients + case GGML_OP_CPY: // gradients in CPY target are irrelevant + case GGML_OP_GET_ROWS: // row indices not differentiable + case GGML_OP_GET_ROWS_BACK: // same as for GET_ROWS + case GGML_OP_ROPE: // positions not differentiable + ignore_src[1] = true; + break; + + default: + break; + } + for (int j = 0; j < GGML_MAX_SRC; ++j) { + if (!node->src[j] || !node->src[j]->grad || ignore_src[j]) { + continue; + } + GGML_ASSERT(node->src[j]->type == GGML_TYPE_F32 || node->src[j]->type == GGML_TYPE_F16); + needs_grad = true; + break; + } + if (!needs_grad) { + continue; + } + + // inplace operations are currently not supported + GGML_ASSERT(!node->view_src || node->op == GGML_OP_CPY || node->op == GGML_OP_VIEW || + node->op == GGML_OP_RESHAPE || node->op == GGML_OP_PERMUTE || node->op == GGML_OP_TRANSPOSE); + + // create a new tensor with the same type and shape as the node and set it as grad + node->grad = ggml_dup_tensor(ctx, node); + } + + // keep tables of original gradients for replacement/accumulation logic + struct ggml_hash_set zero_table = ggml_hash_set_new(gf->size); + struct ggml_hash_set acc_table = ggml_hash_set_new(gf->size); + for (int i = 0; i < gf->n_nodes; i++) { + struct ggml_tensor * node = gf->nodes[i]; + + if (node->grad) { + { + const size_t insert_result = ggml_hash_insert(&zero_table, node->grad); + GGML_ASSERT(insert_result != GGML_HASHSET_FULL); + GGML_ASSERT(insert_result != GGML_HASHSET_ALREADY_EXISTS); + } + + // only gradients of trainable parameters should be accumulated + if (accumulate && (node->flags & GGML_TENSOR_FLAG_PARAM)) { + 
const size_t insert_result = ggml_hash_insert(&acc_table, node->grad); + GGML_ASSERT(insert_result != GGML_HASHSET_FULL); + GGML_ASSERT(insert_result != GGML_HASHSET_ALREADY_EXISTS); + } + } + } + + for (int i = gf->n_nodes - 1; i >= 0; i--) { + struct ggml_tensor * node = gf->nodes[i]; + + // inplace operations to add gradients are not created by ggml_compute_backward except for gradient accumulation + // use allocator to automatically make inplace operations + if (node->grad) { + ggml_compute_backward(ctx, node, &zero_table, &acc_table); + } + } + + for (int i = 0; i < gf->n_nodes; i++) { + struct ggml_tensor * node = gf->nodes[i]; + + if (node->flags & GGML_TENSOR_FLAG_PARAM) { + GGML_PRINT_DEBUG("%s: found root node %p\n", __func__, (void *) node); + ggml_build_forward_expand(gb, node->grad); + } + } + + ggml_hash_set_free(&zero_table); + ggml_hash_set_free(&acc_table); +} + +void ggml_build_opt_adamw( + struct ggml_context * ctx, + struct ggml_cgraph * gf, + struct ggml_cgraph * gb, + float alpha, + float beta1, + float beta2, + float eps, + float wd) { + for (int i = 0; i < gf->n_nodes; i++) { + struct ggml_tensor * node = gf->nodes[i]; + + if (node->flags & GGML_TENSOR_FLAG_PARAM) { + GGML_PRINT_DEBUG("%s: found root node %p\n", __func__, (void *) node); + struct ggml_tensor * opt_step = ggml_opt_step_adamw(ctx, node, node->grad, alpha, beta1, beta2, eps, wd); + ggml_build_forward_expand(gb, opt_step); + } + } +} + + +static void * incr_ptr_aligned(void ** p, size_t size, size_t align) { + void * ptr = *p; + ptr = (void *) GGML_PAD((uintptr_t) ptr, align); + *p = (void *) ((char *) ptr + size); + return ptr; +} + +static size_t ggml_graph_nbytes(size_t size, bool grads) { + size_t hash_size = ggml_hash_size(size * 2); + void * p = 0; + incr_ptr_aligned(&p, sizeof(struct ggml_cgraph), 1); + incr_ptr_aligned(&p, size * sizeof(struct ggml_tensor *), sizeof(struct ggml_tensor *)); // nodes + incr_ptr_aligned(&p, size * sizeof(struct ggml_tensor *), sizeof(struct 
ggml_tensor *)); // leafs + incr_ptr_aligned(&p, hash_size * sizeof(struct ggml_tensor *), sizeof(struct ggml_tensor *)); // hash keys + if (grads) { + incr_ptr_aligned(&p, size * sizeof(struct ggml_tensor *), sizeof(struct ggml_tensor *)); // grads + } + incr_ptr_aligned(&p, ggml_bitset_size(hash_size) * sizeof(ggml_bitset_t), sizeof(ggml_bitset_t)); + + size_t nbytes = (size_t) p; + return nbytes; +} + +size_t ggml_graph_overhead_custom(size_t size, bool grads) { + return GGML_OBJECT_SIZE + GGML_PAD(ggml_graph_nbytes(size, grads), GGML_MEM_ALIGN); +} + +size_t ggml_graph_overhead(void) { + return ggml_graph_overhead_custom(GGML_DEFAULT_GRAPH_SIZE, false); +} + +struct ggml_cgraph * ggml_new_graph_custom(struct ggml_context * ctx, size_t size, bool grads) { + const size_t obj_size = ggml_graph_nbytes(size, grads); + struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_TYPE_GRAPH, obj_size); + struct ggml_cgraph * cgraph = (struct ggml_cgraph *) ((char *) ctx->mem_buffer + obj->offs); + + // the size of the hash table is doubled since it needs to hold both nodes and leafs + size_t hash_size = ggml_hash_size(size * 2); + + void * p = cgraph + 1; + + struct ggml_tensor ** nodes_ptr = incr_ptr_aligned(&p, size * sizeof(struct ggml_tensor *), sizeof(struct ggml_tensor *)); + struct ggml_tensor ** leafs_ptr = incr_ptr_aligned(&p, size * sizeof(struct ggml_tensor *), sizeof(struct ggml_tensor *)); + struct ggml_tensor ** hash_keys_ptr = incr_ptr_aligned(&p, hash_size * sizeof(struct ggml_tensor *), sizeof(struct ggml_tensor *)); + struct ggml_tensor ** grads_ptr = grads ? 
incr_ptr_aligned(&p, size * sizeof(struct ggml_tensor *), sizeof(struct ggml_tensor *)) : NULL; + ggml_bitset_t * hash_used = incr_ptr_aligned(&p, ggml_bitset_size(hash_size) * sizeof(ggml_bitset_t), sizeof(ggml_bitset_t)); + + // check that we allocated the correct amount of memory + assert(obj_size == (size_t)((char *)p - (char *)cgraph)); + + *cgraph = (struct ggml_cgraph) { + /*.size =*/ size, + /*.n_nodes =*/ 0, + /*.n_leafs =*/ 0, + /*.nodes =*/ nodes_ptr, + /*.grads =*/ grads_ptr, + /*.leafs =*/ leafs_ptr, + /*.hash_table =*/ { hash_size, hash_used, hash_keys_ptr }, + /*.order =*/ GGML_CGRAPH_EVAL_ORDER_LEFT_TO_RIGHT, + }; + + ggml_hash_set_reset(&cgraph->visited_hash_set); + + return cgraph; +} + +struct ggml_cgraph * ggml_new_graph(struct ggml_context * ctx) { + return ggml_new_graph_custom(ctx, GGML_DEFAULT_GRAPH_SIZE, false); +} + +struct ggml_cgraph ggml_graph_view(struct ggml_cgraph * cgraph0, int i0, int i1) { + struct ggml_cgraph cgraph = { + /*.size =*/ 0, + /*.n_nodes =*/ i1 - i0, + /*.n_leafs =*/ 0, + /*.nodes =*/ cgraph0->nodes + i0, + /*.grads =*/ cgraph0->grads ? 
cgraph0->grads + i0 : NULL, + /*.leafs =*/ NULL, + /*.hash_table =*/ { 0, NULL, NULL }, + /*.order =*/ cgraph0->order, + }; + + return cgraph; +} + +void ggml_graph_cpy(struct ggml_cgraph * src, struct ggml_cgraph * dst) { + GGML_ASSERT(dst->size >= src->n_leafs); + GGML_ASSERT(dst->size >= src->n_nodes); + GGML_ASSERT(dst->visited_hash_set.size >= src->visited_hash_set.size); + + dst->n_leafs = src->n_leafs; + dst->n_nodes = src->n_nodes; + dst->order = src->order; + + for (int i = 0; i < src->n_leafs; ++i) { + dst->leafs[i] = src->leafs[i]; + } + + for (int i = 0; i < src->n_nodes; ++i) { + dst->nodes[i] = src->nodes[i]; + } + + if (src->grads) { + GGML_ASSERT(dst->grads != NULL); + for (int i = 0; i < src->n_nodes; ++i) { + dst->grads[i] = src->grads[i]; + } + } + + for (size_t i = 0; i < src->visited_hash_set.size; ++i) { + // copy all hashset keys (tensors) that are in use + if (ggml_bitset_get(src->visited_hash_set.used, i)) { + ggml_hash_insert(&dst->visited_hash_set, src->visited_hash_set.keys[i]); + } + } +} + +struct ggml_cgraph * ggml_graph_dup(struct ggml_context * ctx, struct ggml_cgraph * cgraph) { + struct ggml_cgraph * result = ggml_new_graph_custom(ctx, cgraph->size, cgraph->grads != NULL); + ggml_graph_cpy(cgraph, result); + return result; +} + +void ggml_graph_reset(struct ggml_cgraph * cgraph) { + GGML_ASSERT(cgraph->grads != NULL); + + for (int i = 0; i < cgraph->n_nodes; i++) { + struct ggml_tensor * node = cgraph->nodes[i]; + + // initial gradients of loss should be 1, 0 otherwise + if (node->grad) { + if (node->flags & GGML_TENSOR_FLAG_LOSS) { + GGML_ASSERT(node->grad->buffer); + GGML_ASSERT(node->type == GGML_TYPE_F32); + GGML_ASSERT(ggml_is_scalar(node)); + + const float onef = 1.0f; + ggml_backend_tensor_set(node->grad, &onef, 0, ggml_nbytes(node->grad)); + } else { + ggml_set_zero(node->grad); + } + } + + GGML_ASSERT(node); + if (node->op == GGML_OP_OPT_STEP_ADAMW) { + // set iteration to 1 and clear momenta + 
ggml_set_op_params_i32(node, 0, 1); + ggml_set_zero(node->src[2]); + ggml_set_zero(node->src[3]); + } + } +} + +void ggml_graph_clear(struct ggml_cgraph * cgraph) { + cgraph->n_leafs = 0; + cgraph->n_nodes = 0; + ggml_hash_set_reset(&cgraph->visited_hash_set); +} + +int ggml_graph_size(struct ggml_cgraph * cgraph) { + return cgraph->size; +} + +struct ggml_tensor * ggml_graph_node(struct ggml_cgraph * cgraph, int i) { + if (i < 0) { + GGML_ASSERT(cgraph->n_nodes + i >= 0); + return cgraph->nodes[cgraph->n_nodes + i]; + } + + GGML_ASSERT(i < cgraph->n_nodes); + return cgraph->nodes[i]; +} + +struct ggml_tensor ** ggml_graph_nodes(struct ggml_cgraph * cgraph) { + return cgraph->nodes; +} + +int ggml_graph_n_nodes(struct ggml_cgraph * cgraph) { + return cgraph->n_nodes; +} + +void ggml_graph_add_node(struct ggml_cgraph * cgraph, struct ggml_tensor * tensor) { + GGML_ASSERT(cgraph->size > cgraph->n_nodes); + cgraph->nodes[cgraph->n_nodes] = tensor; + cgraph->n_nodes++; +} + +// Android's libc implementation "bionic" does not support setting affinity +#if defined(__gnu_linux__) +static void set_numa_thread_affinity(int thread_n) { + if (!ggml_is_numa()) { + return; + } + + int node_num; + int rv; + size_t setsize = CPU_ALLOC_SIZE(g_state.numa.total_cpus); + + switch(g_state.numa.numa_strategy) { + case GGML_NUMA_STRATEGY_DISTRIBUTE: + // run thread on node_num thread_n / (threads per node) + node_num = thread_n % g_state.numa.n_nodes; + break; + case GGML_NUMA_STRATEGY_ISOLATE: + // run thread on current_node + node_num = g_state.numa.current_node; + break; + case GGML_NUMA_STRATEGY_NUMACTL: + // use the cpuset that numactl gave us + rv = pthread_setaffinity_np(pthread_self(), setsize, &g_state.numa.cpuset); + if (rv) { + fprintf(stderr, "warning: pthread_setaffinity_np() failed: %s\n",strerror(rv)); + } + return; + default: + return; + } + + struct ggml_numa_node * node = &g_state.numa.nodes[node_num]; + + cpu_set_t * cpus = CPU_ALLOC(g_state.numa.total_cpus); + 
CPU_ZERO_S(setsize, cpus); + for (size_t i = 0; i < node->n_cpus; ++i) { + CPU_SET_S(node->cpus[i], setsize, cpus); + } + + rv = pthread_setaffinity_np(pthread_self(), setsize, cpus); + if (rv) { + fprintf(stderr, "warning: pthread_setaffinity_np() failed: %s\n", strerror(rv)); + } + + CPU_FREE(cpus); +} + +static void clear_numa_thread_affinity(void) { + if (!ggml_is_numa()) { + return; + } + + size_t setsize = CPU_ALLOC_SIZE(g_state.numa.total_cpus); + + cpu_set_t * cpus = CPU_ALLOC(g_state.numa.total_cpus); + CPU_ZERO_S(setsize, cpus); + for (unsigned i = 0; i < g_state.numa.total_cpus; ++i) { + CPU_SET_S(i, setsize, cpus); + } + + int rv = pthread_setaffinity_np(pthread_self(), setsize, cpus); + if (rv) { + fprintf(stderr, "warning: pthread_setaffinity_np() failed: %s\n", strerror(rv)); + } + + CPU_FREE(cpus); +} +#else +// TODO: Windows etc. +// (the linux implementation may also work on BSD, someone should test) +static void set_numa_thread_affinity(int thread_n) { UNUSED(thread_n); } +static void clear_numa_thread_affinity(void) {} +#endif + +static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { + int n_tasks = 0; + + if (ggml_is_empty(node)) { + // no need to multi-thread a no-op + n_tasks = 1; + return n_tasks; + } + + switch (node->op) { + case GGML_OP_CPY: + case GGML_OP_DUP: + case GGML_OP_CONT: + case GGML_OP_ADD: + case GGML_OP_ADD1: + case GGML_OP_ACC: + { + n_tasks = n_threads; + } break; + case GGML_OP_SUB: + case GGML_OP_SQR: + case GGML_OP_SQRT: + case GGML_OP_LOG: + case GGML_OP_SIN: + case GGML_OP_COS: + case GGML_OP_SUM: + case GGML_OP_SUM_ROWS: + case GGML_OP_MEAN: + case GGML_OP_ARGMAX: + case GGML_OP_REPEAT: + case GGML_OP_REPEAT_BACK: + case GGML_OP_LEAKY_RELU: + { + n_tasks = 1; + } break; + case GGML_OP_UNARY: + switch (ggml_get_unary_op(node)) { + case GGML_UNARY_OP_ABS: + case GGML_UNARY_OP_SGN: + case GGML_UNARY_OP_NEG: + case GGML_UNARY_OP_STEP: + case GGML_UNARY_OP_TANH: + case GGML_UNARY_OP_ELU: + case 
GGML_UNARY_OP_RELU: + case GGML_UNARY_OP_SIGMOID: + case GGML_UNARY_OP_HARDSWISH: + case GGML_UNARY_OP_HARDSIGMOID: + case GGML_UNARY_OP_EXP: + { + n_tasks = 1; + } break; + + case GGML_UNARY_OP_GELU: + case GGML_UNARY_OP_GELU_QUICK: + case GGML_UNARY_OP_SILU: + { + n_tasks = n_threads; + } break; + default: + GGML_ABORT("fatal error"); + } + break; + case GGML_OP_SILU_BACK: + case GGML_OP_MUL: + case GGML_OP_DIV: + case GGML_OP_NORM: + case GGML_OP_RMS_NORM: + case GGML_OP_RMS_NORM_BACK: + case GGML_OP_GROUP_NORM: + case GGML_OP_CONCAT: + case GGML_OP_MUL_MAT: + case GGML_OP_MUL_MAT_ID: + case GGML_OP_OUT_PROD: + { + n_tasks = n_threads; + } break; + case GGML_OP_GET_ROWS: + { + // FIXME: get_rows can use additional threads, but the cost of launching additional threads + // decreases performance with GPU offloading + //n_tasks = n_threads; + n_tasks = 1; + } break; + case GGML_OP_SCALE: + case GGML_OP_SET: + case GGML_OP_RESHAPE: + case GGML_OP_VIEW: + case GGML_OP_PERMUTE: + case GGML_OP_TRANSPOSE: + case GGML_OP_GET_ROWS_BACK: + case GGML_OP_DIAG: + { + n_tasks = 1; + } break; + case GGML_OP_DIAG_MASK_ZERO: + case GGML_OP_DIAG_MASK_INF: + case GGML_OP_SOFT_MAX_BACK: + case GGML_OP_ROPE: + case GGML_OP_ROPE_BACK: + case GGML_OP_ADD_REL_POS: + { + n_tasks = n_threads; + } break; + case GGML_OP_CLAMP: + { + n_tasks = 1; //TODO + } break; + case GGML_OP_SOFT_MAX: + { + n_tasks = MIN(n_threads, ggml_nrows(node->src[0])); + } break; + case GGML_OP_IM2COL: + case GGML_OP_IM2COL_BACK: + case GGML_OP_CONV_TRANSPOSE_1D: + case GGML_OP_CONV_TRANSPOSE_2D: + { + n_tasks = n_threads; + } break; + case GGML_OP_POOL_1D: + case GGML_OP_POOL_2D: + case GGML_OP_POOL_2D_BACK: + { + n_tasks = 1; + } break; + case GGML_OP_UPSCALE: + case GGML_OP_PAD: + case GGML_OP_UNPAD: + case GGML_OP_ARANGE: + case GGML_OP_TIMESTEP_EMBEDDING: + case GGML_OP_ARGSORT: + case GGML_OP_FLASH_ATTN_EXT: + case GGML_OP_FLASH_ATTN_BACK: + case GGML_OP_SSM_CONV: + case GGML_OP_SSM_SCAN: + { + n_tasks = 
n_threads; + } break; + case GGML_OP_WIN_PART: + case GGML_OP_WIN_UNPART: + case GGML_OP_GET_REL_POS: + case GGML_OP_RWKV_WKV: + case GGML_OP_MAP_UNARY: + case GGML_OP_MAP_BINARY: + case GGML_OP_MAP_CUSTOM1_F32: + case GGML_OP_MAP_CUSTOM2_F32: + case GGML_OP_MAP_CUSTOM3_F32: + { + n_tasks = 1; + } break; + case GGML_OP_MAP_CUSTOM1: + { + struct ggml_map_custom1_op_params p; + memcpy(&p, node->op_params, sizeof(p)); + if (p.n_tasks == GGML_N_TASKS_MAX) { + n_tasks = n_threads; + } else { + n_tasks = MIN(p.n_tasks, n_threads); + } + } break; + case GGML_OP_MAP_CUSTOM2: + { + struct ggml_map_custom2_op_params p; + memcpy(&p, node->op_params, sizeof(p)); + if (p.n_tasks == GGML_N_TASKS_MAX) { + n_tasks = n_threads; + } else { + n_tasks = MIN(p.n_tasks, n_threads); + } + } break; + case GGML_OP_MAP_CUSTOM3: + { + struct ggml_map_custom3_op_params p; + memcpy(&p, node->op_params, sizeof(p)); + if (p.n_tasks == GGML_N_TASKS_MAX) { + n_tasks = n_threads; + } else { + n_tasks = MIN(p.n_tasks, n_threads); + } + } break; + case GGML_OP_CROSS_ENTROPY_LOSS: + case GGML_OP_CROSS_ENTROPY_LOSS_BACK: + case GGML_OP_OPT_STEP_ADAMW: + { + n_tasks = n_threads; + } break; + case GGML_OP_NONE: + { + n_tasks = 1; + } break; + case GGML_OP_COUNT: + { + GGML_ABORT("fatal error"); + } + default: + { + fprintf(stderr, "%s: op not implemented: ", __func__); + if (node->op < GGML_OP_COUNT) { + fprintf(stderr, "%s\n", ggml_op_name(node->op)); + } else { + fprintf(stderr, "%d\n", node->op); + } + GGML_ABORT("fatal error"); + } + } + + assert(n_tasks > 0); + + return n_tasks; +} + +static thread_ret_t ggml_graph_compute_secondary_thread(void* data); + +#if defined(_WIN32) +#include "windows.h" + +// TODO: support > 64 CPUs +bool ggml_thread_apply_affinity(bool * mask) { + HANDLE h = GetCurrentThread(); + uint64_t bitmask = 0ULL; + + assert(GGML_MAX_N_THREADS >= 64); + + for (int32_t i = 0; i < 8; i++) { + int32_t idx = i * 8; + uint8_t val = 0; + val |= mask[idx + 0] << 0; + val |= mask[idx + 1] 
<< 1; + val |= mask[idx + 2] << 2; + val |= mask[idx + 3] << 3; + val |= mask[idx + 4] << 4; + val |= mask[idx + 5] << 5; + val |= mask[idx + 6] << 6; + val |= mask[idx + 7] << 7; + bitmask |= (uint64_t)val << idx; + } + + for (int32_t i = 64; i < GGML_MAX_N_THREADS; i++) { + if (mask[i]) { + fprintf(stderr, "warn: setting thread-affinity for > 64 CPUs isn't supported on windows!\n"); + break; + } + } + + DWORD_PTR m = (DWORD_PTR)bitmask; + + m = SetThreadAffinityMask(h, m); + + return m != 0; +} + +static bool ggml_thread_apply_priority(int32_t prio) { + // Note that on Windows the Process Priority Class must be updated in order to set Thread priority. + // This is up to the applications. + DWORD p = THREAD_PRIORITY_NORMAL; + switch (prio) { + case GGML_SCHED_PRIO_NORMAL: p = THREAD_PRIORITY_NORMAL; break; + case GGML_SCHED_PRIO_MEDIUM: p = THREAD_PRIORITY_ABOVE_NORMAL; break; + case GGML_SCHED_PRIO_HIGH: p = THREAD_PRIORITY_HIGHEST; break; + case GGML_SCHED_PRIO_REALTIME: p = THREAD_PRIORITY_TIME_CRITICAL; break; + } + + if (prio == GGML_SCHED_PRIO_NORMAL) { + // Keep inherited policy/priority + return true; + } + + if (!SetThreadPriority(GetCurrentThread(), p)) { + fprintf(stderr, "warn: failed to set thread priority %d : (%d)\n", prio, (int) GetLastError()); + return false; + } + + return true; +} + +#elif defined(__APPLE__) +#include +#include + +static bool ggml_thread_apply_affinity(const bool * mask) { + // Not supported on Apple platforms + UNUSED(mask); + return true; +} + +static bool ggml_thread_apply_priority(int32_t prio) { + struct sched_param p; + int32_t policy = SCHED_OTHER; + switch (prio) { + case GGML_SCHED_PRIO_NORMAL: policy = SCHED_OTHER; p.sched_priority = 0; break; + case GGML_SCHED_PRIO_MEDIUM: policy = SCHED_FIFO; p.sched_priority = 40; break; + case GGML_SCHED_PRIO_HIGH: policy = SCHED_FIFO; p.sched_priority = 80; break; + case GGML_SCHED_PRIO_REALTIME: policy = SCHED_FIFO; p.sched_priority = 90; break; + } + + if (prio == 
GGML_SCHED_PRIO_NORMAL) { + // Keep inherited policy/priority + return true; + } + + int32_t err = pthread_setschedparam(pthread_self(), policy, &p); + if (err != 0) { + fprintf(stderr, "warn: failed to set thread priority %d : %s (%d)\n", prio, strerror(err), err); + return false; + } + + return true; +} + +#elif defined(__gnu_linux__) +// TODO: this may not work on BSD, to be verified + +static bool ggml_thread_apply_affinity(const bool * mask) { + cpu_set_t cpuset; + int err; + + CPU_ZERO(&cpuset); + + for (uint32_t i = 0; i < GGML_MAX_N_THREADS; i++) { + if (mask[i]) { + GGML_PRINT_DEBUG("Thread %lx: adding %d to cpuset\n", pthread_self(), i); + CPU_SET(i, &cpuset); + } + } + +#ifdef __ANDROID__ + err = sched_setaffinity(0, sizeof(cpuset), &cpuset); + if (err < 0) { + err = errno; + } +#else + err = pthread_setaffinity_np(pthread_self(), sizeof(cpuset), &cpuset); +#endif + if (err != 0) { + fprintf(stderr, "warn: failed to set affinity mask 0x%llx : %s (%d)\n", (unsigned long long)mask, strerror(err), err); + return false; + } + + return true; +} + +static bool ggml_thread_apply_priority(int32_t prio) { + struct sched_param p; + int32_t policy = SCHED_OTHER; + switch (prio) { + case GGML_SCHED_PRIO_NORMAL: policy = SCHED_OTHER; p.sched_priority = 0; break; + case GGML_SCHED_PRIO_MEDIUM: policy = SCHED_FIFO; p.sched_priority = 40; break; + case GGML_SCHED_PRIO_HIGH: policy = SCHED_FIFO; p.sched_priority = 80; break; + case GGML_SCHED_PRIO_REALTIME: policy = SCHED_FIFO; p.sched_priority = 90; break; + } + + if (prio == GGML_SCHED_PRIO_NORMAL) { + // Keep inherited policy/priority + return true; + } + + int32_t err = pthread_setschedparam(pthread_self(), policy, &p); + if (err != 0) { + fprintf(stderr, "warn: failed to set thread priority %d : %s (%d)\n", prio, strerror(err), err); + return false; + } + + return true; +} + +#else // unsupported platforms + +static bool ggml_thread_apply_affinity(const bool * mask) { + UNUSED(mask); + return true; +} + +static bool 
ggml_thread_apply_priority(int32_t prio) { + UNUSED(prio); + return true; +} + +#endif + +static bool ggml_thread_cpumask_is_valid(const bool * mask) { + for (int i = 0; i < GGML_MAX_N_THREADS; i++) { + if (mask[i]) { return true; } + } + return false; +} + +static void ggml_thread_cpumask_next(const bool * global_mask, bool * local_mask, bool strict, int32_t* iter) { + if (!strict) { + memcpy(local_mask, global_mask, GGML_MAX_N_THREADS); + return; + } else { + memset(local_mask, 0, GGML_MAX_N_THREADS); + int32_t base_idx = *iter; + for (int32_t i = 0; i < GGML_MAX_N_THREADS; i++) { + int32_t idx = base_idx + i; + if (idx >= GGML_MAX_N_THREADS) { + // Just a cheaper modulo + idx -= GGML_MAX_N_THREADS; + } + if (global_mask[idx]) { + local_mask[idx] = 1; + *iter = idx + 1; + return; + } + } + } +} + +void ggml_threadpool_free(struct ggml_threadpool* threadpool) { + if (!threadpool) return; + +#ifndef GGML_USE_OPENMP + struct ggml_compute_state* workers = threadpool->workers; + const int n_threads = threadpool->n_threads_max; + + ggml_mutex_lock(&threadpool->mutex); + + threadpool->stop = true; + threadpool->pause = false; + + ggml_cond_broadcast(&threadpool->cond); + ggml_mutex_unlock(&threadpool->mutex); + + for (int j = 1; j < n_threads; j++) { + int32_t rc = ggml_thread_join(workers[j].thrd, NULL); + GGML_ASSERT(rc == GGML_EXIT_SUCCESS || rc == GGML_EXIT_ABORTED); + UNUSED(rc); + } + + ggml_mutex_destroy(&threadpool->mutex); + ggml_cond_destroy(&threadpool->cond); +#endif // GGML_USE_OPENMP + + GGML_ALIGNED_FREE(threadpool->workers); + GGML_ALIGNED_FREE(threadpool); +} + +#ifndef GGML_USE_OPENMP +// pause/resume must be called under mutex +static void ggml_threadpool_pause_locked(struct ggml_threadpool * threadpool) { + GGML_PRINT_DEBUG("Pausing threadpool\n"); + threadpool->pause = true; + ggml_cond_broadcast(&threadpool->cond); +} + +static void ggml_threadpool_resume_locked(struct ggml_threadpool * threadpool) { + GGML_PRINT_DEBUG("Resuming threadpool\n"); + 
threadpool->pause = false; + ggml_cond_broadcast(&threadpool->cond); +} +#endif + +void ggml_threadpool_pause(struct ggml_threadpool * threadpool) { +#ifndef GGML_USE_OPENMP + ggml_mutex_lock(&threadpool->mutex); + if (!threadpool->pause) { + ggml_threadpool_pause_locked(threadpool); + } + ggml_mutex_unlock(&threadpool->mutex); +#else + UNUSED(threadpool); +#endif +} + +void ggml_threadpool_resume(struct ggml_threadpool * threadpool) { +#ifndef GGML_USE_OPENMP + ggml_mutex_lock(&threadpool->mutex); + if (threadpool->pause) { + ggml_threadpool_resume_locked(threadpool); + } + ggml_mutex_unlock(&threadpool->mutex); +#else + UNUSED(threadpool); +#endif +} + +struct ggml_cplan ggml_graph_plan( + const struct ggml_cgraph * cgraph, + int n_threads, + struct ggml_threadpool * threadpool) { + + if (threadpool == NULL) { + GGML_PRINT_DEBUG("Threadpool is not specified. Will create a disposable threadpool : n_threads %d\n", n_threads); + } + if (n_threads <= 0) { + n_threads = threadpool ? threadpool->n_threads_max : GGML_DEFAULT_N_THREADS; + } + + size_t work_size = 0; + + struct ggml_cplan cplan; + memset(&cplan, 0, sizeof(struct ggml_cplan)); + + int max_tasks = 1; + + // thread scheduling for the different operations + work buffer size estimation + for (int i = 0; i < cgraph->n_nodes; i++) { + struct ggml_tensor * node = cgraph->nodes[i]; + + const int n_tasks = ggml_get_n_tasks(node, n_threads); + + max_tasks = MAX(max_tasks, n_tasks); + + size_t cur = 0; + + switch (node->op) { + case GGML_OP_CPY: + case GGML_OP_DUP: + { + if (ggml_is_quantized(node->type) || + // F16 -> BF16 and BF16 -> F16 copies go through intermediate F32 + (node->src[0]->type == GGML_TYPE_F16 && node->src[1] && node->src[1]->type == GGML_TYPE_BF16) || + (node->src[0]->type == GGML_TYPE_BF16 && node->src[1] && node->src[1]->type == GGML_TYPE_F16)) { + cur = ggml_type_size(GGML_TYPE_F32) * node->ne[0] * n_tasks; + } + } break; + case GGML_OP_ADD: + case GGML_OP_ADD1: + { + if 
(ggml_is_quantized(node->src[0]->type)) { + cur = ggml_type_size(GGML_TYPE_F32) * node->src[0]->ne[0] * n_tasks; + } + } break; + case GGML_OP_ACC: + { + if (ggml_is_quantized(node->src[0]->type)) { + cur = ggml_type_size(GGML_TYPE_F32) * node->src[1]->ne[0] * n_tasks; + } + } break; + case GGML_OP_MUL_MAT: + { + const enum ggml_type vec_dot_type = type_traits[node->src[0]->type].vec_dot_type; + + if (node->src[1]->type != vec_dot_type) { + cur = ggml_row_size(vec_dot_type, ggml_nelements(node->src[1])); + } + } break; + case GGML_OP_MUL_MAT_ID: + { + cur = 0; + const struct ggml_tensor * src0 = node->src[0]; + const struct ggml_tensor * src1 = node->src[1]; + const enum ggml_type vec_dot_type = type_traits[src0->type].vec_dot_type; + if (src1->type != vec_dot_type) { + cur += ggml_row_size(vec_dot_type, ggml_nelements(src1)); + } + const int n_as = src0->ne[2]; + cur += GGML_PAD(cur, sizeof(int64_t)); // align + cur += n_as * sizeof(int64_t); // matrix_row_counts + cur += n_as * src1->ne[2] * sizeof(int64_t); // matrix_rows + } break; + case GGML_OP_OUT_PROD: + { + if (ggml_is_quantized(node->src[0]->type)) { + cur = ggml_type_size(GGML_TYPE_F32) * node->src[0]->ne[0] * n_tasks; + } + } break; + case GGML_OP_SOFT_MAX: + case GGML_OP_ROPE: + { + cur = ggml_type_size(GGML_TYPE_F32) * node->ne[0] * n_tasks; + } break; + case GGML_OP_CONV_TRANSPOSE_1D: + { + GGML_ASSERT(node->src[0]->ne[3] == 1); + GGML_ASSERT(node->src[1]->ne[2] == 1); + GGML_ASSERT(node->src[1]->ne[3] == 1); + + const int64_t ne00 = node->src[0]->ne[0]; // K + const int64_t ne01 = node->src[0]->ne[1]; // Cout + const int64_t ne02 = node->src[0]->ne[2]; // Cin + + const int64_t ne10 = node->src[1]->ne[0]; // L + const int64_t ne11 = node->src[1]->ne[1]; // Cin + + if ((node->src[0]->type == GGML_TYPE_F16 || + node->src[0]->type == GGML_TYPE_BF16) && + node->src[1]->type == GGML_TYPE_F32) { + cur += sizeof(ggml_fp16_t)*ne00*ne01*ne02; + cur += sizeof(ggml_fp16_t)*ne10*ne11; + } else if 
(node->src[0]->type == GGML_TYPE_F32 && + node->src[1]->type == GGML_TYPE_F32) { + cur += sizeof(float)*ne00*ne01*ne02; + cur += sizeof(float)*ne10*ne11; + } else { + GGML_ABORT("fatal error"); + } + } break; + case GGML_OP_CONV_TRANSPOSE_2D: + { + const int64_t ne00 = node->src[0]->ne[0]; // W + const int64_t ne01 = node->src[0]->ne[1]; // H + const int64_t ne02 = node->src[0]->ne[2]; // Channels Out + const int64_t ne03 = node->src[0]->ne[3]; // Channels In + + const int64_t ne10 = node->src[1]->ne[0]; // W + const int64_t ne11 = node->src[1]->ne[1]; // H + const int64_t ne12 = node->src[1]->ne[2]; // Channels In + + cur += sizeof(ggml_fp16_t)*ne00*ne01*ne02*ne03; + cur += sizeof(ggml_fp16_t)*ne10*ne11*ne12; + } break; + case GGML_OP_FLASH_ATTN_EXT: + { + const int64_t ne00 = node->src[0]->ne[0]; // D + + cur = 3*sizeof(float)*ne00*n_tasks; // 3x head size/thread + } break; + case GGML_OP_FLASH_ATTN_BACK: + { + const int64_t D = node->src[0]->ne[0]; + const int64_t ne11 = ggml_up(node->src[1]->ne[1], GGML_SOFT_MAX_UNROLL); + const int64_t mxDn = MAX(D, ne11) * 2; // *2 because of S and SM in ggml_compute_forward_flash_attn_back + if (node->src[1]->type == GGML_TYPE_F32) { + cur = sizeof(float)*mxDn*n_tasks; // TODO: this can become (n_tasks-1) + cur += sizeof(float)*mxDn*n_tasks; // this is overestimated by x2 + } else if (node->src[1]->type == GGML_TYPE_F16) { + cur = sizeof(float)*mxDn*n_tasks; // TODO: this can become (n_tasks-1) + cur += sizeof(float)*mxDn*n_tasks; // this is overestimated by x2 + } else if (node->src[1]->type == GGML_TYPE_BF16) { + cur = sizeof(float)*mxDn*n_tasks; // TODO: this can become (n_tasks-1) + cur += sizeof(float)*mxDn*n_tasks; // this is overestimated by x2 + } + } break; + + case GGML_OP_CROSS_ENTROPY_LOSS: + { + cur = ggml_type_size(node->type)*(n_tasks + node->src[0]->ne[0]*n_tasks); + } break; + case GGML_OP_COUNT: + { + GGML_ABORT("fatal error"); + } + default: + break; + } + + work_size = MAX(work_size, cur); + } + + if 
(work_size > 0) { + work_size += CACHE_LINE_SIZE*(n_threads); + } + + cplan.threadpool = threadpool; + cplan.n_threads = MIN(max_tasks, n_threads); + cplan.work_size = work_size; + cplan.work_data = NULL; + + return cplan; +} + +static thread_ret_t ggml_graph_compute_thread(void * data) { + struct ggml_compute_state * state = (struct ggml_compute_state *) data; + struct ggml_threadpool * tp = state->threadpool; + + const struct ggml_cgraph * cgraph = tp->cgraph; + const struct ggml_cplan * cplan = tp->cplan; + + set_numa_thread_affinity(state->ith); + + struct ggml_compute_params params = { + /*.ith =*/ state->ith, + /*.nth =*/ atomic_load_explicit(&tp->n_threads_cur, memory_order_relaxed), + /*.wsize =*/ cplan->work_size, + /*.wdata =*/ cplan->work_data, + /*.threadpool=*/ tp, + }; + + for (int node_n = 0; node_n < cgraph->n_nodes && !tp->abort; node_n++) { + struct ggml_tensor * node = cgraph->nodes[node_n]; + + ggml_compute_forward(¶ms, node); + + if (state->ith == 0 && cplan->abort_callback && + cplan->abort_callback(cplan->abort_callback_data)) { + tp->abort = true; + tp->ec = GGML_STATUS_ABORTED; + } + + ggml_barrier(state->threadpool); + } + + return 0; +} + +#ifndef GGML_USE_OPENMP + +// check if thread is active +static inline bool ggml_graph_compute_thread_active(struct ggml_compute_state * state) { + struct ggml_threadpool * threadpool = state->threadpool; + int n_threads = atomic_load_explicit(&threadpool->n_threads_cur, memory_order_relaxed); + return (state->ith < n_threads); +} + +// check if thread is ready to proceed (exit from polling or sleeping) +static inline bool ggml_graph_compute_thread_ready(struct ggml_compute_state * state) { + struct ggml_threadpool * threadpool = state->threadpool; + + if (state->pending || threadpool->stop || threadpool->pause) { return true; } + + // check for new graph/work + int new_graph = atomic_load_explicit(&threadpool->n_graph, memory_order_relaxed); + if (new_graph != state->last_graph) { + state->pending = 
ggml_graph_compute_thread_active(state); + state->last_graph = new_graph; + } + + return state->pending; +} + +// sync thread state after polling +static inline void ggml_graph_compute_thread_sync(struct ggml_compute_state * state) { + // TSAN doesn't support standalone fence yet, we use a dummy read-modify-write instead + #ifdef GGML_TSAN_ENABLED + atomic_fetch_add_explicit(&state->threadpool->n_graph, 0, memory_order_seq_cst); + #else + atomic_thread_fence(memory_order_seq_cst); + #endif + UNUSED(state); +} + +static inline bool ggml_graph_compute_poll_for_work(struct ggml_compute_state * state) { + struct ggml_threadpool * threadpool = state->threadpool; + + // Skip polling for unused threads + if (!ggml_graph_compute_thread_active(state)) { + return state->pending; + } + + // This seems to make 0 ... 100 a decent range for polling level across modern processors. + // Perhaps, we can adjust it dynamically based on load and things. + const uint64_t n_rounds = 1024UL * 128 * threadpool->poll; + + for (uint64_t i=0; !ggml_graph_compute_thread_ready(state) && i < n_rounds; i++) { + // No new work. Keep polling. + ggml_thread_cpu_relax(); + } + + return state->pending; +} + +static inline bool ggml_graph_compute_check_for_work(struct ggml_compute_state * state) { + struct ggml_threadpool * threadpool = state->threadpool; + + if (ggml_graph_compute_poll_for_work(state)) { + ggml_graph_compute_thread_sync(state); + return state->pending; + } + + ggml_mutex_lock_shared(&threadpool->mutex); + while (!ggml_graph_compute_thread_ready(state)) { + // No new work. Wait for the signal. 
+ GGML_PRINT_DEBUG("thread #%d waiting for work (sleeping)\n", state->ith); + ggml_cond_wait(&threadpool->cond, &threadpool->mutex); + } + ggml_mutex_unlock_shared(&threadpool->mutex); + + return state->pending; +} + +static thread_ret_t ggml_graph_compute_secondary_thread(void* data) { + struct ggml_compute_state * state = (struct ggml_compute_state *) data; + struct ggml_threadpool * threadpool = state->threadpool; + + ggml_thread_apply_priority(threadpool->prio); + if (ggml_thread_cpumask_is_valid(state->cpumask)) { + ggml_thread_apply_affinity(state->cpumask); + } + + while (true) { + // Check if we need to sleep + while (threadpool->pause) { + GGML_PRINT_DEBUG("thread #%d inside pause loop\n", state->ith); + ggml_mutex_lock_shared(&threadpool->mutex); + if (threadpool->pause) { + ggml_cond_wait(&threadpool->cond, &threadpool->mutex); + } + GGML_PRINT_DEBUG("thread #%d resuming after wait\n", state->ith); + ggml_mutex_unlock_shared(&threadpool->mutex); + } + + // This needs to be checked for after the cond_wait + if (threadpool->stop) break; + + // Check if there is new work + // The main thread is the only one that can dispatch new work + + ggml_graph_compute_check_for_work(state); + if (state->pending) { + state->pending = false; + + ggml_graph_compute_thread(state); + } + } + + return (thread_ret_t) 0; +} + +// Start processing new graph +static void ggml_graph_compute_kickoff(struct ggml_threadpool * threadpool, int n_threads) +{ + // Always take the mutex here because the worker threads are doing hybrid poll/wait + + ggml_mutex_lock(&threadpool->mutex); + + GGML_PRINT_DEBUG("threadpool: n_threads_cur %d n_threads %d\n", threadpool->n_threads_cur, n_threads); + + // Update the number of active threads + atomic_store_explicit(&threadpool->n_threads_cur, n_threads, memory_order_relaxed); + + // Indicate the graph is ready to be processed + // We need the full seq-cst fence here because of the polling threads (used in thread_sync) + 
atomic_fetch_add_explicit(&threadpool->n_graph, 1, memory_order_seq_cst); + + if (threadpool->pause) { + // Update main thread prio and affinity to match the threadpool settings + ggml_thread_apply_priority(threadpool->prio); + if (ggml_thread_cpumask_is_valid(threadpool->workers[0].cpumask)) { + ggml_thread_apply_affinity(threadpool->workers[0].cpumask); + } + + // resume does cond broadcast + ggml_threadpool_resume_locked(threadpool); + } else { + ggml_cond_broadcast(&threadpool->cond); + } + + ggml_mutex_unlock(&threadpool->mutex); +} + +#endif // GGML_USE_OPENMP + +void ggml_threadpool_params_init(struct ggml_threadpool_params * p, int n_threads) { + p->n_threads = n_threads; + p->prio = 0; // default priority (usually means normal or inherited) + p->poll = 50; // hybrid-polling enabled + p->strict_cpu = false; // no strict placement (all threads share same cpumask) + p->paused = false; // threads are ready to go + memset(p->cpumask, 0, GGML_MAX_N_THREADS); // all-zero means use the default affinity (usually inherited) +} + +struct ggml_threadpool_params ggml_threadpool_params_default(int n_threads) { + struct ggml_threadpool_params p; + ggml_threadpool_params_init(&p, n_threads); + return p; +} + +bool ggml_threadpool_params_match(const struct ggml_threadpool_params * p0, const struct ggml_threadpool_params * p1) { + if (p0->n_threads != p1->n_threads ) return false; + if (p0->prio != p1->prio ) return false; + if (p0->poll != p1->poll ) return false; + if (p0->strict_cpu != p1->strict_cpu ) return false; + return memcmp(p0->cpumask, p1->cpumask, GGML_MAX_N_THREADS) == 0; +} + +static struct ggml_threadpool * ggml_threadpool_new_impl( + struct ggml_threadpool_params * tpp, + struct ggml_cgraph * cgraph, + struct ggml_cplan * cplan) { + + struct ggml_threadpool * threadpool = + GGML_ALIGNED_MALLOC(sizeof(struct ggml_threadpool)); + { + threadpool->cgraph = cgraph; + threadpool->cplan = cplan; + threadpool->n_graph = 0; + threadpool->n_barrier = 0; + 
threadpool->n_barrier_passed = 0; + threadpool->current_chunk = 0; + threadpool->stop = false; + threadpool->pause = tpp->paused; + threadpool->abort = false; + threadpool->workers = NULL; + threadpool->n_threads_max = tpp->n_threads; + threadpool->n_threads_cur = tpp->n_threads; + threadpool->poll = tpp->poll; + threadpool->prio = tpp->prio; + threadpool->ec = GGML_STATUS_SUCCESS; + } + + // Allocate and init workers state + const size_t workers_size = sizeof(struct ggml_compute_state) * tpp->n_threads; + struct ggml_compute_state * workers = GGML_ALIGNED_MALLOC(workers_size); + + memset(workers, 0, workers_size); + for (int j = 0; j < tpp->n_threads; j++) { + workers[j].threadpool = threadpool; + workers[j].ith = j; + } + + threadpool->workers = workers; + +#ifndef GGML_USE_OPENMP + ggml_mutex_init(&threadpool->mutex); + ggml_cond_init(&threadpool->cond); + + // Spin the threads for all workers, and update CPU placements. + // Place the main thread last (towards the higher numbered CPU cores). 
+ + int32_t cpumask_iter = 0; + + for (int j = 1; j < tpp->n_threads; j++) { + ggml_thread_cpumask_next(tpp->cpumask, workers[j].cpumask, tpp->strict_cpu, &cpumask_iter); + + int32_t rc = ggml_thread_create(&workers[j].thrd, NULL, ggml_graph_compute_secondary_thread, &workers[j]); + GGML_ASSERT(rc == 0); + } + + ggml_thread_cpumask_next(tpp->cpumask, workers[0].cpumask, tpp->strict_cpu, &cpumask_iter); + + if (!threadpool->pause) { + // Update main thread prio and affinity at the start, otherwise we'll do it in resume + ggml_thread_apply_priority(threadpool->prio); + if (ggml_thread_cpumask_is_valid(threadpool->workers[0].cpumask)) { + ggml_thread_apply_affinity(threadpool->workers[0].cpumask); + } + } +#endif // GGML_USE_OPENMP + + return threadpool; +} + +struct ggml_threadpool * ggml_threadpool_new(struct ggml_threadpool_params * tpp) { + return ggml_threadpool_new_impl(tpp, NULL, NULL); +} + +enum ggml_status ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) { + GGML_ASSERT(cplan); + GGML_ASSERT(cplan->n_threads > 0); + GGML_ASSERT(cplan->work_size == 0 || cplan->work_data != NULL); + + int n_threads = cplan->n_threads; + struct ggml_threadpool * threadpool = cplan->threadpool; + + bool disposable_threadpool = false; + + if (threadpool == NULL) { + GGML_PRINT_DEBUG("Threadpool is not specified. 
Will create a disposable threadpool : n_threads %d\n", n_threads); + disposable_threadpool = true; + + struct ggml_threadpool_params ttp = ggml_threadpool_params_default(n_threads); + threadpool = ggml_threadpool_new_impl(&ttp, cgraph, cplan); + } else { + // Reset some of the parameters that need resetting + // No worker threads should be accessing the parameters below at this stage + threadpool->cgraph = cgraph; + threadpool->cplan = cplan; + threadpool->current_chunk = 0; + threadpool->abort = false; + threadpool->ec = GGML_STATUS_SUCCESS; + } + +#ifdef GGML_USE_OPENMP + if (n_threads > 1) { + #pragma omp parallel num_threads(n_threads) + { + #pragma omp single + { + // update the number of threads from the actual number of threads that we got from OpenMP + n_threads = omp_get_num_threads(); + atomic_store_explicit(&threadpool->n_threads_cur, n_threads, memory_order_relaxed); + } + + ggml_graph_compute_thread(&threadpool->workers[omp_get_thread_num()]); + } + } else { + atomic_store_explicit(&threadpool->n_threads_cur, 1, memory_order_relaxed); + ggml_graph_compute_thread(&threadpool->workers[0]); + } +#else + if (n_threads > threadpool->n_threads_max) { + GGML_PRINT("WARNING: cplan requested more threads (%d) than available (%d)\n", n_threads, threadpool->n_threads_max); + n_threads = threadpool->n_threads_max; + } + + // Kick all threads to start the new graph + ggml_graph_compute_kickoff(threadpool, n_threads); + + // This is a work thread too + ggml_graph_compute_thread(&threadpool->workers[0]); +#endif + + // don't leave affinity set on the main thread + clear_numa_thread_affinity(); + + enum ggml_status ret = threadpool->ec; + + if (disposable_threadpool) { + ggml_threadpool_free(threadpool); + } + + return ret; +} + +enum ggml_status ggml_graph_compute_with_ctx(struct ggml_context * ctx, struct ggml_cgraph * cgraph, int n_threads) { + struct ggml_cplan cplan = ggml_graph_plan(cgraph, n_threads, NULL); + + struct ggml_object * obj = ggml_new_object(ctx, 
GGML_OBJECT_TYPE_WORK_BUFFER, cplan.work_size); + + cplan.work_data = (uint8_t *)ctx->mem_buffer + obj->offs; + + return ggml_graph_compute(cgraph, &cplan); +} + +struct ggml_tensor * ggml_graph_get_tensor(struct ggml_cgraph * cgraph, const char * name) { + for (int i = 0; i < cgraph->n_leafs; i++) { + struct ggml_tensor * leaf = cgraph->leafs[i]; + + if (strcmp(leaf->name, name) == 0) { + return leaf; + } + } + + for (int i = 0; i < cgraph->n_nodes; i++) { + struct ggml_tensor * node = cgraph->nodes[i]; + + if (strcmp(node->name, name) == 0) { + return node; + } + } + + return NULL; +} + +static void ggml_graph_export_leaf(const struct ggml_tensor * tensor, FILE * fout) { + const int64_t * ne = tensor->ne; + const size_t * nb = tensor->nb; + + fprintf(fout, "%-6s %-12s %8d %" PRId64 " %" PRId64 " %" PRId64 " %" PRId64 " %16zu %16zu %16zu %16zu %16p %32s\n", + ggml_type_name(tensor->type), + ggml_op_name (tensor->op), + ggml_n_dims(tensor), + ne[0], ne[1], ne[2], ne[3], + nb[0], nb[1], nb[2], nb[3], + tensor->data, + tensor->name); +} + +static void ggml_graph_export_node(const struct ggml_tensor * tensor, const char * arg, FILE * fout) { + const int64_t * ne = tensor->ne; + const size_t * nb = tensor->nb; + + fprintf(fout, "%-6s %-6s %-12s %8d %" PRId64 " %" PRId64 " %" PRId64 " %" PRId64 " %16zu %16zu %16zu %16zu %16p %32s\n", + arg, + ggml_type_name(tensor->type), + ggml_op_name (tensor->op), + ggml_n_dims(tensor), + ne[0], ne[1], ne[2], ne[3], + nb[0], nb[1], nb[2], nb[3], + tensor->data, + tensor->name); +} + +void ggml_graph_export(const struct ggml_cgraph * cgraph, const char * fname) { + uint64_t size_eval = 0; + + // compute size of intermediate results + // TODO: does not take into account scratch buffers !!!! 
+ for (int i = 0; i < cgraph->n_nodes; ++i) { + size_eval += ggml_nbytes_pad(cgraph->nodes[i]); + } + + // print + { + FILE * fout = stdout; + + fprintf(fout, "\n"); + fprintf(fout, "%-16s %8x\n", "magic", GGML_FILE_MAGIC); + fprintf(fout, "%-16s %8d\n", "version", GGML_FILE_VERSION); + fprintf(fout, "%-16s %8d\n", "leafs", cgraph->n_leafs); + fprintf(fout, "%-16s %8d\n", "nodes", cgraph->n_nodes); + fprintf(fout, "%-16s %" PRIu64 "\n", "eval", size_eval); + + // header + fprintf(fout, "\n"); + fprintf(fout, "%-6s %-12s %8s %8s %8s %8s %8s %16s %16s %16s %16s %16s %16s\n", + "TYPE", "OP", "NDIMS", "NE0", "NE1", "NE2", "NE3", "NB0", "NB1", "NB2", "NB3", "DATA", "NAME"); + + for (int i = 0; i < cgraph->n_leafs; ++i) { + ggml_graph_export_leaf(cgraph->leafs[i], fout); + + GGML_ASSERT(cgraph->leafs[i]->op == GGML_OP_NONE); + GGML_ASSERT(cgraph->leafs[i]->src[0] == NULL); + GGML_ASSERT(cgraph->leafs[i]->src[1] == NULL); + } + + // header + fprintf(fout, "\n"); + fprintf(fout, "%-6s %-6s %-12s %8s %8s %8s %8s %8s %16s %16s %16s %16s %8s %16s %16s\n", + "ARG", "TYPE", "OP", "NDIMS", "NE0", "NE1", "NE2", "NE3", "NB0", "NB1", "NB2", "NB3", "NTASKS", "DATA", "NAME"); + + for (int i = 0; i < cgraph->n_nodes; ++i) { + ggml_graph_export_node(cgraph->nodes[i], "DST", fout); + + for (int j = 0; j < GGML_MAX_SRC; ++j) { + if (cgraph->nodes[i]->src[j]) { + ggml_graph_export_node(cgraph->nodes[i]->src[j], "SRC", fout); + } + } + + fprintf(fout, "\n"); + } + + fprintf(fout, "\n"); + } + + // write binary data + { + FILE * fout = ggml_fopen(fname, "wb"); + + if (!fout) { + fprintf(stderr, "%s: failed to open %s: %s\n", __func__, fname, strerror(errno)); + return; + } + + // header + { + const uint32_t magic = GGML_FILE_MAGIC; + const uint32_t version = GGML_FILE_VERSION; + const uint32_t n_leafs = cgraph->n_leafs; + const uint32_t n_nodes = cgraph->n_nodes; + + fwrite(&magic, sizeof(uint32_t), 1, fout); + fwrite(&version, sizeof(uint32_t), 1, fout); + fwrite(&n_leafs, 
sizeof(uint32_t), 1, fout); + fwrite(&n_nodes, sizeof(uint32_t), 1, fout); + fwrite(&size_eval, sizeof(uint64_t), 1, fout); + } + + // leafs + { + for (int i = 0; i < cgraph->n_leafs; ++i) { + const struct ggml_tensor * tensor = cgraph->leafs[i]; + + const uint32_t type = tensor->type; + const uint32_t op = tensor->op; + const int32_t flags = tensor->flags; + + fwrite(&type, sizeof(uint32_t), 1, fout); + fwrite(&op, sizeof(uint32_t), 1, fout); + fwrite(&flags, sizeof(int32_t), 1, fout); + + for (int j = 0; j < GGML_MAX_DIMS; ++j) { + const uint64_t ne = tensor->ne[j]; + const uint64_t nb = tensor->nb[j]; + + fwrite(&ne, sizeof(uint64_t), 1, fout); + fwrite(&nb, sizeof(uint64_t), 1, fout); + } + + fwrite(tensor->name, sizeof(char), GGML_MAX_NAME, fout); + fwrite(tensor->op_params, sizeof(char), GGML_MAX_OP_PARAMS, fout); + + // dump the data + // TODO: pad this to 32 byte boundary + { + const size_t size = ggml_nbytes(tensor); + + fwrite(tensor->data, sizeof(char), size, fout); + } + } + } + + // nodes + { + for (int i = 0; i < cgraph->n_nodes; ++i) { + const struct ggml_tensor * tensor = cgraph->nodes[i]; + + const uint32_t type = tensor->type; + const uint32_t op = tensor->op; + const int32_t flags = tensor->flags; + + fwrite(&type, sizeof(uint32_t), 1, fout); + fwrite(&op, sizeof(uint32_t), 1, fout); + fwrite(&flags, sizeof(int32_t), 1, fout); + + for (int j = 0; j < GGML_MAX_DIMS; ++j) { + const uint64_t ne = tensor->ne[j]; + const uint64_t nb = tensor->nb[j]; + + fwrite(&ne, sizeof(uint64_t), 1, fout); + fwrite(&nb, sizeof(uint64_t), 1, fout); + } + + fwrite(tensor->name, sizeof(char), GGML_MAX_NAME, fout); + fwrite(tensor->op_params, sizeof(char), GGML_MAX_OP_PARAMS, fout); + + // output the op arguments + { + struct ggml_tensor * args[GGML_MAX_SRC] = { NULL }; + + for (int j = 0; j < GGML_MAX_SRC; ++j) { + args[j] = tensor->src[j]; + } + + for (int j = 0; j < GGML_MAX_SRC; ++j) { + if (args[j]) { + int32_t idx = -1; + + // check if leaf + { + for (int k = 0; 
k < cgraph->n_leafs; ++k) { + if (args[j] == cgraph->leafs[k]) { + idx = k; + break; + } + } + } + + // check if node + if (idx == -1) { + for (int k = 0; k < cgraph->n_nodes; ++k) { + if (args[j] == cgraph->nodes[k]) { + idx = cgraph->n_leafs + k; + break; + } + } + } + + if (idx == -1) { + fprintf(stderr, "%s: failed to find tensor, arg = %d, node = %d\n", __func__, j, i); + fclose(fout); + return; + } + + fwrite(&idx, sizeof(int32_t), 1, fout); + } else { + const int32_t nul = -1; + + fwrite(&nul, sizeof(int32_t), 1, fout); + } + } + } + + // dump the data + // TODO: pad this to 32 byte boundary + if ((flags & GGML_TENSOR_FLAG_PARAM)) { + const size_t size = ggml_nbytes(tensor); + + fwrite(tensor->data, sizeof(char), size, fout); + } + } + } + + fclose(fout); + } +} + +struct ggml_cgraph * ggml_graph_import(const char * fname, struct ggml_context ** ctx_data, struct ggml_context ** ctx_eval) { + assert(*ctx_data == NULL); + assert(*ctx_eval == NULL); + + struct ggml_cgraph * result = NULL; + + struct ggml_tensor * data = NULL; + + // read file into data + { + FILE * fin = ggml_fopen(fname, "rb"); + if (!fin) { + fprintf(stderr, "%s: failed to open %s: %s\n", __func__, fname, strerror(errno)); + return result; + } + + size_t fsize = 0; + + fseek(fin, 0, SEEK_END); + fsize = ftell(fin); + fseek(fin, 0, SEEK_SET); + + // create the data context + { + const size_t overhead = 1*ggml_tensor_overhead(); + + struct ggml_init_params params = { + .mem_size = fsize + overhead, + .mem_buffer = NULL, + .no_alloc = false, + }; + + *ctx_data = ggml_init(params); + + if (!*ctx_data) { + fprintf(stderr, "%s: failed to create ggml context\n", __func__); + fclose(fin); + return result; + } + } + + data = ggml_new_tensor_1d(*ctx_data, GGML_TYPE_I8, fsize); + + { + const size_t ret = fread(data->data, sizeof(char), fsize, fin); + if (ret != fsize) { + fprintf(stderr, "%s: failed to read %s\n", __func__, fname); + fclose(fin); + return result; + } + } + + fclose(fin); + } + + // 
populate result + { + char * ptr = (char *) data->data; + + const uint32_t magic = *(const uint32_t *) ptr; ptr += sizeof(magic); + + if (magic != GGML_FILE_MAGIC) { + fprintf(stderr, "%s: invalid magic number, got %08x\n", __func__, magic); + return result; + } + + const uint32_t version = *(const uint32_t *) ptr; ptr += sizeof(version); + + if (version != GGML_FILE_VERSION) { + fprintf(stderr, "%s: invalid version number\n", __func__); + return result; + } + + const uint32_t n_leafs = *(const uint32_t *) ptr; ptr += sizeof(n_leafs); + const uint32_t n_nodes = *(const uint32_t *) ptr; ptr += sizeof(n_nodes); + const uint64_t size_eval = *(const uint64_t *) ptr; ptr += sizeof(size_eval); + const int graph_size = MAX(n_leafs, n_nodes); + + // create the data context + { + const size_t overhead = (n_leafs + n_nodes)*ggml_tensor_overhead() + ggml_graph_overhead_custom(graph_size, false); + + struct ggml_init_params params = { + .mem_size = size_eval + overhead, + .mem_buffer = NULL, + .no_alloc = true, + }; + + *ctx_eval = ggml_init(params); + + if (!*ctx_eval) { + fprintf(stderr, "%s: failed to create ggml context\n", __func__); + return result; + } + } + + result = ggml_new_graph_custom(*ctx_eval, graph_size, false); + + result->n_leafs = n_leafs; + result->n_nodes = n_nodes; + + + // leafs + { + uint32_t type; + uint32_t op; + int32_t flags; + + for (uint32_t i = 0; i < n_leafs; ++i) { + type = *(const uint32_t *) ptr; ptr += sizeof(type); + op = *(const uint32_t *) ptr; ptr += sizeof(op); + flags = *(const int32_t *) ptr; ptr += sizeof(flags); + + int64_t ne[GGML_MAX_DIMS]; + size_t nb[GGML_MAX_DIMS]; + + for (int j = 0; j < GGML_MAX_DIMS; ++j) { + uint64_t ne_cur; + uint64_t nb_cur; + + ne_cur = *(const uint64_t *) ptr; ptr += sizeof(ne_cur); + nb_cur = *(const uint64_t *) ptr; ptr += sizeof(nb_cur); + + ne[j] = ne_cur; + nb[j] = nb_cur; + } + + struct ggml_tensor * tensor = ggml_new_tensor(*ctx_eval, (enum ggml_type) type, GGML_MAX_DIMS, ne); + + tensor->op = 
(enum ggml_op) op; + tensor->flags = flags; + + memcpy(tensor->name, ptr, GGML_MAX_NAME); ptr += GGML_MAX_NAME; + memcpy(tensor->op_params, ptr, GGML_MAX_OP_PARAMS); ptr += GGML_MAX_OP_PARAMS; + + for (int j = 0; j < GGML_MAX_DIMS; ++j) { + tensor->nb[j] = nb[j]; + } + + tensor->data = (void *) ptr; ptr += ggml_nbytes(tensor); + + result->leafs[i] = tensor; + + fprintf(stderr, "%s: loaded leaf %u: '%16s', %9zu bytes\n", __func__, i, tensor->name, ggml_nbytes(tensor)); + } + } + + ggml_set_no_alloc(*ctx_eval, false); + + // nodes + { + uint32_t type; + uint32_t op; + int32_t flags; + + for (uint32_t i = 0; i < n_nodes; ++i) { + type = *(const uint32_t *) ptr; ptr += sizeof(type); + op = *(const uint32_t *) ptr; ptr += sizeof(op); + flags = *(const int32_t *) ptr; ptr += sizeof(flags); + + enum ggml_op eop = (enum ggml_op) op; + + int64_t ne[GGML_MAX_DIMS]; + size_t nb[GGML_MAX_DIMS]; + + for (int j = 0; j < GGML_MAX_DIMS; ++j) { + uint64_t ne_cur; + uint64_t nb_cur; + + ne_cur = *(const uint64_t *) ptr; ptr += sizeof(ne_cur); + nb_cur = *(const uint64_t *) ptr; ptr += sizeof(nb_cur); + + ne[j] = ne_cur; + nb[j] = nb_cur; + } + + const char * ptr_name = ptr; ptr += GGML_MAX_NAME; + const char * ptr_op_params = ptr; ptr += GGML_MAX_OP_PARAMS; + + const int32_t * ptr_arg_idx = (const int32_t *) ptr; ptr += GGML_MAX_SRC*sizeof(int32_t); + + struct ggml_tensor * args[GGML_MAX_SRC] = { NULL }; + + // parse args + for (int j = 0; j < GGML_MAX_SRC; ++j) { + const int32_t arg_idx = ptr_arg_idx[j]; + + if (arg_idx == -1) { + continue; + } + + if (arg_idx < result->n_leafs) { + args[j] = result->leafs[arg_idx]; + } else { + args[j] = result->nodes[arg_idx - result->n_leafs]; + } + } + + // create the tensor + // "view" operations are handled differently + // TODO: handle inplace ops - currently a copy is always made + + struct ggml_tensor * tensor = NULL; + + switch (eop) { + // TODO: implement other view ops + case GGML_OP_RESHAPE: + { + tensor = ggml_reshape_4d(*ctx_eval, 
args[0], ne[0], ne[1], ne[2], ne[3]); + } break; + case GGML_OP_VIEW: + { + tensor = ggml_view_4d(*ctx_eval, args[0], ne[0], ne[1], ne[2], ne[3], 0, 0, 0, 0); + + size_t offs; + memcpy(&offs, ptr_op_params, sizeof(offs)); + + tensor->data = ((char *) tensor->data) + offs; + } break; + case GGML_OP_TRANSPOSE: + { + tensor = ggml_transpose(*ctx_eval, args[0]); + } break; + case GGML_OP_PERMUTE: + { + tensor = ggml_view_4d(*ctx_eval, args[0], ne[0], ne[1], ne[2], ne[3], 0, 0, 0, 0); + } break; + default: + { + tensor = ggml_new_tensor(*ctx_eval, (enum ggml_type) type, GGML_MAX_DIMS, ne); + + tensor->op = eop; + } break; + } + + memcpy(tensor->name, ptr_name, GGML_MAX_NAME); + memcpy(tensor->op_params, ptr_op_params, GGML_MAX_OP_PARAMS); + + for (int j = 0; j < GGML_MAX_DIMS; ++j) { + tensor->nb[j] = nb[j]; + } + + for (int j = 0; j < GGML_MAX_SRC; ++j) { + tensor->src[j] = args[j]; + } + + result->nodes[i] = tensor; + + // TODO tensor data is be duplicated due to ggml_new_tensor call above + if (flags & GGML_TENSOR_FLAG_PARAM) { + tensor->data = (void *) ptr; ptr += ggml_nbytes(tensor); + } + + fprintf(stderr, "%s: loaded node %u: '%16s', %9zu bytes\n", __func__, i, tensor->name, ggml_nbytes(tensor)); + } + } + } + + return result; +} + +void ggml_graph_print(const struct ggml_cgraph * cgraph) { + GGML_PRINT("=== GRAPH ===\n"); + + GGML_PRINT("n_nodes = %d\n", cgraph->n_nodes); + for (int i = 0; i < cgraph->n_nodes; i++) { + struct ggml_tensor * node = cgraph->nodes[i]; + + GGML_PRINT(" - %3d: [ %5" PRId64 ", %5" PRId64 ", %5" PRId64 "] %16s %s\n", + i, + node->ne[0], node->ne[1], node->ne[2], + ggml_op_name(node->op), (node->flags & GGML_TENSOR_FLAG_PARAM) ? "x" : node->grad ? 
"g" : " "); + } + + GGML_PRINT("n_leafs = %d\n", cgraph->n_leafs); + for (int i = 0; i < cgraph->n_leafs; i++) { + struct ggml_tensor * node = cgraph->leafs[i]; + + GGML_PRINT(" - %3d: [ %5" PRId64 ", %5" PRId64 "] %8s %16s\n", + i, + node->ne[0], node->ne[1], + ggml_op_name(node->op), + ggml_get_name(node)); + } + + GGML_PRINT("========================================\n"); +} + +// check if node is part of the graph +static bool ggml_graph_find(const struct ggml_cgraph * cgraph, const struct ggml_tensor * node) { + if (cgraph == NULL) { + return true; + } + + for (int i = 0; i < cgraph->n_nodes; i++) { + if (cgraph->nodes[i] == node) { + return true; + } + } + + return false; +} + +static struct ggml_tensor * ggml_graph_get_parent(const struct ggml_cgraph * cgraph, const struct ggml_tensor * node) { + for (int i = 0; i < cgraph->n_nodes; i++) { + struct ggml_tensor * parent = cgraph->nodes[i]; + + if (parent->grad == node) { + return parent; + } + } + + return NULL; +} + +static void ggml_graph_dump_dot_node_edge(FILE * fp, const struct ggml_cgraph * gb, struct ggml_tensor * node, struct ggml_tensor * parent, const char * label) { + struct ggml_tensor * gparent = ggml_graph_get_parent(gb, node); + struct ggml_tensor * gparent0 = ggml_graph_get_parent(gb, parent); + fprintf(fp, " \"%p\":%s -> \"%p\":%s [ arrowhead = %s; style = %s; label = \"%s\"; ]\n", + gparent0 ? (void *) gparent0 : (void *) parent, + gparent0 ? "g" : "x", + gparent ? (void *) gparent : (void *) node, + gparent ? "g" : "x", + gparent ? "empty" : "vee", + gparent ? 
"dashed" : "solid", + label); +} + +static void ggml_graph_dump_dot_leaf_edge(FILE * fp, struct ggml_tensor * node, struct ggml_tensor * parent, const char * label) { + fprintf(fp, " \"%p\":%s -> \"%p\":%s [ label = \"%s\"; ]\n", + (void *) parent, "x", + (void *) node, "x", + label); +} + +void ggml_graph_dump_dot(const struct ggml_cgraph * gb, const struct ggml_cgraph * gf, const char * filename) { + char color[16]; + + FILE * fp = ggml_fopen(filename, "w"); + GGML_ASSERT(fp); + + fprintf(fp, "digraph G {\n"); + fprintf(fp, " newrank = true;\n"); + fprintf(fp, " rankdir = TB;\n"); + + for (int i = 0; i < gb->n_nodes; i++) { + struct ggml_tensor * node = gb->nodes[i]; + + if (ggml_graph_get_parent(gb, node) != NULL) { + continue; + } + + if (node->flags & GGML_TENSOR_FLAG_PARAM) { + snprintf(color, sizeof(color), "yellow"); + } else if (node->grad) { + if (ggml_graph_find(gf, node)) { + snprintf(color, sizeof(color), "green"); + } else { + snprintf(color, sizeof(color), "lightblue"); + } + } else { + snprintf(color, sizeof(color), "white"); + } + + fprintf(fp, " \"%p\" [ " + "style = filled; fillcolor = %s; shape = record; " + "label=\"", + (void *) node, color); + + if (strlen(node->name) > 0) { + fprintf(fp, "%s (%s)|", node->name, ggml_type_name(node->type)); + } else { + fprintf(fp, "(%s)|", ggml_type_name(node->type)); + } + + if (ggml_is_matrix(node)) { + fprintf(fp, "%d [%" PRId64 ", %" PRId64 "] | %s", i, node->ne[0], node->ne[1], ggml_op_symbol(node->op)); + } else { + fprintf(fp, "%d [%" PRId64 ", %" PRId64 ", %" PRId64 "] | %s", i, node->ne[0], node->ne[1], node->ne[2], ggml_op_symbol(node->op)); + } + + if (node->grad) { + fprintf(fp, " | %s\"; ]\n", ggml_op_symbol(node->grad->op)); + } else { + fprintf(fp, "\"; ]\n"); + } + } + + for (int i = 0; i < gb->n_leafs; i++) { + struct ggml_tensor * node = gb->leafs[i]; + + snprintf(color, sizeof(color), "pink"); + + fprintf(fp, " \"%p\" [ " + "style = filled; fillcolor = %s; shape = record; " + "label=\"", + 
(void *) node, color); + + if (strlen(node->name) > 0) { + fprintf(fp, "%s (%s)|", node->name, ggml_type_name(node->type)); + } else { + fprintf(fp, "(%s)|", ggml_type_name(node->type)); + } + + fprintf(fp, "CONST %d [%" PRId64 ", %" PRId64 "]", i, node->ne[0], node->ne[1]); + if (ggml_nelements(node) < 5 && node->data != NULL) { + fprintf(fp, " | ("); + for (int j = 0; j < ggml_nelements(node); j++) { + if (node->type == GGML_TYPE_I8 || node->type == GGML_TYPE_I16 || node->type == GGML_TYPE_I32) { + fprintf(fp, "%d", ggml_get_i32_1d(node, j)); + } + else if (node->type == GGML_TYPE_F32 || + node->type == GGML_TYPE_F16 || + node->type == GGML_TYPE_BF16) { + fprintf(fp, "%.1e", (double)ggml_get_f32_1d(node, j)); + } + else { + fprintf(fp, "#"); + } + if (j < ggml_nelements(node) - 1) { + fprintf(fp, ", "); + } + } + fprintf(fp, ")"); + } + fprintf(fp, "\"; ]\n"); + } + + for (int i = 0; i < gb->n_nodes; i++) { + struct ggml_tensor * node = gb->nodes[i]; + + for (int j = 0; j < GGML_MAX_SRC; j++) { + if (node->src[j]) { + char label[16]; + snprintf(label, sizeof(label), "src %d", j); + ggml_graph_dump_dot_node_edge(fp, gb, node, node->src[j], label); + } + } + } + + for (int i = 0; i < gb->n_leafs; i++) { + struct ggml_tensor * node = gb->leafs[i]; + + for (int j = 0; j < GGML_MAX_SRC; j++) { + if (node->src[j]) { + char label[16]; + snprintf(label, sizeof(label), "src %d", j); + ggml_graph_dump_dot_leaf_edge(fp, node, node->src[j], label); + } + } + } + + fprintf(fp, "}\n"); + + fclose(fp); + + GGML_PRINT("%s: dot -Tpng %s -o %s.png && open %s.png\n", __func__, filename, filename, filename); +} + +//////////////////////////////////////////////////////////////////////////////// + +static void ggml_opt_set_params(int np, struct ggml_tensor * const ps[], const float * x) { + int i = 0; + for (int p = 0; p < np; ++p) { + const int64_t ne = ggml_nelements(ps[p]) ; + // TODO: add function to set tensor from array + for (int64_t j = 0; j < ne; ++j) { + 
ggml_set_f32_1d(ps[p], j, x[i++]); + } + } +} + +static void ggml_opt_get_params(int np, struct ggml_tensor * const ps[], float * x) { + int i = 0; + for (int p = 0; p < np; ++p) { + const int64_t ne = ggml_nelements(ps[p]) ; + // TODO: add function to get all elements at once + for (int64_t j = 0; j < ne; ++j) { + x[i++] = ggml_get_f32_1d(ps[p], j); + } + } +} + +static void ggml_opt_get_grad(int np, struct ggml_tensor * const ps[], float * g) { + int64_t i = 0; + for (int p = 0; p < np; ++p) { + const int64_t ne = ggml_nelements(ps[p]) ; + // TODO: add function to get all elements at once + for (int64_t j = 0; j < ne; ++j) { + g[i++] = ggml_get_f32_1d(ps[p]->grad, j); + } + } +} + +static void ggml_opt_acc_grad(int np, struct ggml_tensor * const ps[], float * g, float scale) { + int64_t i = 0; + for (int p = 0; p < np; ++p) { + const int64_t ne = ggml_nelements(ps[p]) ; + // TODO: add function to get all elements at once + for (int64_t j = 0; j < ne; ++j) { + g[i++] += ggml_get_f32_1d(ps[p]->grad, j) * scale; + } + } +} + +// +// Using AdamW - ref: https://arxiv.org/pdf/1711.05101v3.pdf +// +// (Original Adam - ref: https://arxiv.org/pdf/1412.6980.pdf) +// + +static enum ggml_opt_result ggml_opt_adam( + struct ggml_context * ctx, + struct ggml_opt_context * opt, + struct ggml_opt_params params, + struct ggml_tensor * f, + struct ggml_cgraph * gf, + struct ggml_cgraph * gb, + ggml_opt_callback callback, + void * callback_data) { + GGML_ASSERT(ggml_is_scalar(f)); + GGML_ASSERT(f->type == GGML_TYPE_F32); + + // these will store the parameters we want to optimize + struct ggml_tensor * ps[GGML_MAX_PARAMS]; + + int np = 0; + int64_t nx = 0; + for (int i = 0; i < gf->n_nodes; ++i) { + if (gf->nodes[i]->flags & GGML_TENSOR_FLAG_PARAM) { + GGML_PRINT_DEBUG("found param %d: grad->op = %d\n", np, gf->nodes[i]->grad->op); + + GGML_ASSERT(np < GGML_MAX_PARAMS); + + ps[np++] = gf->nodes[i]; + nx += ggml_nelements(gf->nodes[i]); + } + } + + if ((opt->params.type != 
params.type) || (opt->nx != nx) || (opt->params.past != params.past)) { + int iter = opt->iter; + ggml_opt_init(opt->ctx, opt, params, nx); + opt->iter = iter; + } + + // constants + float sched = params.adam.sched; + const float alpha = params.adam.alpha; + const float decay = params.adam.decay * alpha; + const float beta1 = params.adam.beta1; + const float beta2 = params.adam.beta2; + const float eps = params.adam.eps; + const float gclip = params.adam.gclip; + const int decay_min_ndim = params.adam.decay_min_ndim; + const int n_accum = MAX(1, params.n_gradient_accumulation); + const float accum_norm = 1.0f / (float) n_accum; + + float * g = opt->adam.g->data; // gradients + float * m = opt->adam.m->data; // first moment + float * v = opt->adam.v->data; // second moment + + float * pf = params.past > 0 ? opt->adam.pf->data : NULL; // past function values + + struct ggml_cplan cplan = ggml_graph_plan(gb, params.n_threads, NULL); + struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_TYPE_WORK_BUFFER, cplan.work_size); + cplan.work_data = (uint8_t *)ctx->mem_buffer + obj->offs; + + bool cancel = false; + + // compute the function value + float fx = 0; + ggml_set_zero(opt->adam.g); + for (int accum_step = 0; accum_step < n_accum; ++accum_step) { + if (callback) { + callback(callback_data, accum_step, &sched, &cancel); + if (cancel) { + return GGML_OPT_RESULT_CANCEL; + } + } + // ggml_graph_reset (gf); + ggml_set_f32 (f->grad, 1.0f); + ggml_graph_compute(gb, &cplan); + ggml_opt_acc_grad(np, ps, g, accum_norm); + fx += ggml_get_f32_1d(f, 0); + } + fx *= accum_norm; + + opt->adam.fx_prev = fx; + opt->adam.fx_best = opt->adam.fx_prev; + if (pf) { + pf[opt->iter % params.past] = opt->adam.fx_prev; + } + + opt->loss_before = opt->adam.fx_prev; + opt->loss_after = opt->adam.fx_prev; + + // initialize + if (opt->just_initialized) { + opt->adam.n_no_improvement = 0; + opt->just_initialized = false; + } + + float * fx_best = &opt->adam.fx_best; + float * fx_prev = 
&opt->adam.fx_prev; + int * n_no_improvement = &opt->adam.n_no_improvement; + + int iter0 = opt->iter; + + // run the optimizer + for (int t = 0; t < params.adam.n_iter; ++t) { + opt->iter = iter0 + t + 1; + GGML_PRINT_DEBUG ("=== iter %d ===\n", t); + + GGML_PRINT_DEBUG ("f = %10.6f\n", ggml_get_f32_1d(f, 0)); + GGML_PRINT_DEBUG_5("df/dx0 = %10.6f\n", ggml_get_f32_1d(ps[0]->grad, 0)); + GGML_PRINT_DEBUG_5("df/dx1 = %10.6f\n", ggml_get_f32_1d(ps[1]->grad, 0)); + + for (int i = 0; i < np; ++i) { + GGML_PRINT_DEBUG("param %d: %10.6f, g = %10.6f\n", i, + ggml_get_f32_1d(ps[i], 0), ggml_get_f32_1d(ps[i]->grad, 0)); + } + + const int64_t t_start_wall = ggml_time_us(); + const int64_t t_start_cpu = ggml_cycles(); + UNUSED(t_start_wall); + UNUSED(t_start_cpu); + + { + float gnorm = 1.0f; + if (gclip > 0.0f) { + // gradient clipping + ggml_float sum = 0.0; + for (int64_t i = 0; i < nx; ++i) { + sum += (ggml_float)(g[i]*g[i]); + } + ggml_float norm = sqrt(sum); + if (norm > (ggml_float) gclip) { + gnorm = (float) ((ggml_float) gclip / norm); + } + } + const float beta1h = alpha*sched/(1.0f - powf(beta1, opt->iter)); + const float beta2h = 1.0f/(1.0f - powf(beta2, opt->iter)); + int64_t i = 0; + for (int p = 0; p < np; ++p) { + const int64_t ne = ggml_nelements(ps[p]); + const float p_decay = ((ggml_n_dims(ps[p]) >= decay_min_ndim) ? 
decay : 0.0f) * sched; + for (int64_t j = 0; j < ne; ++j) { + float x = ggml_get_f32_1d(ps[p], j); + float g_ = g[i]*gnorm; + m[i] = m[i]*beta1 + g_*(1.0f - beta1); + v[i] = v[i]*beta2 + g_*g_*(1.0f - beta2); + float mh = m[i]*beta1h; + float vh = v[i]*beta2h; + vh = sqrtf(vh) + eps; + x = x*(1.0f - p_decay) - mh/vh; + ggml_set_f32_1d(ps[p], j, x); + ++i; + } + } + } + + fx = 0; + ggml_set_zero(opt->adam.g); + for (int accum_step = 0; accum_step < n_accum; ++accum_step) { + if (callback) { + callback(callback_data, accum_step, &sched, &cancel); + if (cancel) { + return GGML_OPT_RESULT_CANCEL; + } + } + // ggml_graph_reset (gf); + ggml_set_f32 (f->grad, 1.0f); + ggml_graph_compute(gb, &cplan); + ggml_opt_acc_grad(np, ps, g, accum_norm); + fx += ggml_get_f32_1d(f, 0); + } + fx *= accum_norm; + + opt->loss_after = fx; + + // check convergence + if (fabsf(fx - fx_prev[0])/fx < params.adam.eps_f) { + GGML_PRINT_DEBUG("converged\n"); + + return GGML_OPT_RESULT_OK; + } + + // delta-based convergence test + if (pf != NULL) { + // need at least params.past iterations to start checking for convergence + if (params.past <= iter0 + t) { + const float rate = (pf[(iter0 + t)%params.past] - fx)/fx; + + if (fabsf(rate) < params.delta) { + return GGML_OPT_RESULT_OK; + } + } + + pf[(iter0 + t)%params.past] = fx; + } + + // check for improvement + if (params.max_no_improvement > 0) { + if (fx_best[0] > fx) { + fx_best[0] = fx; + n_no_improvement[0] = 0; + } else { + ++n_no_improvement[0]; + + if (n_no_improvement[0] >= params.max_no_improvement) { + return GGML_OPT_RESULT_OK; + } + } + } + + fx_prev[0] = fx; + + { + const int64_t t_end_cpu = ggml_cycles(); + GGML_PRINT_DEBUG("time iter: %5.3f s\n", ((float)(t_end_cpu - t_start_cpu))/CLOCKS_PER_SEC); + UNUSED(t_end_cpu); + + const int64_t t_end_wall = ggml_time_us(); + GGML_PRINT_DEBUG("wall time iter: %5.3f s\n", (t_end_wall - t_start_wall)/1e6); + UNUSED(t_end_wall); + } + } + + return GGML_OPT_RESULT_DID_NOT_CONVERGE; +} + +// +// 
L-BFGS +// +// the L-BFGS implementation below is based on the following implementation: +// +// https://github.com/chokkan/liblbfgs +// + +struct ggml_lbfgs_iteration_data { + float alpha; + float ys; + float * s; + float * y; +}; + +static enum ggml_opt_result linesearch_backtracking( + const struct ggml_opt_params * params, + int nx, + float * x, + float * fx, + float * g, + float * d, + float * step, + const float * xp, + struct ggml_tensor * f, + struct ggml_cgraph * gb, + struct ggml_cplan * cplan, + const int np, + struct ggml_tensor * ps[], + bool * cancel, + ggml_opt_callback callback, + void * callback_data) { + int count = 0; + + float width = 0.0f; + float dg = 0.0f; + float finit = 0.0f; + float dginit = 0.0f; + float dgtest = 0.0f; + + const float dec = 0.5f; + const float inc = 2.1f; + + const int n_accum = MAX(1, params->n_gradient_accumulation); + const float accum_norm = 1.0f / (float) n_accum; + + if (*step <= 0.f) { + return GGML_LINESEARCH_INVALID_PARAMETERS; + } + + // compute the initial gradient in the search direction + ggml_vec_dot_f32(nx, &dginit, 0, g, 0, d, 0, 1); + + // make sure that d points to a descent direction + if (0 < dginit) { + return GGML_LINESEARCH_FAIL; + } + + // initialize local variables + finit = *fx; + dgtest = params->lbfgs.ftol*dginit; + + while (true) { + ggml_vec_cpy_f32(nx, x, xp); + ggml_vec_mad_f32(nx, x, d, *step); + + // evaluate the function and gradient values + { + ggml_opt_set_params(np, ps, x); + + *fx = 0; + memset(g, 0, sizeof(float)*nx); + for (int accum_step = 0; accum_step < n_accum; ++accum_step) { + if (callback) { + // LBFG-S does not support learning rate -> ignore learning schedule + float sched = 0; + callback(callback_data, accum_step, &sched, cancel); + if (*cancel) { + return GGML_OPT_RESULT_CANCEL; + } + } + // ggml_graph_reset (gf); + ggml_set_f32 (f->grad, 1.0f); + ggml_graph_compute(gb, cplan); + ggml_opt_acc_grad(np, ps, g, accum_norm); + *fx += ggml_get_f32_1d(f, 0); + } + *fx *= 
accum_norm; + + } + + ++count; + + if (*fx > finit + (*step)*dgtest) { + width = dec; + } else { + // Armijo condition is satisfied + if (params->lbfgs.linesearch == GGML_LINESEARCH_BACKTRACKING_ARMIJO) { + return count; + } + + ggml_vec_dot_f32(nx, &dg, 0, g, 0, d, 0, 1); + + // check the Wolfe condition + if (dg < params->lbfgs.wolfe * dginit) { + width = inc; + } else { + if(params->lbfgs.linesearch == GGML_LINESEARCH_BACKTRACKING_WOLFE) { + // regular Wolfe conditions + return count; + } + + if(dg > -params->lbfgs.wolfe*dginit) { + width = dec; + } else { + // strong Wolfe condition (GGML_LINESEARCH_BACKTRACKING_STRONG_WOLFE) + return count; + } + } + } + + if (*step < params->lbfgs.min_step) { + return GGML_LINESEARCH_MINIMUM_STEP; + } + if (*step > params->lbfgs.max_step) { + return GGML_LINESEARCH_MAXIMUM_STEP; + } + if (params->lbfgs.max_linesearch <= count) { + return GGML_LINESEARCH_MAXIMUM_ITERATIONS; + } + + (*step) *= width; + } + + GGML_ABORT("line search failed"); + + //return GGML_LINESEARCH_FAIL; +} + +static enum ggml_opt_result ggml_opt_lbfgs( + struct ggml_context * ctx, + struct ggml_opt_context * opt, + struct ggml_opt_params params, + struct ggml_tensor * f, + struct ggml_cgraph * gf, + struct ggml_cgraph * gb, + ggml_opt_callback callback, + void * callback_data) { + if (params.lbfgs.linesearch == GGML_LINESEARCH_BACKTRACKING_WOLFE || + params.lbfgs.linesearch == GGML_LINESEARCH_BACKTRACKING_STRONG_WOLFE) { + if (params.lbfgs.wolfe <= params.lbfgs.ftol || 1.f <= params.lbfgs.wolfe) { + return GGML_OPT_RESULT_INVALID_WOLFE; + } + } + + const int m = params.lbfgs.m; + + // these will store the parameters we want to optimize + struct ggml_tensor * ps[GGML_MAX_PARAMS]; + + int np = 0; + int nx = 0; + for (int i = 0; i < gf->n_nodes; ++i) { + if (gf->nodes[i]->flags & GGML_TENSOR_FLAG_PARAM) { + GGML_PRINT_DEBUG("found param %d: grad->op = %d\n", np, gf->nodes[i]->grad->op); + + GGML_ASSERT(np < GGML_MAX_PARAMS); + + ps[np++] = gf->nodes[i]; + nx 
+= ggml_nelements(gf->nodes[i]); + } + } + + if ((opt->params.type != params.type) || (opt->nx != nx) || (opt->params.past != params.past) || (opt->params.lbfgs.m != params.lbfgs.m)) { + int iter = opt->iter; + ggml_opt_init(ctx, opt, params, nx); + opt->iter = iter; + } + + struct ggml_cplan cplan = ggml_graph_plan(gb, params.n_threads, NULL); + struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_TYPE_WORK_BUFFER, cplan.work_size); + cplan.work_data = (uint8_t *)ctx->mem_buffer + obj->offs; + + float * x = opt->lbfgs.x->data; // current parameters + float * xp = opt->lbfgs.xp->data; // previous parameters + float * g = opt->lbfgs.g->data; // current gradient + float * gp = opt->lbfgs.gp->data; // previous gradient + float * d = opt->lbfgs.d->data; // search direction + + float * pf = params.past > 0 ? opt->lbfgs.pf->data : NULL; // past function values + + const int n_accum = MAX(1, params.n_gradient_accumulation); + const float accum_norm = 1.0f / (float) n_accum; + + float fx = 0.0f; // cost function value + float xnorm = 0.0f; // ||x|| + float gnorm = 0.0f; // ||g|| + + // initialize x from the graph nodes + ggml_opt_get_params(np, ps, x); + + // the L-BFGS memory + float * lm_alpha = opt->lbfgs.lmal->data; + float * lm_ys = opt->lbfgs.lmys->data; + float * lm_s = opt->lbfgs.lms->data; + float * lm_y = opt->lbfgs.lmy->data; + + bool cancel = false; + + // evaluate the function value and its gradient + { + ggml_opt_set_params(np, ps, x); + + fx = 0; + memset(g, 0, sizeof(float)*nx); + for (int accum_step = 0; accum_step < n_accum; ++accum_step) { + if (callback) { + // LBFG-S does not support learning rate -> ignore learning schedule + float sched = 0; + callback(callback_data, accum_step, &sched, &cancel); + if (cancel) { + return GGML_OPT_RESULT_CANCEL; + } + } + // ggml_graph_reset (gf); + ggml_set_f32 (f->grad, 1.0f); + ggml_graph_compute(gb, &cplan); + ggml_opt_acc_grad(np, ps, g, accum_norm); + fx += ggml_get_f32_1d(f, 0); + } + fx *= accum_norm; + 
+ opt->loss_before = fx; + opt->loss_after = fx; + } + + // search direction = -gradient + ggml_vec_neg_f32(nx, d, g); + + // ||x||, ||g|| + ggml_vec_norm_f32(nx, &xnorm, x); + ggml_vec_norm_f32(nx, &gnorm, g); + + if (xnorm < 1.0f) { + xnorm = 1.0f; + } + + // already optimized + if (gnorm/xnorm <= params.lbfgs.eps) { + return GGML_OPT_RESULT_OK; + } + + if (opt->just_initialized) { + if (pf) { + pf[0] = fx; + } + opt->lbfgs.fx_best = fx; + + // initial step + ggml_vec_norm_inv_f32(nx, &opt->lbfgs.step, d); + opt->lbfgs.j = 0; + opt->lbfgs.k = 1; + opt->lbfgs.end = 0; + opt->lbfgs.n_no_improvement = 0; + opt->just_initialized = false; + } + + float * fx_best = &opt->lbfgs.fx_best; + float * step = &opt->lbfgs.step; + int * j = &opt->lbfgs.j; + int * k = &opt->lbfgs.k; + int * end = &opt->lbfgs.end; + int * n_no_improvement = &opt->lbfgs.n_no_improvement; + + int ls = 0; + int bound = 0; + + float ys = 0.0f; + float yy = 0.0f; + float beta = 0.0f; + + int it = 0; + + while (true) { + // store the current position and gradient vectors + ggml_vec_cpy_f32(nx, xp, x); + ggml_vec_cpy_f32(nx, gp, g); + + // TODO: instead of passing &cancel here, use the return code of the linesearch + // to determine if the optimization should be cancelled + // this is a simple change, but not doing this atm, since I don't have a nice + // way to test and don't want to break something with so many changes lined up + ls = linesearch_backtracking(¶ms, nx, x, &fx, g, d, step, xp, f, gb, &cplan, np, ps, &cancel, callback, callback_data); + if (cancel) { + return GGML_OPT_RESULT_CANCEL; + } + + if (ls < 0) { + // linesearch failed - go back to the previous point and return + ggml_vec_cpy_f32(nx, x, xp); + ggml_vec_cpy_f32(nx, g, gp); + + return ls; + } + + opt->loss_after = fx; + + ggml_vec_norm_f32(nx, &xnorm, x); + ggml_vec_norm_f32(nx, &gnorm, g); + + GGML_PRINT_DEBUG("f = %10.6f\n", ggml_get_f32_1d(f, 0)); + + if (xnorm < 1.0f) { + xnorm = 1.0f; + } + if (gnorm/xnorm <= params.lbfgs.eps) 
{ + // converged + return GGML_OPT_RESULT_OK; + } + + // delta-based convergence test + if (pf != NULL) { + // need at least params.past iterations to start checking for convergence + if (params.past <= k[0]) { + const float rate = (pf[k[0]%params.past] - fx)/fx; + + if (fabsf(rate) < params.delta) { + return GGML_OPT_RESULT_OK; + } + } + + pf[k[0]%params.past] = fx; + } + + // check for improvement + if (params.max_no_improvement > 0) { + if (fx < fx_best[0]) { + fx_best[0] = fx; + n_no_improvement[0] = 0; + } else { + n_no_improvement[0]++; + + if (n_no_improvement[0] >= params.max_no_improvement) { + return GGML_OPT_RESULT_OK; + } + } + } + + if (params.lbfgs.n_iter != 0 && params.lbfgs.n_iter < it + 1) { + // reached the maximum number of iterations + return GGML_OPT_RESULT_DID_NOT_CONVERGE; + } + + // update vectors s and y: + // s_{k+1} = x_{k+1} - x_{k} = \step * d_{k}. + // y_{k+1} = g_{k+1} - g_{k}. + // + ggml_vec_sub_f32(nx, &lm_s[end[0]*nx], x, xp); + ggml_vec_sub_f32(nx, &lm_y[end[0]*nx], g, gp); + + // compute scalars ys and yy: + // ys = y^t \cdot s -> 1 / \rho. + // yy = y^t \cdot y. + // + ggml_vec_dot_f32(nx, &ys, 0, &lm_y[end[0]*nx], 0, &lm_s[end[0]*nx], 0, 1); + ggml_vec_dot_f32(nx, &yy, 0, &lm_y[end[0]*nx], 0, &lm_y[end[0]*nx], 0, 1); + + lm_ys[end[0]] = ys; + + // find new search direction + // ref: https://en.wikipedia.org/wiki/Limited-memory_BFGS + + bound = (m <= k[0]) ? 
m : k[0]; + k[0]++; + it++; + end[0] = (end[0] + 1)%m; + + // initialize search direction with -g + ggml_vec_neg_f32(nx, d, g); + + j[0] = end[0]; + for (int i = 0; i < bound; ++i) { + j[0] = (j[0] + m - 1) % m; + // \alpha_{j} = \rho_{j} s^{t}_{j} \cdot q_{k+1} + ggml_vec_dot_f32(nx, &lm_alpha[j[0]], 0, &lm_s[j[0]*nx], 0, d, 0, 1); + lm_alpha[j[0]] /= lm_ys[j[0]]; + // q_{i} = q_{i+1} - \alpha_{i} y_{i} + ggml_vec_mad_f32(nx, d, &lm_y[j[0]*nx], -lm_alpha[j[0]]); + } + + ggml_vec_scale_f32(nx, d, ys/yy); + + for (int i = 0; i < bound; ++i) { + // \beta_{j} = \rho_{j} y^t_{j} \cdot \gamma_{i} + ggml_vec_dot_f32(nx, &beta, 0, &lm_y[j[0]*nx], 0, d, 0, 1); + beta /= lm_ys[j[0]]; + // \gamma_{i+1} = \gamma_{i} + (\alpha_{j} - \beta_{j}) s_{j} + ggml_vec_mad_f32(nx, d, &lm_s[j[0]*nx], lm_alpha[j[0]] - beta); + j[0] = (j[0] + 1)%m; + } + + step[0] = 1.0; + } + + GGML_ABORT("lbfgs failed"); + + //return GGML_OPT_RESULT_DID_NOT_CONVERGE; +} + +struct ggml_opt_params ggml_opt_default_params(enum ggml_opt_type type) { + struct ggml_opt_params result; + + switch (type) { + case GGML_OPT_TYPE_ADAM: + { + result = (struct ggml_opt_params) { + .type = GGML_OPT_TYPE_ADAM, + .graph_size = GGML_DEFAULT_GRAPH_SIZE, + .n_threads = 1, // FIXME: GGML_DEFAULT_N_THREADS ? 
+ .past = 0, + .delta = 1e-5f, + + .max_no_improvement = 100, + + .print_forward_graph = true, + .print_backward_graph = true, + + .n_gradient_accumulation = 1, + + .adam = { + .n_iter = 10000, + .sched = 1.000f, + .decay = 0.0f, + .decay_min_ndim = 2, + .alpha = 0.001f, + .beta1 = 0.9f, + .beta2 = 0.999f, + .eps = 1e-8f, + .eps_f = 1e-5f, + .eps_g = 1e-3f, + .gclip = 0.0f, + }, + }; + } break; + case GGML_OPT_TYPE_LBFGS: + { + result = (struct ggml_opt_params) { + .type = GGML_OPT_TYPE_LBFGS, + .graph_size = GGML_DEFAULT_GRAPH_SIZE, + .n_threads = 1, + .past = 0, + .delta = 1e-5f, + + .max_no_improvement = 0, + + .print_forward_graph = true, + .print_backward_graph = true, + + .n_gradient_accumulation = 1, + + .lbfgs = { + .m = 6, + .n_iter = 100, + .max_linesearch = 20, + + .eps = 1e-5f, + .ftol = 1e-4f, + .wolfe = 0.9f, + .min_step = 1e-20f, + .max_step = 1e+20f, + + .linesearch = GGML_LINESEARCH_DEFAULT, + }, + }; + } break; + } + + return result; +} + +GGML_API void ggml_opt_init( + struct ggml_context * ctx, + struct ggml_opt_context * opt, + struct ggml_opt_params params, + int64_t nx) { + opt->ctx = ctx; + opt->params = params; + opt->iter = 0; + opt->nx = nx; + opt->just_initialized = true; + if (opt->ctx == NULL) { + struct ggml_init_params ctx_opt_params; + if (opt->params.type == GGML_OPT_TYPE_ADAM) { + ctx_opt_params.mem_size = GGML_MEM_ALIGN*3 + ggml_tensor_overhead()*3 + ggml_type_size(GGML_TYPE_F32)*nx*3; + if (opt->params.past > 0) { + ctx_opt_params.mem_size += GGML_MEM_ALIGN + ggml_tensor_overhead() + ggml_type_size(GGML_TYPE_F32)*opt->params.past; + } + } else if (opt->params.type == GGML_OPT_TYPE_LBFGS) { + ctx_opt_params.mem_size = GGML_MEM_ALIGN*9 + ggml_tensor_overhead()*9 + ggml_type_size(GGML_TYPE_F32)*(nx*5 + opt->params.lbfgs.m*2 + nx*opt->params.lbfgs.m*2); + if (opt->params.past > 0) { + ctx_opt_params.mem_size += GGML_MEM_ALIGN + ggml_tensor_overhead() + ggml_type_size(GGML_TYPE_F32)*opt->params.past; + } + } + 
ctx_opt_params.mem_buffer = NULL; + ctx_opt_params.no_alloc = false; + + opt->ctx = ggml_init(ctx_opt_params); + } + switch (opt->params.type) { + case GGML_OPT_TYPE_ADAM: + { + opt->adam.g = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); + opt->adam.m = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); + opt->adam.v = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); + opt->adam.pf = params.past > 0 + ? ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, params.past) + : NULL; + ggml_set_zero(opt->adam.m); + ggml_set_zero(opt->adam.v); + if (opt->adam.pf) { + ggml_set_zero(opt->adam.pf); + } + } break; + case GGML_OPT_TYPE_LBFGS: + { + opt->lbfgs.x = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); + opt->lbfgs.xp = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); + opt->lbfgs.g = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); + opt->lbfgs.gp = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); + opt->lbfgs.d = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); + opt->lbfgs.pf = params.past > 0 + ? 
ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, params.past) + : NULL; + opt->lbfgs.lmal = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, params.lbfgs.m); + opt->lbfgs.lmys = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, params.lbfgs.m); + opt->lbfgs.lms = ggml_new_tensor_2d(opt->ctx, GGML_TYPE_F32, nx, params.lbfgs.m); + opt->lbfgs.lmy = ggml_new_tensor_2d(opt->ctx, GGML_TYPE_F32, nx, params.lbfgs.m); + ggml_set_zero(opt->lbfgs.x); + ggml_set_zero(opt->lbfgs.xp); + ggml_set_zero(opt->lbfgs.g); + ggml_set_zero(opt->lbfgs.gp); + ggml_set_zero(opt->lbfgs.d); + if (opt->lbfgs.pf) { + ggml_set_zero(opt->lbfgs.pf); + } + ggml_set_zero(opt->lbfgs.lmal); + ggml_set_zero(opt->lbfgs.lmys); + ggml_set_zero(opt->lbfgs.lms); + ggml_set_zero(opt->lbfgs.lmy); + } break; + } +} + +enum ggml_opt_result ggml_opt( + struct ggml_context * ctx, + struct ggml_opt_params params, + struct ggml_tensor * f) { + bool free_ctx = false; + if (ctx == NULL) { + struct ggml_init_params params_ctx = { + .mem_size = 16*1024*1024, + .mem_buffer = NULL, + .no_alloc = false, + }; + + ctx = ggml_init(params_ctx); + if (ctx == NULL) { + return GGML_OPT_RESULT_NO_CONTEXT; + } + + free_ctx = true; + } + + enum ggml_opt_result result = GGML_OPT_RESULT_OK; + + struct ggml_opt_context * opt = (struct ggml_opt_context *) alloca(sizeof(struct ggml_opt_context)); + + ggml_opt_init(ctx, opt, params, 0); + result = ggml_opt_resume(ctx, opt, f); + + if (free_ctx) { + ggml_free(ctx); + } + + return result; +} + +enum ggml_opt_result ggml_opt_resume( + struct ggml_context * ctx, + struct ggml_opt_context * opt, + struct ggml_tensor * f) { + + // build forward + backward compute graphs + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx, opt->params.graph_size, true); + ggml_build_forward_expand(gf, f); + + struct ggml_cgraph * gb = ggml_graph_dup(ctx, gf); + ggml_build_backward_expand(ctx, gf, gb, false); + + return ggml_opt_resume_g(ctx, opt, f, gf, gb, NULL, NULL); +} + +enum ggml_opt_result ggml_opt_resume_g( + struct 
ggml_context * ctx, + struct ggml_opt_context * opt, + struct ggml_tensor * f, + struct ggml_cgraph * gf, + struct ggml_cgraph * gb, + ggml_opt_callback callback, + void * callback_data) { + + GGML_ASSERT(f->grad && "ggml_set_param must be called for at least one ancestor"); + + // build forward + backward compute graphs + enum ggml_opt_result result = GGML_OPT_RESULT_OK; + + switch (opt->params.type) { + case GGML_OPT_TYPE_ADAM: + { + result = ggml_opt_adam(ctx, opt, opt->params, f, gf, gb, callback, callback_data); + } break; + case GGML_OPT_TYPE_LBFGS: + { + result = ggml_opt_lbfgs(ctx, opt, opt->params, f, gf, gb, callback, callback_data); + } break; + } + + if (opt->params.print_forward_graph) { + ggml_graph_print (gf); + ggml_graph_dump_dot(gf, NULL, "opt-forward.dot"); + } + + if (opt->params.print_backward_graph) { + ggml_graph_print (gb); + ggml_graph_dump_dot(gb, gf, "opt-backward.dot"); + } + + return result; +} + +//////////////////////////////////////////////////////////////////////////////// + +void ggml_set_input(struct ggml_tensor * tensor) { + tensor->flags |= GGML_TENSOR_FLAG_INPUT; +} + +void ggml_set_output(struct ggml_tensor * tensor) { + tensor->flags |= GGML_TENSOR_FLAG_OUTPUT; +} + +void ggml_set_param(struct ggml_context * ctx, struct ggml_tensor * tensor) { + GGML_UNUSED(ctx); // TODO: remove this parameter + tensor->flags |= GGML_TENSOR_FLAG_PARAM; +} + +void ggml_set_loss(struct ggml_tensor * tensor) { + GGML_ASSERT(ggml_is_scalar(tensor)); + GGML_ASSERT(tensor->type == GGML_TYPE_F32); + tensor->flags |= GGML_TENSOR_FLAG_LOSS; +} + +//////////////////////////////////////////////////////////////////////////////// + +void ggml_quantize_init(enum ggml_type type) { + ggml_critical_section_start(); + + switch (type) { + case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: + case GGML_TYPE_IQ2_S: + case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ1_M: iq2xs_init_impl(type); break; + case GGML_TYPE_IQ3_XXS: iq3xs_init_impl(256); break; + case 
GGML_TYPE_IQ3_S: iq3xs_init_impl(512); break; + default: // nothing + break; + } + + ggml_critical_section_end(); +} + +void ggml_quantize_free(void) { + ggml_critical_section_start(); + + iq2xs_free_impl(GGML_TYPE_IQ2_XXS); + iq2xs_free_impl(GGML_TYPE_IQ2_XS); + iq2xs_free_impl(GGML_TYPE_IQ1_S); + iq3xs_free_impl(256); + + ggml_critical_section_end(); +} + +bool ggml_quantize_requires_imatrix(enum ggml_type type) { + return + type == GGML_TYPE_IQ2_XXS || + type == GGML_TYPE_IQ2_XS || + type == GGML_TYPE_IQ1_S;// || + //type == GGML_TYPE_IQ1_M; +} + +size_t ggml_quantize_chunk( + enum ggml_type type, + const float * src, + void * dst, + int64_t start, + int64_t nrows, + int64_t n_per_row, + const float * imatrix) { + const int64_t n = (int64_t) nrows * n_per_row; + + if (ggml_quantize_requires_imatrix(type)) { + GGML_ASSERT(imatrix != NULL); + } + + GGML_ASSERT(start % type_traits[type].blck_size == 0); + GGML_ASSERT(start % n_per_row == 0); + + ggml_quantize_init(type); // this is noop if already initialized + + const size_t start_row = start / n_per_row; + const size_t row_size = ggml_row_size(type, n_per_row); + + size_t result = 0; + + switch (type) { + case GGML_TYPE_Q4_0: result = quantize_q4_0(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_Q4_1: result = quantize_q4_1(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_Q5_0: result = quantize_q5_0(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_Q5_1: result = quantize_q5_1(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_Q8_0: result = quantize_q8_0(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_Q2_K: result = quantize_q2_K(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_Q3_K: result = 
quantize_q3_K(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_Q4_K: result = quantize_q4_K(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_Q5_K: result = quantize_q5_K(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_Q6_K: result = quantize_q6_K(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_TQ1_0: result = quantize_tq1_0(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_TQ2_0: result = quantize_tq2_0(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_IQ2_XXS: result = quantize_iq2_xxs(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_IQ2_XS: result = quantize_iq2_xs (src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_IQ3_XXS: result = quantize_iq3_xxs(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_IQ3_S: result = quantize_iq3_s (src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_IQ2_S: result = quantize_iq2_s (src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_IQ1_S: result = quantize_iq1_s (src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_IQ1_M: result = quantize_iq1_m (src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_IQ4_NL: result = quantize_iq4_nl (src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_IQ4_XS: result = quantize_iq4_xs (src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case 
GGML_TYPE_Q4_0_4_4: result = quantize_q4_0_4x4(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_Q4_0_4_8: result = quantize_q4_0_4x8(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_Q4_0_8_8: result = quantize_q4_0_8x8(src + start, (char *) dst + start_row * row_size, nrows, n_per_row, imatrix); break; + case GGML_TYPE_F16: + { + size_t elemsize = sizeof(ggml_fp16_t); + ggml_fp32_to_fp16_row(src + start, (ggml_fp16_t *)dst + start, n); + result = n * elemsize; + } break; + case GGML_TYPE_BF16: + { + size_t elemsize = sizeof(ggml_bf16_t); + ggml_fp32_to_bf16_row_ref(src + start, (ggml_bf16_t *)dst + start, n); + result = n * elemsize; + } break; + case GGML_TYPE_F32: + { + size_t elemsize = sizeof(float); + result = n * elemsize; + memcpy((uint8_t *)dst + start * elemsize, src + start, result); + } break; + default: + assert(false); + } + + GGML_ASSERT(result == nrows * row_size); + + return result; +} + +//////////////////////////////////////////////////////////////////////////////// + +struct gguf_str { + uint64_t n; // GGUFv2 + char * data; +}; + +static const size_t GGUF_TYPE_SIZE[GGUF_TYPE_COUNT] = { + [GGUF_TYPE_UINT8] = sizeof(uint8_t), + [GGUF_TYPE_INT8] = sizeof(int8_t), + [GGUF_TYPE_UINT16] = sizeof(uint16_t), + [GGUF_TYPE_INT16] = sizeof(int16_t), + [GGUF_TYPE_UINT32] = sizeof(uint32_t), + [GGUF_TYPE_INT32] = sizeof(int32_t), + [GGUF_TYPE_FLOAT32] = sizeof(float), + [GGUF_TYPE_BOOL] = sizeof(bool), + [GGUF_TYPE_STRING] = sizeof(struct gguf_str), + [GGUF_TYPE_UINT64] = sizeof(uint64_t), + [GGUF_TYPE_INT64] = sizeof(int64_t), + [GGUF_TYPE_FLOAT64] = sizeof(double), + [GGUF_TYPE_ARRAY] = 0, // undefined +}; +static_assert(GGUF_TYPE_COUNT == 13, "GGUF_TYPE_COUNT != 13"); + +static const char * GGUF_TYPE_NAME[GGUF_TYPE_COUNT] = { + [GGUF_TYPE_UINT8] = "u8", + [GGUF_TYPE_INT8] = "i8", + [GGUF_TYPE_UINT16] = "u16", + [GGUF_TYPE_INT16] = "i16", + 
[GGUF_TYPE_UINT32] = "u32", + [GGUF_TYPE_INT32] = "i32", + [GGUF_TYPE_FLOAT32] = "f32", + [GGUF_TYPE_BOOL] = "bool", + [GGUF_TYPE_STRING] = "str", + [GGUF_TYPE_ARRAY] = "arr", + [GGUF_TYPE_UINT64] = "u64", + [GGUF_TYPE_INT64] = "i64", + [GGUF_TYPE_FLOAT64] = "f64", +}; +static_assert(GGUF_TYPE_COUNT == 13, "GGUF_TYPE_COUNT != 13"); + +union gguf_value { + uint8_t uint8; + int8_t int8; + uint16_t uint16; + int16_t int16; + uint32_t uint32; + int32_t int32; + float float32; + uint64_t uint64; + int64_t int64; + double float64; + bool bool_; + + struct gguf_str str; + + struct { + enum gguf_type type; + + uint64_t n; // GGUFv2 + void * data; + } arr; +}; + +struct gguf_kv { + struct gguf_str key; + + enum gguf_type type; + union gguf_value value; +}; + +struct gguf_header { + char magic[4]; + + uint32_t version; + uint64_t n_tensors; // GGUFv2 + uint64_t n_kv; // GGUFv2 +}; + +struct gguf_tensor_info { + struct gguf_str name; + + uint32_t n_dims; + uint64_t ne[GGML_MAX_DIMS]; + + enum ggml_type type; + + uint64_t offset; // offset from start of `data`, must be a multiple of `ALIGNMENT` + + // for writing API + const void * data; + size_t size; +}; + +struct gguf_context { + struct gguf_header header; + + struct gguf_kv * kv; + struct gguf_tensor_info * infos; + + size_t alignment; + size_t offset; // offset of `data` from beginning of file + size_t size; // size of `data` in bytes + + //uint8_t * padding; + void * data; +}; + +static size_t gguf_type_size(enum gguf_type type) { + GGML_ASSERT(0 <= type && type < GGUF_TYPE_COUNT); + return GGUF_TYPE_SIZE[type]; +} + +static void gguf_tensor_info_sanitize(struct gguf_tensor_info * info) { + GGML_ASSERT(info->n_dims <= GGML_MAX_DIMS); + GGML_ASSERT(0 <= info->type && info->type < GGML_TYPE_COUNT); + + for (uint32_t i = 0; i < info->n_dims; ++i) { + GGML_ASSERT(info->ne[i] > 0); + } + + // prevent overflow for total number of elements + GGML_ASSERT(INT64_MAX/info->ne[1] > info->ne[0]); + GGML_ASSERT(INT64_MAX/info->ne[2] > 
info->ne[0]*info->ne[1]); + GGML_ASSERT(INT64_MAX/info->ne[3] > info->ne[0]*info->ne[1]*info->ne[2]); +} + +static bool gguf_fread_el(FILE * file, void * dst, size_t size, size_t * offset) { + const size_t n = fread(dst, 1, size, file); + *offset += n; + return n == size; +} + +static bool gguf_fread_str(FILE * file, struct gguf_str * p, size_t * offset) { + p->n = 0; + p->data = NULL; + + bool ok = true; + + ok = ok && gguf_fread_el(file, &p->n, sizeof(p->n), offset); + + // early exit if string length is invalid, prevents from integer overflow + if (p->n == SIZE_MAX) { + fprintf(stderr, "%s: invalid string length (%" PRIu64 ")\n", __func__, p->n); + return false; + } + + p->data = GGML_CALLOC(p->n + 1, 1); + + ok = ok && gguf_fread_el(file, p->data, p->n, offset); + + return ok; +} + +static void gguf_free_kv(struct gguf_kv * kv) { + if (kv->key.data) { + GGML_FREE(kv->key.data); + } + + if (kv->type == GGUF_TYPE_STRING) { + if (kv->value.str.data) { + GGML_FREE(kv->value.str.data); + } + } + + if (kv->type == GGUF_TYPE_ARRAY) { + if (kv->value.arr.data) { + if (kv->value.arr.type == GGUF_TYPE_STRING) { + for (uint64_t j = 0; j < kv->value.arr.n; ++j) { + struct gguf_str * str = &((struct gguf_str *) kv->value.arr.data)[j]; + if (str->data) { + GGML_FREE(str->data); + } + } + } + GGML_FREE(kv->value.arr.data); + } + } +} + +struct gguf_context * gguf_init_empty(void) { + struct gguf_context * ctx = GGML_CALLOC(1, sizeof(struct gguf_context)); + + memcpy(ctx->header.magic, GGUF_MAGIC, sizeof(ctx->header.magic)); + ctx->header.version = GGUF_VERSION; + ctx->header.n_tensors = 0; + ctx->header.n_kv = 0; + + ctx->kv = NULL; + ctx->infos = NULL; + + ctx->alignment = GGUF_DEFAULT_ALIGNMENT; + ctx->offset = 0; + ctx->size = 0; + + ctx->data = NULL; + + return ctx; +} + +struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_params params) { + FILE * file = ggml_fopen(fname, "rb"); + if (!file) { + fprintf(stderr, "%s: failed to open '%s': 
'%s'\n", __func__, fname, strerror(errno)); + return NULL; + } + + // offset from start of file + size_t offset = 0; + + char magic[4]; + + // check the magic before making allocations + { + gguf_fread_el(file, &magic, sizeof(magic), &offset); + + for (uint32_t i = 0; i < sizeof(magic); i++) { + if (magic[i] != GGUF_MAGIC[i]) { + fprintf(stderr, "%s: invalid magic characters '%c%c%c%c'\n", __func__, magic[0], magic[1], magic[2], magic[3]); + fclose(file); + return NULL; + } + } + } + + bool ok = true; + + struct gguf_context * ctx = GGML_CALLOC(1, sizeof(struct gguf_context)); + + // read the header + { + strncpy(ctx->header.magic, magic, 4); + + ctx->kv = NULL; + ctx->infos = NULL; + ctx->data = NULL; + + ok = ok && gguf_fread_el(file, &ctx->header.version, sizeof(ctx->header.version), &offset); + ok = ok && gguf_fread_el(file, &ctx->header.n_tensors, sizeof(ctx->header.n_tensors), &offset); + ok = ok && gguf_fread_el(file, &ctx->header.n_kv, sizeof(ctx->header.n_kv), &offset); + + if (ctx->header.version == 1) { + fprintf(stderr, "%s: GGUFv1 is no longer supported. 
please use a more up-to-date version\n", __func__); + fclose(file); + gguf_free(ctx); + return NULL; + } + + // sanity-checks to prevent from integer/buffer overflows + + ok = ok && (ctx->header.n_tensors < (SIZE_MAX/2)/sizeof(struct gguf_tensor_info)); + ok = ok && (ctx->header.n_tensors < (SIZE_MAX/2)/ggml_tensor_overhead()); + ok = ok && (ctx->header.n_kv < (SIZE_MAX/2)/sizeof(struct gguf_kv)); + + if (!ok) { + fprintf(stderr, "%s: failed to read header\n", __func__); + fclose(file); + gguf_free(ctx); + return NULL; + } + } + + // read the kv pairs + { + const uint64_t n_kv = ctx->header.n_kv; + + // header.n_kv will hold the actual value of pairs that were successfully read in the loop below + ctx->header.n_kv = 0; + ctx->kv = GGML_CALLOC(n_kv, sizeof(struct gguf_kv)); + + for (uint64_t i = 0; i < n_kv; ++i) { + struct gguf_kv * kv = &ctx->kv[i]; + + //fprintf(stderr, "%s: reading kv %d\n", __func__, i); + + ok = ok && gguf_fread_str(file, &kv->key, &offset); + ok = ok && gguf_fread_el (file, &kv->type, sizeof(kv->type), &offset); + + //fprintf(stderr, "%s: reading kv with key %s\n", __func__, kv->key.data); + + switch (kv->type) { + case GGUF_TYPE_UINT8: ok = ok && gguf_fread_el (file, &kv->value.uint8, sizeof(kv->value.uint8), &offset); break; + case GGUF_TYPE_INT8: ok = ok && gguf_fread_el (file, &kv->value.int8, sizeof(kv->value.int8), &offset); break; + case GGUF_TYPE_UINT16: ok = ok && gguf_fread_el (file, &kv->value.uint16, sizeof(kv->value.uint16), &offset); break; + case GGUF_TYPE_INT16: ok = ok && gguf_fread_el (file, &kv->value.int16, sizeof(kv->value.int16), &offset); break; + case GGUF_TYPE_UINT32: ok = ok && gguf_fread_el (file, &kv->value.uint32, sizeof(kv->value.uint32), &offset); break; + case GGUF_TYPE_INT32: ok = ok && gguf_fread_el (file, &kv->value.int32, sizeof(kv->value.int32), &offset); break; + case GGUF_TYPE_FLOAT32: ok = ok && gguf_fread_el (file, &kv->value.float32, sizeof(kv->value.float32), &offset); break; + case GGUF_TYPE_UINT64: 
ok = ok && gguf_fread_el (file, &kv->value.uint64, sizeof(kv->value.uint64), &offset); break; + case GGUF_TYPE_INT64: ok = ok && gguf_fread_el (file, &kv->value.int64, sizeof(kv->value.int64), &offset); break; + case GGUF_TYPE_FLOAT64: ok = ok && gguf_fread_el (file, &kv->value.float64, sizeof(kv->value.float64), &offset); break; + case GGUF_TYPE_BOOL: ok = ok && gguf_fread_el (file, &kv->value.bool_, sizeof(kv->value.bool_), &offset); break; + case GGUF_TYPE_STRING: ok = ok && gguf_fread_str(file, &kv->value.str, &offset); break; + case GGUF_TYPE_ARRAY: + { + ok = ok && gguf_fread_el(file, &kv->value.arr.type, sizeof(kv->value.arr.type), &offset); + ok = ok && gguf_fread_el(file, &kv->value.arr.n, sizeof(kv->value.arr.n), &offset); + + switch (kv->value.arr.type) { + case GGUF_TYPE_UINT8: + case GGUF_TYPE_INT8: + case GGUF_TYPE_UINT16: + case GGUF_TYPE_INT16: + case GGUF_TYPE_UINT32: + case GGUF_TYPE_INT32: + case GGUF_TYPE_FLOAT32: + case GGUF_TYPE_UINT64: + case GGUF_TYPE_INT64: + case GGUF_TYPE_FLOAT64: + case GGUF_TYPE_BOOL: + { + // prevent from integer overflow in the malloc below + if (kv->value.arr.n >= SIZE_MAX/gguf_type_size(kv->value.arr.type)) { + fprintf(stderr, "%s: array size is too large (%" PRIu64 ")\n", __func__, kv->value.arr.n); + fclose(file); + gguf_free(ctx); + return NULL; + } + + kv->value.arr.data = GGML_CALLOC(kv->value.arr.n, gguf_type_size(kv->value.arr.type)); + + ok = ok && gguf_fread_el(file, kv->value.arr.data, kv->value.arr.n * gguf_type_size(kv->value.arr.type), &offset); + } break; + case GGUF_TYPE_STRING: + { + // prevent from integer overflow in the malloc below + if (kv->value.arr.n >= SIZE_MAX/sizeof(struct gguf_str)) { + fprintf(stderr, "%s: array size is too large (%" PRIu64 ")\n", __func__, kv->value.arr.n); + fclose(file); + gguf_free(ctx); + return NULL; + } + + kv->value.arr.data = GGML_CALLOC(kv->value.arr.n, sizeof(struct gguf_str)); + + for (uint64_t j = 0; j < kv->value.arr.n; ++j) { + ok = ok && 
gguf_fread_str(file, &((struct gguf_str *) kv->value.arr.data)[j], &offset); + } + } break; + case GGUF_TYPE_ARRAY: + default: GGML_ABORT("invalid type"); + } + } break; + default: GGML_ABORT("invalid type"); + } + + if (!ok) { + break; + } + + ctx->header.n_kv++; + } + + if (!ok) { + fprintf(stderr, "%s: failed to read key-value pairs\n", __func__); + fclose(file); + gguf_free(ctx); + return NULL; + } + } + + // read the tensor infos + if (ctx->header.n_tensors > 0) { + ctx->infos = GGML_CALLOC(ctx->header.n_tensors, sizeof(struct gguf_tensor_info)); + + for (uint64_t i = 0; i < ctx->header.n_tensors; ++i) { + struct gguf_tensor_info * info = &ctx->infos[i]; + + for (int j = 0; j < GGML_MAX_DIMS; ++j) { + info->ne[j] = 1; + } + + ok = ok && gguf_fread_str(file, &info->name, &offset); + ok = ok && gguf_fread_el (file, &info->n_dims, sizeof(info->n_dims), &offset); + + ok = ok && (info->n_dims <= GGML_MAX_DIMS); + + for (uint32_t j = 0; j < info->n_dims; ++j) { + ok = ok && gguf_fread_el(file, &info->ne[j], sizeof(info->ne[j]), &offset); + } + + ok = ok && gguf_fread_el (file, &info->type, sizeof(info->type), &offset); + ok = ok && gguf_fread_el (file, &info->offset, sizeof(info->offset), &offset); + + // TODO: return an error instead of crashing with GGML_ASSERT + gguf_tensor_info_sanitize(info); + + // make sure there is no duplicated tensor names + for (uint64_t j = 0; j < i && ok; ++j) { + if (strcmp(info->name.data, ctx->infos[j].name.data) == 0) { + fprintf(stderr, "%s: duplicated tensor name %s\n", __func__, info->name.data); + ok = false; + } + } + + if (!ok) { + fprintf(stderr, "%s: failed to read tensor info\n", __func__); + fclose(file); + gguf_free(ctx); + return NULL; + } + } + } + + ctx->alignment = GGUF_DEFAULT_ALIGNMENT; + + int alignment_idx = gguf_find_key(ctx, "general.alignment"); + if (alignment_idx != -1) { + ctx->alignment = gguf_get_val_u32(ctx, alignment_idx); + } + + // we require the data section to be aligned, so take into account any 
padding + { + const size_t offset_pad = offset % ctx->alignment; + + if (offset_pad != 0) { + offset += ctx->alignment - offset_pad; + fseek(file, offset, SEEK_SET); + } + } + + // store the current file offset - this is where the data section starts + ctx->offset = offset; + + // compute the total size of the data section, taking into account the alignment + { + ctx->size = 0; + for (uint64_t i = 0; i < ctx->header.n_tensors; ++i) { + struct gguf_tensor_info * info = &ctx->infos[i]; + + const int64_t ne = + (int64_t) info->ne[0] * + (int64_t) info->ne[1] * + (int64_t) info->ne[2] * + (int64_t) info->ne[3]; + + if (ggml_blck_size(info->type) == 0 || ne % ggml_blck_size(info->type) != 0) { + fprintf(stderr, "%s: tensor '%s' of type %d (%s) number of elements (%" PRId64 ") is not a multiple of block size (%" PRId64 ")\n", + __func__, info->name.data, (int) info->type, ggml_type_name(info->type), ne, ggml_blck_size(info->type)); + fclose(file); + gguf_free(ctx); + return NULL; + } + + const size_t size_cur = ggml_row_size(info->type, ne); + + ctx->size += GGML_PAD(size_cur, ctx->alignment); + } + } + + // load the tensor data only if requested + if (params.ctx != NULL) { + // if the provided gguf_context is no_alloc, then we create "empty" tensors and do not read the binary blob + // otherwise, we load the binary blob into the created ggml_context as well, and point the "data" members of + // the ggml_tensor structs to the appropriate locations in the binary blob + + // compute the exact size needed for the new ggml_context + const size_t mem_size = + params.no_alloc ? 
+ (ctx->header.n_tensors )*ggml_tensor_overhead() : + (ctx->header.n_tensors + 1)*ggml_tensor_overhead() + ctx->size; + + struct ggml_init_params pdata = { + .mem_size = mem_size, + .mem_buffer = NULL, + .no_alloc = params.no_alloc, + }; + + *params.ctx = ggml_init(pdata); + if (*params.ctx == NULL) { + fprintf(stderr, "%s: failed to initialize context\n", __func__); + fclose(file); + gguf_free(ctx); + return NULL; + } + + struct ggml_context * ctx_data = *params.ctx; + + struct ggml_tensor * data = NULL; + + if (!params.no_alloc) { + data = ggml_new_tensor_1d(ctx_data, GGML_TYPE_I8, ctx->size); + + ok = ok && data != NULL; + + // read the binary blob with the tensor data + ok = ok && gguf_fread_el(file, data->data, ctx->size, &offset); + + if (!ok) { + fprintf(stderr, "%s: failed to read tensor data\n", __func__); + fclose(file); + ggml_free(ctx_data); + gguf_free(ctx); + return NULL; + } + + ctx->data = data->data; + } + + ggml_set_no_alloc(ctx_data, true); + + // create the tensors + for (uint64_t i = 0; i < ctx->header.n_tensors; ++i) { + const int64_t ne[GGML_MAX_DIMS] = { + ctx->infos[i].ne[0], + ctx->infos[i].ne[1], + ctx->infos[i].ne[2], + ctx->infos[i].ne[3], + }; + + struct ggml_tensor * cur = ggml_new_tensor(ctx_data, ctx->infos[i].type, ctx->infos[i].n_dims, ne); + + ok = ok && cur != NULL; + + if (!ok) { + break; + } + + ggml_set_name(cur, ctx->infos[i].name.data); + + // point the data member to the appropriate location in the binary blob using the tensor infos + if (!params.no_alloc) { + //cur->data = (char *) data->data + ctx->infos[i].offset - ctx->offset; // offset from start of file + cur->data = (char *) data->data + ctx->infos[i].offset; // offset from data + } + } + + if (!ok) { + fprintf(stderr, "%s: failed to read the tensor data\n", __func__); + fclose(file); + ggml_free(ctx_data); + gguf_free(ctx); + return NULL; + } + + ggml_set_no_alloc(ctx_data, params.no_alloc); + } + + fclose(file); + + return ctx; +} + +void gguf_free(struct 
gguf_context * ctx) { + if (ctx == NULL) { + return; + } + + if (ctx->kv) { + // free string memory - not great.. + for (uint64_t i = 0; i < ctx->header.n_kv; ++i) { + gguf_free_kv(&ctx->kv[i]); + } + + GGML_FREE(ctx->kv); + } + + if (ctx->infos) { + for (uint64_t i = 0; i < ctx->header.n_tensors; ++i) { + struct gguf_tensor_info * info = &ctx->infos[i]; + + if (info->name.data) { + GGML_FREE(info->name.data); + } + } + + GGML_FREE(ctx->infos); + } + + GGML_FREE(ctx); +} + +const char * gguf_type_name(enum gguf_type type) { + return GGUF_TYPE_NAME[type]; +} + +int gguf_get_version(const struct gguf_context * ctx) { + return ctx->header.version; +} + +size_t gguf_get_alignment(const struct gguf_context * ctx) { + return ctx->alignment; +} + +size_t gguf_get_data_offset(const struct gguf_context * ctx) { + return ctx->offset; +} + +void * gguf_get_data(const struct gguf_context * ctx) { + return ctx->data; +} + +int gguf_get_n_kv(const struct gguf_context * ctx) { + return ctx->header.n_kv; +} + +int gguf_find_key(const struct gguf_context * ctx, const char * key) { + // return -1 if key not found + int keyfound = -1; + + const int n_kv = gguf_get_n_kv(ctx); + + for (int i = 0; i < n_kv; ++i) { + if (strcmp(key, gguf_get_key(ctx, i)) == 0) { + keyfound = i; + break; + } + } + + return keyfound; +} + +const char * gguf_get_key(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + return ctx->kv[key_id].key.data; +} + +enum gguf_type gguf_get_kv_type(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + return ctx->kv[key_id].type; +} + +enum gguf_type gguf_get_arr_type(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_ARRAY); + return ctx->kv[key_id].value.arr.type; +} + +const void * gguf_get_arr_data(const struct gguf_context * ctx, int key_id) { + 
GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_ARRAY); + return ctx->kv[key_id].value.arr.data; +} + +const char * gguf_get_arr_str(const struct gguf_context * ctx, int key_id, int i) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_ARRAY); + struct gguf_kv * kv = &ctx->kv[key_id]; + struct gguf_str * str = &((struct gguf_str *) kv->value.arr.data)[i]; + return str->data; +} + +int gguf_get_arr_n(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_ARRAY); + return ctx->kv[key_id].value.arr.n; +} + +uint8_t gguf_get_val_u8(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_UINT8); + return ctx->kv[key_id].value.uint8; +} + +int8_t gguf_get_val_i8(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_INT8); + return ctx->kv[key_id].value.int8; +} + +uint16_t gguf_get_val_u16(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_UINT16); + return ctx->kv[key_id].value.uint16; +} + +int16_t gguf_get_val_i16(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_INT16); + return ctx->kv[key_id].value.int16; +} + +uint32_t gguf_get_val_u32(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_UINT32); + return ctx->kv[key_id].value.uint32; +} + +int32_t gguf_get_val_i32(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < 
gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_INT32); + return ctx->kv[key_id].value.int32; +} + +float gguf_get_val_f32(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_FLOAT32); + return ctx->kv[key_id].value.float32; +} + +uint64_t gguf_get_val_u64(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_UINT64); + return ctx->kv[key_id].value.uint64; +} + +int64_t gguf_get_val_i64(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_INT64); + return ctx->kv[key_id].value.int64; +} + +double gguf_get_val_f64(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_FLOAT64); + return ctx->kv[key_id].value.float64; +} + +bool gguf_get_val_bool(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_BOOL); + return ctx->kv[key_id].value.bool_; +} + +const char * gguf_get_val_str(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type == GGUF_TYPE_STRING); + return ctx->kv[key_id].value.str.data; +} + +const void * gguf_get_val_data(const struct gguf_context * ctx, int key_id) { + GGML_ASSERT(key_id >= 0 && key_id < gguf_get_n_kv(ctx)); + GGML_ASSERT(ctx->kv[key_id].type != GGUF_TYPE_ARRAY); + GGML_ASSERT(ctx->kv[key_id].type != GGUF_TYPE_STRING); + return &ctx->kv[key_id].value; +} + +int gguf_get_n_tensors(const struct gguf_context * ctx) { + return ctx->header.n_tensors; +} + +int gguf_find_tensor(const struct gguf_context * ctx, const char * name) { + // return -1 if 
tensor not found + int tensorfound = -1; + + const int n_tensors = gguf_get_n_tensors(ctx); + + for (int i = 0; i < n_tensors; ++i) { + if (strcmp(name, gguf_get_tensor_name(ctx, i)) == 0) { + tensorfound = i; + break; + } + } + + return tensorfound; +} + +size_t gguf_get_tensor_offset(const struct gguf_context * ctx, int i) { + return ctx->infos[i].offset; +} + +char * gguf_get_tensor_name(const struct gguf_context * ctx, int i) { + return ctx->infos[i].name.data; +} + +enum ggml_type gguf_get_tensor_type(const struct gguf_context * ctx, int i) { + return ctx->infos[i].type; +} + +// returns the index +static int gguf_get_or_add_key(struct gguf_context * ctx, const char * key) { + const int idx = gguf_find_key(ctx, key); + if (idx >= 0) { + return idx; + } + + const int n_kv = gguf_get_n_kv(ctx); + + ctx->kv = realloc(ctx->kv, (n_kv + 1) * sizeof(struct gguf_kv)); + ctx->kv[n_kv].key.n = strlen(key); + ctx->kv[n_kv].key.data = strdup(key); + ctx->header.n_kv++; + + return n_kv; +} + +void gguf_remove_key(struct gguf_context * ctx, const char * key) { + const int idx = gguf_find_key(ctx, key); + if (idx >= 0) { + const int n_kv = gguf_get_n_kv(ctx); + gguf_free_kv(&ctx->kv[idx]); + for (int i = idx; i < n_kv-1; ++i) { + ctx->kv[i] = ctx->kv[i+1]; + } + ctx->kv = realloc(ctx->kv, (n_kv - 1) * sizeof(struct gguf_kv)); + ctx->header.n_kv--; + } +} + +void gguf_set_val_u8(struct gguf_context * ctx, const char * key, uint8_t val) { + const int idx = gguf_get_or_add_key(ctx, key); + + ctx->kv[idx].type = GGUF_TYPE_UINT8; + ctx->kv[idx].value.uint8 = val; +} + +void gguf_set_val_i8(struct gguf_context * ctx, const char * key, int8_t val) { + const int idx = gguf_get_or_add_key(ctx, key); + + ctx->kv[idx].type = GGUF_TYPE_INT8; + ctx->kv[idx].value.int8 = val; +} + +void gguf_set_val_u16(struct gguf_context * ctx, const char * key, uint16_t val) { + const int idx = gguf_get_or_add_key(ctx, key); + + ctx->kv[idx].type = GGUF_TYPE_UINT16; + ctx->kv[idx].value.uint16 = val; 
+} + +void gguf_set_val_i16(struct gguf_context * ctx, const char * key, int16_t val) { + const int idx = gguf_get_or_add_key(ctx, key); + + ctx->kv[idx].type = GGUF_TYPE_INT16; + ctx->kv[idx].value.int16 = val; +} + +void gguf_set_val_u32(struct gguf_context * ctx, const char * key, uint32_t val) { + const int idx = gguf_get_or_add_key(ctx, key); + + ctx->kv[idx].type = GGUF_TYPE_UINT32; + ctx->kv[idx].value.uint32 = val; +} + +void gguf_set_val_i32(struct gguf_context * ctx, const char * key, int32_t val) { + const int idx = gguf_get_or_add_key(ctx, key); + + ctx->kv[idx].type = GGUF_TYPE_INT32; + ctx->kv[idx].value.int32 = val; +} + +void gguf_set_val_f32(struct gguf_context * ctx, const char * key, float val) { + const int idx = gguf_get_or_add_key(ctx, key); + + ctx->kv[idx].type = GGUF_TYPE_FLOAT32; + ctx->kv[idx].value.float32 = val; +} + +void gguf_set_val_u64(struct gguf_context * ctx, const char * key, uint64_t val) { + const int idx = gguf_get_or_add_key(ctx, key); + + ctx->kv[idx].type = GGUF_TYPE_UINT64; + ctx->kv[idx].value.uint64 = val; +} + +void gguf_set_val_i64(struct gguf_context * ctx, const char * key, int64_t val) { + const int idx = gguf_get_or_add_key(ctx, key); + + ctx->kv[idx].type = GGUF_TYPE_INT64; + ctx->kv[idx].value.int64 = val; +} + +void gguf_set_val_f64(struct gguf_context * ctx, const char * key, double val) { + const int idx = gguf_get_or_add_key(ctx, key); + + ctx->kv[idx].type = GGUF_TYPE_FLOAT64; + ctx->kv[idx].value.float64 = val; +} + +void gguf_set_val_bool(struct gguf_context * ctx, const char * key, bool val) { + const int idx = gguf_get_or_add_key(ctx, key); + + ctx->kv[idx].type = GGUF_TYPE_BOOL; + ctx->kv[idx].value.bool_ = val; +} + +void gguf_set_val_str(struct gguf_context * ctx, const char * key, const char * val) { + const int idx = gguf_get_or_add_key(ctx, key); + + ctx->kv[idx].type = GGUF_TYPE_STRING; + ctx->kv[idx].value.str.n = strlen(val); + ctx->kv[idx].value.str.data = strdup(val); +} + +void 
gguf_set_arr_data(struct gguf_context * ctx, const char * key, enum gguf_type type, const void * data, int n) { + const int idx = gguf_get_or_add_key(ctx, key); + + ctx->kv[idx].type = GGUF_TYPE_ARRAY; + ctx->kv[idx].value.arr.type = type; + ctx->kv[idx].value.arr.n = n; + ctx->kv[idx].value.arr.data = GGML_CALLOC(n, gguf_type_size(type)); + memcpy(ctx->kv[idx].value.arr.data, data, n*gguf_type_size(type)); +} + +void gguf_set_arr_str(struct gguf_context * ctx, const char * key, const char ** data, int n) { + const int idx = gguf_get_or_add_key(ctx, key); + + ctx->kv[idx].type = GGUF_TYPE_ARRAY; + ctx->kv[idx].value.arr.type = GGUF_TYPE_STRING; + ctx->kv[idx].value.arr.n = n; + ctx->kv[idx].value.arr.data = GGML_CALLOC(n, sizeof(struct gguf_str)); + for (int i = 0; i < n; i++) { + struct gguf_str * str = &((struct gguf_str *)ctx->kv[idx].value.arr.data)[i]; + str->n = strlen(data[i]); + str->data = strdup(data[i]); + } +} + +// set or add KV pairs from another context +void gguf_set_kv(struct gguf_context * ctx, struct gguf_context * src) { + for (uint32_t i = 0; i < src->header.n_kv; i++) { + switch (src->kv[i].type) { + case GGUF_TYPE_UINT8: gguf_set_val_u8 (ctx, src->kv[i].key.data, src->kv[i].value.uint8); break; + case GGUF_TYPE_INT8: gguf_set_val_i8 (ctx, src->kv[i].key.data, src->kv[i].value.int8); break; + case GGUF_TYPE_UINT16: gguf_set_val_u16 (ctx, src->kv[i].key.data, src->kv[i].value.uint16); break; + case GGUF_TYPE_INT16: gguf_set_val_i16 (ctx, src->kv[i].key.data, src->kv[i].value.int16); break; + case GGUF_TYPE_UINT32: gguf_set_val_u32 (ctx, src->kv[i].key.data, src->kv[i].value.uint32); break; + case GGUF_TYPE_INT32: gguf_set_val_i32 (ctx, src->kv[i].key.data, src->kv[i].value.int32); break; + case GGUF_TYPE_FLOAT32: gguf_set_val_f32 (ctx, src->kv[i].key.data, src->kv[i].value.float32); break; + case GGUF_TYPE_UINT64: gguf_set_val_u64 (ctx, src->kv[i].key.data, src->kv[i].value.uint64); break; + case GGUF_TYPE_INT64: gguf_set_val_i64 (ctx, 
src->kv[i].key.data, src->kv[i].value.int64); break; + case GGUF_TYPE_FLOAT64: gguf_set_val_f64 (ctx, src->kv[i].key.data, src->kv[i].value.float64); break; + case GGUF_TYPE_BOOL: gguf_set_val_bool(ctx, src->kv[i].key.data, src->kv[i].value.bool_); break; + case GGUF_TYPE_STRING: gguf_set_val_str (ctx, src->kv[i].key.data, src->kv[i].value.str.data); break; + case GGUF_TYPE_ARRAY: + { + if (src->kv[i].value.arr.type == GGUF_TYPE_STRING) { + const char ** data = GGML_CALLOC(src->kv[i].value.arr.n, sizeof(char *)); + for (uint32_t j = 0; j < src->kv[i].value.arr.n; j++) { + data[j] = ((struct gguf_str *)src->kv[i].value.arr.data)[j].data; + } + gguf_set_arr_str(ctx, src->kv[i].key.data, data, src->kv[i].value.arr.n); + GGML_FREE((void *)data); + } else if (src->kv[i].value.arr.type == GGUF_TYPE_ARRAY) { + GGML_ABORT("nested arrays not supported"); + } else { + gguf_set_arr_data(ctx, src->kv[i].key.data, src->kv[i].value.arr.type, src->kv[i].value.arr.data, src->kv[i].value.arr.n); + } + } break; + default: GGML_ABORT("invalid type"); + } + } +} + +void gguf_add_tensor( + struct gguf_context * ctx, + const struct ggml_tensor * tensor) { + GGML_ASSERT(tensor); + if (gguf_find_tensor(ctx, tensor->name) != -1) { + GGML_ABORT("duplicated tensor name"); + } + + const int idx = ctx->header.n_tensors; + ctx->infos = realloc(ctx->infos, (idx + 1)*sizeof(struct gguf_tensor_info)); + + ctx->infos[idx].name.n = strlen(tensor->name); + ctx->infos[idx].name.data = strdup(tensor->name); + + for (int i = 0; i < GGML_MAX_DIMS; ++i) { + ctx->infos[idx].ne[i] = 1; + } + + ctx->infos[idx].n_dims = ggml_n_dims(tensor); + for (uint32_t i = 0; i < ctx->infos[idx].n_dims; i++) { + ctx->infos[idx].ne[i] = tensor->ne[i]; + } + + ctx->infos[idx].type = tensor->type; + ctx->infos[idx].offset = 0; + ctx->infos[idx].data = tensor->data; + ctx->infos[idx].size = ggml_nbytes(tensor); + + if (ctx->header.n_tensors > 0) { + ctx->infos[idx].offset = ctx->infos[idx - 1].offset + GGML_PAD(ctx->infos[idx 
- 1].size, ctx->alignment); + } + + ctx->header.n_tensors++; +} + +void gguf_set_tensor_type(struct gguf_context * ctx, const char * name, enum ggml_type type) { + const int idx = gguf_find_tensor(ctx, name); + if (idx < 0) { + GGML_ABORT("tensor not found"); + } + + ctx->infos[idx].type = type; +} + +void gguf_set_tensor_data(struct gguf_context * ctx, const char * name, const void * data, size_t size) { + const int idx = gguf_find_tensor(ctx, name); + if (idx < 0) { + GGML_ABORT("tensor not found"); + } + + ctx->infos[idx].data = data; + ctx->infos[idx].size = size; + + // update offsets + for (uint32_t i = idx + 1; i < ctx->header.n_tensors; ++i) { + ctx->infos[i].offset = ctx->infos[i - 1].offset + GGML_PAD(ctx->infos[i - 1].size, ctx->alignment); + } +} + +//static void gguf_fwrite_str(FILE * file, const struct gguf_str * val) { +// fwrite(&val->n, sizeof(val->n), 1, file); +// fwrite(val->data, sizeof(char), val->n, file); +//} +// +//static void gguf_fwrite_el(FILE * file, const void * val, size_t size) { +// fwrite(val, sizeof(char), size, file); +//} + +struct gguf_buf { + void * data; + size_t size; + size_t offset; +}; + +static struct gguf_buf gguf_buf_init(size_t size) { + struct gguf_buf buf = { + /*buf.data =*/ size == 0 ? 
NULL : GGML_CALLOC(1, size), + /*buf.size =*/ size, + /*buf.offset =*/ 0, + }; + + return buf; +} + +static void gguf_buf_free(struct gguf_buf buf) { + if (buf.data) { + GGML_FREE(buf.data); + } +} + +static void gguf_buf_grow(struct gguf_buf * buf, size_t size) { + if (buf->offset + size > buf->size) { + buf->size = 1.5*(buf->offset + size); + if (buf->data) { + buf->data = realloc(buf->data, buf->size); + } + } +} + +static void gguf_bwrite_str(struct gguf_buf * buf, const struct gguf_str * val) { + gguf_buf_grow(buf, sizeof(val->n) + val->n); + + if (buf->data) { + memcpy((char *) buf->data + buf->offset, &val->n, sizeof(val->n)); + } + buf->offset += sizeof(val->n); + + if (buf->data) { + memcpy((char *) buf->data + buf->offset, val->data, val->n); + } + buf->offset += val->n; +} + +static void gguf_bwrite_el(struct gguf_buf * buf, const void * val, size_t el_size) { + gguf_buf_grow(buf, el_size); + + if (buf->data) { + memcpy((char *) buf->data + buf->offset, val, el_size); + } + buf->offset += el_size; +} + +static void gguf_write_to_buf(const struct gguf_context * ctx, struct gguf_buf * buf, bool only_meta) { + // write header + gguf_bwrite_el(buf, &ctx->header.magic, sizeof(ctx->header.magic)); + gguf_bwrite_el(buf, &ctx->header.version, sizeof(ctx->header.version)); + gguf_bwrite_el(buf, &ctx->header.n_tensors, sizeof(ctx->header.n_tensors)); + gguf_bwrite_el(buf, &ctx->header.n_kv, sizeof(ctx->header.n_kv)); + + // write key-value pairs + for (uint32_t i = 0; i < ctx->header.n_kv; ++i) { + struct gguf_kv * kv = &ctx->kv[i]; + + gguf_bwrite_str(buf, &kv->key); + gguf_bwrite_el (buf, &kv->type, sizeof(kv->type)); + + switch (kv->type) { + case GGUF_TYPE_UINT8: gguf_bwrite_el( buf, &kv->value.uint8, sizeof(kv->value.uint8) ); break; + case GGUF_TYPE_INT8: gguf_bwrite_el (buf, &kv->value.int8, sizeof(kv->value.int8) ); break; + case GGUF_TYPE_UINT16: gguf_bwrite_el (buf, &kv->value.uint16, sizeof(kv->value.uint16) ); break; + case GGUF_TYPE_INT16: 
gguf_bwrite_el (buf, &kv->value.int16, sizeof(kv->value.int16) ); break; + case GGUF_TYPE_UINT32: gguf_bwrite_el (buf, &kv->value.uint32, sizeof(kv->value.uint32) ); break; + case GGUF_TYPE_INT32: gguf_bwrite_el (buf, &kv->value.int32, sizeof(kv->value.int32) ); break; + case GGUF_TYPE_FLOAT32: gguf_bwrite_el (buf, &kv->value.float32, sizeof(kv->value.float32)); break; + case GGUF_TYPE_UINT64: gguf_bwrite_el (buf, &kv->value.uint64, sizeof(kv->value.uint64) ); break; + case GGUF_TYPE_INT64: gguf_bwrite_el (buf, &kv->value.int64, sizeof(kv->value.int64) ); break; + case GGUF_TYPE_FLOAT64: gguf_bwrite_el (buf, &kv->value.float64, sizeof(kv->value.float64)); break; + case GGUF_TYPE_BOOL: gguf_bwrite_el (buf, &kv->value.bool_, sizeof(kv->value.bool_) ); break; + case GGUF_TYPE_STRING: gguf_bwrite_str(buf, &kv->value.str ); break; + case GGUF_TYPE_ARRAY: + { + gguf_bwrite_el(buf, &kv->value.arr.type, sizeof(kv->value.arr.type)); + gguf_bwrite_el(buf, &kv->value.arr.n, sizeof(kv->value.arr.n) ); + + switch (kv->value.arr.type) { + case GGUF_TYPE_UINT8: + case GGUF_TYPE_INT8: + case GGUF_TYPE_UINT16: + case GGUF_TYPE_INT16: + case GGUF_TYPE_UINT32: + case GGUF_TYPE_INT32: + case GGUF_TYPE_FLOAT32: + case GGUF_TYPE_UINT64: + case GGUF_TYPE_INT64: + case GGUF_TYPE_FLOAT64: + case GGUF_TYPE_BOOL: + { + gguf_bwrite_el(buf, kv->value.arr.data, kv->value.arr.n * gguf_type_size(kv->value.arr.type)); + } break; + case GGUF_TYPE_STRING: + { + for (uint32_t j = 0; j < kv->value.arr.n; ++j) { + gguf_bwrite_str(buf, &((struct gguf_str *) kv->value.arr.data)[j]); + } + } break; + case GGUF_TYPE_ARRAY: + default: GGML_ABORT("invalid type"); + } + } break; + default: GGML_ABORT("invalid type"); + } + } + + // write tensor infos + for (uint32_t i = 0; i < ctx->header.n_tensors; ++i) { + struct gguf_tensor_info * info = &ctx->infos[i]; + + gguf_bwrite_str(buf, &info->name); + gguf_bwrite_el (buf, &info->n_dims, sizeof(info->n_dims)); + for (uint32_t j = 0; j < info->n_dims; ++j) { + 
gguf_bwrite_el(buf, &info->ne[j], sizeof(info->ne[j])); + } + gguf_bwrite_el(buf, &info->type, sizeof(info->type)); + gguf_bwrite_el(buf, &info->offset, sizeof(info->offset)); + } + + // we require the data section to be aligned, so take into account any padding + { + const size_t offset = buf->offset; + const size_t offset_pad = GGML_PAD(offset, ctx->alignment); + + if (offset_pad != offset) { + uint8_t pad = 0; + for (size_t i = 0; i < offset_pad - offset; ++i) { + gguf_bwrite_el(buf, &pad, sizeof(pad)); + } + } + } + + if (only_meta) { + return; + } + + size_t offset = 0; + + // write tensor data + for (uint32_t i = 0; i < ctx->header.n_tensors; ++i) { + struct gguf_tensor_info * info = &ctx->infos[i]; + + const size_t size = info->size; + const size_t size_pad = GGML_PAD(size, ctx->alignment); + + gguf_bwrite_el(buf, info->data, size); + + if (size_pad != size) { + uint8_t pad = 0; + for (size_t j = 0; j < size_pad - size; ++j) { + gguf_bwrite_el(buf, &pad, sizeof(pad)); + } + } + + GGML_ASSERT(offset == info->offset); + + offset += size_pad; + } +} + +void gguf_write_to_file(const struct gguf_context * ctx, const char * fname, bool only_meta) { + FILE * file = ggml_fopen(fname, "wb"); + if (!file) { + GGML_ABORT("failed to open file for writing"); + } + + struct gguf_buf buf = gguf_buf_init(16*1024); + + gguf_write_to_buf(ctx, &buf, only_meta); + + fwrite(buf.data, 1, buf.offset, file); + + gguf_buf_free(buf); + + fclose(file); +} + +size_t gguf_get_meta_size(const struct gguf_context * ctx) { + // no allocs - only compute size + struct gguf_buf buf = gguf_buf_init(0); + + gguf_write_to_buf(ctx, &buf, true); + + return buf.offset; +} + +void gguf_get_meta_data(const struct gguf_context * ctx, void * data) { + struct gguf_buf buf = gguf_buf_init(16*1024); + + gguf_write_to_buf(ctx, &buf, true); + + memcpy(data, buf.data, buf.offset); + + gguf_buf_free(buf); +} + +//////////////////////////////////////////////////////////////////////////////// + +int 
ggml_cpu_has_avx(void) { +#if defined(__AVX__) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_avx_vnni(void) { +#if defined(__AVXVNNI__) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_avx2(void) { +#if defined(__AVX2__) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_avx512(void) { +#if defined(__AVX512F__) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_avx512_vbmi(void) { +#if defined(__AVX512VBMI__) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_avx512_vnni(void) { +#if defined(__AVX512VNNI__) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_avx512_bf16(void) { +#if defined(__AVX512BF16__) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_fma(void) { +#if defined(__FMA__) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_neon(void) { +#if defined(__ARM_ARCH) + return ggml_arm_arch_features.has_neon; +#else + return 0; +#endif +} + +int ggml_cpu_has_sve(void) { +#if defined(__ARM_ARCH) + return ggml_arm_arch_features.has_sve; +#else + return 0; +#endif +} + +int ggml_cpu_has_arm_fma(void) { +#if defined(__ARM_FEATURE_FMA) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_riscv_v(void) { +#if defined(__riscv_v_intrinsic) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_metal(void) { +#if defined(GGML_USE_METAL) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_f16c(void) { +#if defined(__F16C__) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_fp16_va(void) { +#if defined(__ARM_FEATURE_FP16_VECTOR_ARITHMETIC) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_wasm_simd(void) { +#if defined(__wasm_simd128__) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_blas(void) { +#if defined(GGML_USE_BLAS) || defined(GGML_USE_CUDA) || defined(GGML_USE_VULKAN) || defined(GGML_USE_SYCL) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_cuda(void) { +#if defined(GGML_USE_CUDA) + return 
1; +#else + return 0; +#endif +} + +int ggml_cpu_has_vulkan(void) { +#if defined(GGML_USE_VULKAN) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_kompute(void) { +#if defined(GGML_USE_KOMPUTE) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_sycl(void) { +#if defined(GGML_USE_SYCL) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_rpc(void) { +#if defined(GGML_USE_RPC) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_cann(void) { +#if defined(GGML_USE_CANN) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_llamafile(void) { +#if defined(GGML_USE_LLAMAFILE) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_gpublas(void) { + return ggml_cpu_has_cuda() || ggml_cpu_has_vulkan() || ggml_cpu_has_kompute() || ggml_cpu_has_sycl(); +} + +int ggml_cpu_has_sse3(void) { +#if defined(__SSE3__) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_ssse3(void) { +#if defined(__SSSE3__) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_vsx(void) { +#if defined(__POWER9_VECTOR__) + return 1; +#else + return 0; +#endif +} + +int ggml_cpu_has_matmul_int8(void) { +#if defined(__ARM_ARCH) + return ggml_arm_arch_features.has_i8mm; +#else + return 0; +#endif +} + +int ggml_cpu_get_sve_cnt(void) { +#if defined(__ARM_ARCH) + return ggml_arm_arch_features.sve_cnt; +#else + return 0; +#endif +} +//////////////////////////////////////////////////////////////////////////////// diff --git a/ml/backend/ggml/ggml.h b/ml/backend/ggml/ggml.h new file mode 100644 index 000000000..73deed077 --- /dev/null +++ b/ml/backend/ggml/ggml.h @@ -0,0 +1,2595 @@ +/** + * llama.cpp - commit 3f1ae2e32cde00c39b96be6d01c2997c29bae555 - do not edit this file + * + * MIT License + * + * Copyright (c) 2023-2024 The ggml authors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without 
restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +#pragma once + +// +// GGML Tensor Library +// +// This documentation is still a work in progress. +// If you wish some specific topics to be covered, feel free to drop a comment: +// +// https://github.com/ggerganov/whisper.cpp/issues/40 +// +// ## Overview +// +// This library implements: +// +// - a set of tensor operations +// - automatic differentiation +// - basic optimization algorithms +// +// The aim of this library is to provide a minimalistic approach for various machine learning tasks. This includes, +// but is not limited to, the following: +// +// - linear regression +// - support vector machines +// - neural networks +// +// The library allows the user to define a certain function using the available tensor operations. This function +// definition is represented internally via a computation graph. Each tensor operation in the function definition +// corresponds to a node in the graph. Having the computation graph defined, the user can choose to compute the +// function's value and/or its gradient with respect to the input variables. 
Optionally, the function can be optimized +// using one of the available optimization algorithms. +// +// For example, here we define the function: f(x) = a*x^2 + b +// +// { +// struct ggml_init_params params = { +// .mem_size = 16*1024*1024, +// .mem_buffer = NULL, +// }; +// +// // memory allocation happens here +// struct ggml_context * ctx = ggml_init(params); +// +// struct ggml_tensor * x = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1); +// +// ggml_set_param(ctx, x); // x is an input variable +// +// struct ggml_tensor * a = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1); +// struct ggml_tensor * b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 1); +// struct ggml_tensor * x2 = ggml_mul(ctx, x, x); +// struct ggml_tensor * f = ggml_add(ctx, ggml_mul(ctx, a, x2), b); +// +// ... +// } +// +// Notice that the function definition above does not involve any actual computation. The computation is performed only +// when the user explicitly requests it. For example, to compute the function's value at x = 2.0: +// +// { +// ... +// +// struct ggml_cgraph * gf = ggml_new_graph(ctx); +// ggml_build_forward_expand(gf, f); +// +// // set the input variable and parameter values +// ggml_set_f32(x, 2.0f); +// ggml_set_f32(a, 3.0f); +// ggml_set_f32(b, 4.0f); +// +// ggml_graph_compute_with_ctx(ctx, &gf, n_threads); +// +// printf("f = %f\n", ggml_get_f32_1d(f, 0)); +// +// ... +// } +// +// The actual computation is performed in the ggml_graph_compute() function. +// +// The ggml_new_tensor_...() functions create new tensors. They are allocated in the memory buffer provided to the +// ggml_init() function. You have to be careful not to exceed the memory buffer size. Therefore, you have to know +// in advance how much memory you need for your computation. Alternatively, you can allocate a large enough memory +// and after defining the computation graph, call the ggml_used_mem() function to find out how much memory was +// actually needed. 
+// +// The ggml_set_param() function marks a tensor as an input variable. This is used by the automatic +// differentiation and optimization algorithms. +// +// The described approach allows to define the function graph once and then compute its forward or backward graphs +// multiple times. All computations will use the same memory buffer allocated in the ggml_init() function. This way +// the user can avoid the memory allocation overhead at runtime. +// +// The library supports multi-dimensional tensors - up to 4 dimensions. The FP16 and FP32 data types are first class +// citizens, but in theory the library can be extended to support FP8 and integer data types. +// +// Each tensor operation produces a new tensor. Initially the library was envisioned to support only the use of unary +// and binary operations. Most of the available operations fall into one of these two categories. With time, it became +// clear that the library needs to support more complex operations. The way to support these operations is not clear +// yet, but a few examples are demonstrated in the following operations: +// +// - ggml_permute() +// - ggml_conv_1d_1s() +// - ggml_conv_1d_2s() +// +// For each tensor operator, the library implements a forward and backward computation function. The forward function +// computes the output tensor value given the input tensor values. The backward function computes the adjoint of the +// input tensors given the adjoint of the output tensor. For a detailed explanation of what this means, take a +// calculus class, or watch the following video: +// +// What is Automatic Differentiation? +// https://www.youtube.com/watch?v=wG_nF1awSSY +// +// +// ## Tensor data (struct ggml_tensor) +// +// The tensors are stored in memory via the ggml_tensor struct. The structure provides information about the size of +// the tensor, the data type, and the memory buffer where the tensor data is stored. 
Additionally, it contains +// pointers to the "source" tensors - i.e. the tensors that were used to compute the current tensor. For example: +// +// { +// struct ggml_tensor * c = ggml_add(ctx, a, b); +// +// assert(c->src[0] == a); +// assert(c->src[1] == b); +// } +// +// The multi-dimensional tensors are stored in row-major order. The ggml_tensor struct contains fields for the +// number of elements in each dimension ("ne") as well as the number of bytes ("nb", a.k.a. stride). This allows +// to store tensors that are not contiguous in memory, which is useful for operations such as transposition and +// permutation. All tensor operations have to take the stride into account and not assume that the tensor is +// contiguous in memory. +// +// The data of the tensor is accessed via the "data" pointer. For example: +// +// { +// const int nx = 2; +// const int ny = 3; +// +// struct ggml_tensor * a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, nx, ny); +// +// for (int y = 0; y < ny; y++) { +// for (int x = 0; x < nx; x++) { +// *(float *) ((char *) a->data + y*a->nb[1] + x*a->nb[0]) = x + y; +// } +// } +// +// ... +// } +// +// Alternatively, there are helper functions, such as ggml_get_f32_1d() and ggml_set_f32_1d() that can be used. 
+// +// ## The matrix multiplication operator (ggml_mul_mat) +// +// TODO +// +// +// ## Multi-threading +// +// TODO +// +// +// ## Overview of ggml.c +// +// TODO +// +// +// ## SIMD optimizations +// +// TODO +// +// +// ## Debugging ggml +// +// TODO +// +// + +#ifdef GGML_SHARED +# if defined(_WIN32) && !defined(__MINGW32__) +# ifdef GGML_BUILD +# define GGML_API __declspec(dllexport) +# else +# define GGML_API __declspec(dllimport) +# endif +# else +# define GGML_API __attribute__ ((visibility ("default"))) +# endif +#else +# define GGML_API +#endif + +#ifdef GGML_MULTIPLATFORM +# if defined(_WIN32) +# define GGML_CALL +# else +# define GGML_CALL __attribute__((__ms_abi__)) +# endif +#else +# define GGML_CALL +#endif + +// TODO: support for clang +#ifdef __GNUC__ +# define GGML_DEPRECATED(func, hint) func __attribute__((deprecated(hint))) +#elif defined(_MSC_VER) +# define GGML_DEPRECATED(func, hint) __declspec(deprecated(hint)) func +#else +# define GGML_DEPRECATED(func, hint) func +#endif + +#ifndef __GNUC__ +# define GGML_ATTRIBUTE_FORMAT(...) +#elif defined(__MINGW32__) +# define GGML_ATTRIBUTE_FORMAT(...) __attribute__((format(gnu_printf, __VA_ARGS__))) +#else +# define GGML_ATTRIBUTE_FORMAT(...) 
__attribute__((format(printf, __VA_ARGS__))) +#endif + +#include +#include +#include +#include + +#define GGML_FILE_MAGIC 0x67676d6c // "ggml" +#define GGML_FILE_VERSION 2 + +#define GGML_QNT_VERSION 2 // bump this on quantization format changes +#define GGML_QNT_VERSION_FACTOR 1000 // do not change this + +#define GGML_MAX_DIMS 4 +#define GGML_MAX_PARAMS 2048 +#define GGML_MAX_CONTEXTS 64 +#define GGML_MAX_SRC 10 +#define GGML_MAX_N_THREADS 512 +#define GGML_MAX_OP_PARAMS 64 + +#ifndef GGML_MAX_NAME +# define GGML_MAX_NAME 64 +#endif + +#define GGML_DEFAULT_N_THREADS 4 +#define GGML_DEFAULT_GRAPH_SIZE 2048 + +#if UINTPTR_MAX == 0xFFFFFFFF + #define GGML_MEM_ALIGN 4 +#else + #define GGML_MEM_ALIGN 16 +#endif + +#define GGML_EXIT_SUCCESS 0 +#define GGML_EXIT_ABORTED 1 + +#define GGML_ROPE_TYPE_NEOX 2 + +#define GGUF_MAGIC "GGUF" + +#define GGUF_VERSION 3 + +#define GGUF_DEFAULT_ALIGNMENT 32 + +#define GGML_UNUSED(x) (void)(x) + +#define GGML_PAD(x, n) (((x) + (n) - 1) & ~((n) - 1)) + +#ifndef NDEBUG +# define GGML_UNREACHABLE() do { fprintf(stderr, "statement should be unreachable\n"); abort(); } while(0) +#elif defined(__GNUC__) +# define GGML_UNREACHABLE() __builtin_unreachable() +#elif defined(_MSC_VER) +# define GGML_UNREACHABLE() __assume(0) +#else +# define GGML_UNREACHABLE() ((void) 0) +#endif + +#ifdef __cplusplus +# define GGML_NORETURN [[noreturn]] +#elif defined(_MSC_VER) +# define GGML_NORETURN __declspec(noreturn) +#else +# define GGML_NORETURN _Noreturn +#endif + +#define GGML_ABORT(...) ggml_abort(__FILE__, __LINE__, __VA_ARGS__) +#define GGML_ASSERT(x) if (!(x)) GGML_ABORT("GGML_ASSERT(%s) failed", #x) + +// used to copy the number of elements and stride in bytes of tensors into local variables. +// main purpose is to reduce code duplication and improve readability. 
+// +// example: +// +// GGML_TENSOR_LOCALS(int64_t, ne1, src1, ne); +// GGML_TENSOR_LOCALS(size_t, nb1, src1, nb); +// +#define GGML_TENSOR_LOCALS_1(type, prefix, pointer, array) \ + const type prefix##0 = (pointer)->array[0]; \ + GGML_UNUSED(prefix##0); +#define GGML_TENSOR_LOCALS_2(type, prefix, pointer, array) \ + GGML_TENSOR_LOCALS_1 (type, prefix, pointer, array) \ + const type prefix##1 = (pointer)->array[1]; \ + GGML_UNUSED(prefix##1); +#define GGML_TENSOR_LOCALS_3(type, prefix, pointer, array) \ + GGML_TENSOR_LOCALS_2 (type, prefix, pointer, array) \ + const type prefix##2 = (pointer)->array[2]; \ + GGML_UNUSED(prefix##2); +#define GGML_TENSOR_LOCALS(type, prefix, pointer, array) \ + GGML_TENSOR_LOCALS_3 (type, prefix, pointer, array) \ + const type prefix##3 = (pointer)->array[3]; \ + GGML_UNUSED(prefix##3); + +#define GGML_TENSOR_UNARY_OP_LOCALS \ + GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne) \ + GGML_TENSOR_LOCALS(size_t, nb0, src0, nb) \ + GGML_TENSOR_LOCALS(int64_t, ne, dst, ne) \ + GGML_TENSOR_LOCALS(size_t, nb, dst, nb) + +#define GGML_TENSOR_BINARY_OP_LOCALS \ + GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne) \ + GGML_TENSOR_LOCALS(size_t, nb0, src0, nb) \ + GGML_TENSOR_LOCALS(int64_t, ne1, src1, ne) \ + GGML_TENSOR_LOCALS(size_t, nb1, src1, nb) \ + GGML_TENSOR_LOCALS(int64_t, ne, dst, ne) \ + GGML_TENSOR_LOCALS(size_t, nb, dst, nb) + +#define GGML_TENSOR_BINARY_OP_LOCALS01 \ + GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne) \ + GGML_TENSOR_LOCALS(size_t, nb0, src0, nb) \ + GGML_TENSOR_LOCALS(int64_t, ne1, src1, ne) \ + GGML_TENSOR_LOCALS(size_t, nb1, src1, nb) + +#ifdef __cplusplus +extern "C" { +#endif + + GGML_NORETURN GGML_ATTRIBUTE_FORMAT(3, 4) + GGML_API void ggml_abort(const char * file, int line, const char * fmt, ...); + + enum ggml_status { + GGML_STATUS_ALLOC_FAILED = -2, + GGML_STATUS_FAILED = -1, + GGML_STATUS_SUCCESS = 0, + GGML_STATUS_ABORTED = 1, + }; + + // get ggml_status name string + GGML_API GGML_CALL const char * 
ggml_status_to_string(enum ggml_status status); + + // ieee 754-2008 half-precision float16 + // todo: make this not an integral type + typedef uint16_t ggml_fp16_t; + GGML_API float ggml_fp16_to_fp32(ggml_fp16_t); + GGML_API ggml_fp16_t ggml_fp32_to_fp16(float); + GGML_API void ggml_fp16_to_fp32_row(const ggml_fp16_t *, float *, int64_t); + GGML_API void ggml_fp32_to_fp16_row(const float *, ggml_fp16_t *, int64_t); + + // google brain half-precision bfloat16 + typedef struct { uint16_t bits; } ggml_bf16_t; + GGML_API ggml_bf16_t ggml_fp32_to_bf16(float); + GGML_API float ggml_bf16_to_fp32(ggml_bf16_t); // consider just doing << 16 + GGML_API void ggml_bf16_to_fp32_row(const ggml_bf16_t *, float *, int64_t); + GGML_API void ggml_fp32_to_bf16_row_ref(const float *, ggml_bf16_t *, int64_t); + GGML_API void ggml_fp32_to_bf16_row(const float *, ggml_bf16_t *, int64_t); + + struct ggml_object; + struct ggml_context; + struct ggml_cgraph; + + // NOTE: always add types at the end of the enum to keep backward compatibility + enum ggml_type { + GGML_TYPE_F32 = 0, + GGML_TYPE_F16 = 1, + GGML_TYPE_Q4_0 = 2, + GGML_TYPE_Q4_1 = 3, + // GGML_TYPE_Q4_2 = 4, support has been removed + // GGML_TYPE_Q4_3 = 5, support has been removed + GGML_TYPE_Q5_0 = 6, + GGML_TYPE_Q5_1 = 7, + GGML_TYPE_Q8_0 = 8, + GGML_TYPE_Q8_1 = 9, + GGML_TYPE_Q2_K = 10, + GGML_TYPE_Q3_K = 11, + GGML_TYPE_Q4_K = 12, + GGML_TYPE_Q5_K = 13, + GGML_TYPE_Q6_K = 14, + GGML_TYPE_Q8_K = 15, + GGML_TYPE_IQ2_XXS = 16, + GGML_TYPE_IQ2_XS = 17, + GGML_TYPE_IQ3_XXS = 18, + GGML_TYPE_IQ1_S = 19, + GGML_TYPE_IQ4_NL = 20, + GGML_TYPE_IQ3_S = 21, + GGML_TYPE_IQ2_S = 22, + GGML_TYPE_IQ4_XS = 23, + GGML_TYPE_I8 = 24, + GGML_TYPE_I16 = 25, + GGML_TYPE_I32 = 26, + GGML_TYPE_I64 = 27, + GGML_TYPE_F64 = 28, + GGML_TYPE_IQ1_M = 29, + GGML_TYPE_BF16 = 30, + GGML_TYPE_Q4_0_4_4 = 31, + GGML_TYPE_Q4_0_4_8 = 32, + GGML_TYPE_Q4_0_8_8 = 33, + GGML_TYPE_TQ1_0 = 34, + GGML_TYPE_TQ2_0 = 35, + GGML_TYPE_COUNT, + }; + + // precision + enum 
ggml_prec { + GGML_PREC_DEFAULT, + GGML_PREC_F32, + }; + + enum ggml_backend_type { + GGML_BACKEND_TYPE_CPU = 0, + GGML_BACKEND_TYPE_GPU = 10, + GGML_BACKEND_TYPE_GPU_SPLIT = 20, + }; + + // model file types + enum ggml_ftype { + GGML_FTYPE_UNKNOWN = -1, + GGML_FTYPE_ALL_F32 = 0, + GGML_FTYPE_MOSTLY_F16 = 1, // except 1d tensors + GGML_FTYPE_MOSTLY_Q4_0 = 2, // except 1d tensors + GGML_FTYPE_MOSTLY_Q4_1 = 3, // except 1d tensors + GGML_FTYPE_MOSTLY_Q4_1_SOME_F16 = 4, // tok_embeddings.weight and output.weight are F16 + GGML_FTYPE_MOSTLY_Q8_0 = 7, // except 1d tensors + GGML_FTYPE_MOSTLY_Q5_0 = 8, // except 1d tensors + GGML_FTYPE_MOSTLY_Q5_1 = 9, // except 1d tensors + GGML_FTYPE_MOSTLY_Q2_K = 10, // except 1d tensors + GGML_FTYPE_MOSTLY_Q3_K = 11, // except 1d tensors + GGML_FTYPE_MOSTLY_Q4_K = 12, // except 1d tensors + GGML_FTYPE_MOSTLY_Q5_K = 13, // except 1d tensors + GGML_FTYPE_MOSTLY_Q6_K = 14, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ2_XXS = 15, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ2_XS = 16, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ3_XXS = 17, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ1_S = 18, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ4_NL = 19, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ3_S = 20, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ2_S = 21, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ4_XS = 22, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ1_M = 23, // except 1d tensors + GGML_FTYPE_MOSTLY_BF16 = 24, // except 1d tensors + GGML_FTYPE_MOSTLY_Q4_0_4_4 = 25, // except 1d tensors + GGML_FTYPE_MOSTLY_Q4_0_4_8 = 26, // except 1d tensors + GGML_FTYPE_MOSTLY_Q4_0_8_8 = 27, // except 1d tensors + }; + + // available tensor operations: + enum ggml_op { + GGML_OP_NONE = 0, + + GGML_OP_DUP, + GGML_OP_ADD, + GGML_OP_ADD1, + GGML_OP_ACC, + GGML_OP_SUB, + GGML_OP_MUL, + GGML_OP_DIV, + GGML_OP_SQR, + GGML_OP_SQRT, + GGML_OP_LOG, + GGML_OP_SIN, + GGML_OP_COS, + GGML_OP_SUM, + GGML_OP_SUM_ROWS, + GGML_OP_MEAN, + GGML_OP_ARGMAX, + GGML_OP_REPEAT, + 
GGML_OP_REPEAT_BACK, + GGML_OP_CONCAT, + GGML_OP_SILU_BACK, + GGML_OP_NORM, // normalize + GGML_OP_RMS_NORM, + GGML_OP_RMS_NORM_BACK, + GGML_OP_GROUP_NORM, + + GGML_OP_MUL_MAT, + GGML_OP_MUL_MAT_ID, + GGML_OP_OUT_PROD, + + GGML_OP_SCALE, + GGML_OP_SET, + GGML_OP_CPY, + GGML_OP_CONT, + GGML_OP_RESHAPE, + GGML_OP_VIEW, + GGML_OP_PERMUTE, + GGML_OP_TRANSPOSE, + GGML_OP_GET_ROWS, + GGML_OP_GET_ROWS_BACK, + GGML_OP_DIAG, + GGML_OP_DIAG_MASK_INF, + GGML_OP_DIAG_MASK_ZERO, + GGML_OP_SOFT_MAX, + GGML_OP_SOFT_MAX_BACK, + GGML_OP_ROPE, + GGML_OP_ROPE_BACK, + GGML_OP_CLAMP, + GGML_OP_CONV_TRANSPOSE_1D, + GGML_OP_IM2COL, + GGML_OP_IM2COL_BACK, + GGML_OP_CONV_TRANSPOSE_2D, + GGML_OP_POOL_1D, + GGML_OP_POOL_2D, + GGML_OP_POOL_2D_BACK, + GGML_OP_UPSCALE, // nearest interpolate + GGML_OP_PAD, + GGML_OP_UNPAD, + GGML_OP_ARANGE, + GGML_OP_TIMESTEP_EMBEDDING, + GGML_OP_ARGSORT, + GGML_OP_LEAKY_RELU, + + GGML_OP_FLASH_ATTN_EXT, + GGML_OP_FLASH_ATTN_BACK, + GGML_OP_SSM_CONV, + GGML_OP_SSM_SCAN, + GGML_OP_WIN_PART, + GGML_OP_WIN_UNPART, + GGML_OP_GET_REL_POS, + GGML_OP_ADD_REL_POS, + GGML_OP_RWKV_WKV, + + GGML_OP_UNARY, + + GGML_OP_MAP_UNARY, + GGML_OP_MAP_BINARY, + + GGML_OP_MAP_CUSTOM1_F32, + GGML_OP_MAP_CUSTOM2_F32, + GGML_OP_MAP_CUSTOM3_F32, + + GGML_OP_MAP_CUSTOM1, + GGML_OP_MAP_CUSTOM2, + GGML_OP_MAP_CUSTOM3, + + GGML_OP_CROSS_ENTROPY_LOSS, + GGML_OP_CROSS_ENTROPY_LOSS_BACK, + GGML_OP_OPT_STEP_ADAMW, + + GGML_OP_COUNT, + }; + + enum ggml_unary_op { + GGML_UNARY_OP_ABS, + GGML_UNARY_OP_SGN, + GGML_UNARY_OP_NEG, + GGML_UNARY_OP_STEP, + GGML_UNARY_OP_TANH, + GGML_UNARY_OP_ELU, + GGML_UNARY_OP_RELU, + GGML_UNARY_OP_SIGMOID, + GGML_UNARY_OP_GELU, + GGML_UNARY_OP_GELU_QUICK, + GGML_UNARY_OP_SILU, + GGML_UNARY_OP_HARDSWISH, + GGML_UNARY_OP_HARDSIGMOID, + GGML_UNARY_OP_EXP, + + GGML_UNARY_OP_COUNT, + }; + + enum ggml_object_type { + GGML_OBJECT_TYPE_TENSOR, + GGML_OBJECT_TYPE_GRAPH, + GGML_OBJECT_TYPE_WORK_BUFFER + }; + + enum ggml_log_level { + GGML_LOG_LEVEL_NONE = 0, + 
GGML_LOG_LEVEL_INFO = 1, + GGML_LOG_LEVEL_WARN = 2, + GGML_LOG_LEVEL_ERROR = 3, + GGML_LOG_LEVEL_DEBUG = 4, + GGML_LOG_LEVEL_CONT = 5, // continue previous log + }; + + // this tensor... + enum ggml_tensor_flag { + GGML_TENSOR_FLAG_INPUT = 1, // ...is an input for the GGML compute graph + GGML_TENSOR_FLAG_OUTPUT = 2, // ...is an output for the GGML compute graph + GGML_TENSOR_FLAG_PARAM = 4, // ...contains trainable parameters + GGML_TENSOR_FLAG_LOSS = 8, // ...defines loss for numerical optimization (multiple loss tensors add up) + }; + + // n-dimensional tensor + struct ggml_tensor { + enum ggml_type type; + + GGML_DEPRECATED(enum ggml_backend_type backend, "use the buffer type to find the storage location of the tensor"); + + struct ggml_backend_buffer * buffer; + + int64_t ne[GGML_MAX_DIMS]; // number of elements + size_t nb[GGML_MAX_DIMS]; // stride in bytes: + // nb[0] = ggml_type_size(type) + // nb[1] = nb[0] * (ne[0] / ggml_blck_size(type)) + padding + // nb[i] = nb[i-1] * ne[i-1] + + // compute data + enum ggml_op op; + + // op params - allocated as int32_t for alignment + int32_t op_params[GGML_MAX_OP_PARAMS / sizeof(int32_t)]; + + int32_t flags; + + struct ggml_tensor * grad; + struct ggml_tensor * src[GGML_MAX_SRC]; + + // source tensor and offset for views + struct ggml_tensor * view_src; + size_t view_offs; + + void * data; + + char name[GGML_MAX_NAME]; + + void * extra; // extra things e.g. 
for ggml-cuda.cu + + // char padding[4]; + }; + + static const size_t GGML_TENSOR_SIZE = sizeof(struct ggml_tensor); + + // Abort callback + // If not NULL, called before ggml computation + // If it returns true, the computation is aborted + typedef bool (*ggml_abort_callback)(void * data); + + // Scheduling priorities + enum ggml_sched_priority { + GGML_SCHED_PRIO_NORMAL, + GGML_SCHED_PRIO_MEDIUM, + GGML_SCHED_PRIO_HIGH, + GGML_SCHED_PRIO_REALTIME + }; + + // Threadpool params + // Use ggml_threadpool_params_default() or ggml_threadpool_params_init() to populate the defaults + struct ggml_threadpool_params { + bool cpumask[GGML_MAX_N_THREADS]; // mask of cpu cores (all-zeros means use default affinity settings) + int n_threads; // number of threads + enum ggml_sched_priority prio; // thread priority + uint32_t poll; // polling level (0 - no polling, 100 - aggressive polling) + bool strict_cpu; // strict cpu placement + bool paused; // start in paused state + }; + + struct ggml_threadpool; // forward declaration, see ggml.c + + typedef struct ggml_threadpool * ggml_threadpool_t; + + // the compute plan that needs to be prepared for ggml_graph_compute() + // since https://github.com/ggerganov/ggml/issues/287 + struct ggml_cplan { + size_t work_size; // size of work buffer, calculated by `ggml_graph_plan()` + uint8_t * work_data; // work buffer, to be allocated by caller before calling to `ggml_graph_compute()` + + int n_threads; + struct ggml_threadpool * threadpool; + + // abort ggml_graph_compute when true + ggml_abort_callback abort_callback; + void * abort_callback_data; + }; + + // scratch buffer + struct ggml_scratch { + size_t offs; + size_t size; + void * data; + }; + + struct ggml_init_params { + // memory pool + size_t mem_size; // bytes + void * mem_buffer; // if NULL, memory will be allocated internally + bool no_alloc; // don't allocate memory for the tensor data + }; + + // numa strategies + enum ggml_numa_strategy { + GGML_NUMA_STRATEGY_DISABLED = 0, 
+ GGML_NUMA_STRATEGY_DISTRIBUTE = 1, + GGML_NUMA_STRATEGY_ISOLATE = 2, + GGML_NUMA_STRATEGY_NUMACTL = 3, + GGML_NUMA_STRATEGY_MIRROR = 4, + GGML_NUMA_STRATEGY_COUNT + }; + + // + // GUID + // + + // GUID types + typedef uint8_t ggml_guid[16]; + typedef ggml_guid * ggml_guid_t; + + GGML_API bool ggml_guid_matches(ggml_guid_t guid_a, ggml_guid_t guid_b); + + // misc + + GGML_API void ggml_time_init(void); // call this once at the beginning of the program + GGML_API int64_t ggml_time_ms(void); + GGML_API int64_t ggml_time_us(void); + GGML_API int64_t ggml_cycles(void); + GGML_API int64_t ggml_cycles_per_ms(void); + + // accepts a UTF-8 path, even on Windows + GGML_API FILE * ggml_fopen(const char * fname, const char * mode); + + GGML_API void ggml_numa_init(enum ggml_numa_strategy numa); // call once for better performance on NUMA systems + GGML_API bool ggml_is_numa(void); // true if init detected that system has >1 NUMA node + + GGML_API void ggml_print_object (const struct ggml_object * obj); + GGML_API void ggml_print_objects(const struct ggml_context * ctx); + + GGML_API GGML_CALL int64_t ggml_nelements (const struct ggml_tensor * tensor); + GGML_API GGML_CALL int64_t ggml_nrows (const struct ggml_tensor * tensor); + GGML_API GGML_CALL size_t ggml_nbytes (const struct ggml_tensor * tensor); + GGML_API size_t ggml_nbytes_pad (const struct ggml_tensor * tensor); // same as ggml_nbytes() but padded to GGML_MEM_ALIGN + + GGML_API GGML_CALL int64_t ggml_blck_size(enum ggml_type type); + GGML_API GGML_CALL size_t ggml_type_size(enum ggml_type type); // size in bytes for all elements in a block + GGML_API GGML_CALL size_t ggml_row_size (enum ggml_type type, int64_t ne); // size in bytes for all elements in a row + + GGML_DEPRECATED( + GGML_API double ggml_type_sizef(enum ggml_type type), // ggml_type_size()/ggml_blck_size() as float + "use ggml_row_size() instead"); + + GGML_API GGML_CALL const char * ggml_type_name(enum ggml_type type); + GGML_API GGML_CALL const char 
* ggml_op_name (enum ggml_op op); + GGML_API const char * ggml_op_symbol(enum ggml_op op); + + GGML_API const char * ggml_unary_op_name(enum ggml_unary_op op); + GGML_API GGML_CALL const char * ggml_op_desc(const struct ggml_tensor * t); // unary or op name + + GGML_API GGML_CALL size_t ggml_element_size(const struct ggml_tensor * tensor); + + GGML_API GGML_CALL bool ggml_is_quantized(enum ggml_type type); + + // TODO: temporary until model loading of ggml examples is refactored + GGML_API enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype); + + GGML_API GGML_CALL bool ggml_is_transposed(const struct ggml_tensor * tensor); + GGML_API GGML_CALL bool ggml_is_permuted (const struct ggml_tensor * tensor); + GGML_API GGML_CALL bool ggml_is_empty (const struct ggml_tensor * tensor); + GGML_API bool ggml_is_scalar (const struct ggml_tensor * tensor); + GGML_API bool ggml_is_vector (const struct ggml_tensor * tensor); + GGML_API bool ggml_is_matrix (const struct ggml_tensor * tensor); + GGML_API bool ggml_is_3d (const struct ggml_tensor * tensor); + GGML_API int ggml_n_dims (const struct ggml_tensor * tensor); // returns 1 for scalars + + GGML_API GGML_CALL bool ggml_is_contiguous (const struct ggml_tensor * tensor); + GGML_API GGML_CALL bool ggml_is_contiguous_0(const struct ggml_tensor * tensor); // same as ggml_is_contiguous() + GGML_API GGML_CALL bool ggml_is_contiguous_1(const struct ggml_tensor * tensor); // contiguous for dims >= 1 + GGML_API GGML_CALL bool ggml_is_contiguous_2(const struct ggml_tensor * tensor); // contiguous for dims >= 2 + + GGML_API bool ggml_are_same_shape (const struct ggml_tensor * t0, const struct ggml_tensor * t1); + GGML_API bool ggml_are_same_stride(const struct ggml_tensor * t0, const struct ggml_tensor * t1); + + GGML_API bool ggml_can_repeat(const struct ggml_tensor * t0, const struct ggml_tensor * t1); + + // use this to compute the memory overhead of a tensor + GGML_API size_t ggml_tensor_overhead(void); + + GGML_API bool 
ggml_validate_row_data(enum ggml_type type, const void * data, size_t nbytes); + + // main + + GGML_API struct ggml_context * ggml_init(struct ggml_init_params params); + GGML_API void ggml_free(struct ggml_context * ctx); + + GGML_API size_t ggml_used_mem(const struct ggml_context * ctx); + + GGML_API size_t ggml_set_scratch (struct ggml_context * ctx, struct ggml_scratch scratch); + GGML_API bool ggml_get_no_alloc(struct ggml_context * ctx); + GGML_API void ggml_set_no_alloc(struct ggml_context * ctx, bool no_alloc); + + GGML_API void * ggml_get_mem_buffer (const struct ggml_context * ctx); + GGML_API size_t ggml_get_mem_size (const struct ggml_context * ctx); + GGML_API size_t ggml_get_max_tensor_size(const struct ggml_context * ctx); + + GGML_API struct ggml_tensor * ggml_new_tensor( + struct ggml_context * ctx, + enum ggml_type type, + int n_dims, + const int64_t *ne); + + GGML_API struct ggml_tensor * ggml_new_tensor_1d( + struct ggml_context * ctx, + enum ggml_type type, + int64_t ne0); + + GGML_API struct ggml_tensor * ggml_new_tensor_2d( + struct ggml_context * ctx, + enum ggml_type type, + int64_t ne0, + int64_t ne1); + + GGML_API struct ggml_tensor * ggml_new_tensor_3d( + struct ggml_context * ctx, + enum ggml_type type, + int64_t ne0, + int64_t ne1, + int64_t ne2); + + GGML_API struct ggml_tensor * ggml_new_tensor_4d( + struct ggml_context * ctx, + enum ggml_type type, + int64_t ne0, + int64_t ne1, + int64_t ne2, + int64_t ne3); + + GGML_API struct ggml_tensor * ggml_new_i32(struct ggml_context * ctx, int32_t value); + GGML_API struct ggml_tensor * ggml_new_f32(struct ggml_context * ctx, float value); + + GGML_API struct ggml_tensor * ggml_dup_tensor (struct ggml_context * ctx, const struct ggml_tensor * src); + GGML_API struct ggml_tensor * ggml_view_tensor(struct ggml_context * ctx, struct ggml_tensor * src); + + // Context tensor enumeration and lookup + GGML_API struct ggml_tensor * ggml_get_first_tensor(const struct ggml_context * ctx); + GGML_API 
struct ggml_tensor * ggml_get_next_tensor (const struct ggml_context * ctx, struct ggml_tensor * tensor); + GGML_API struct ggml_tensor * ggml_get_tensor(struct ggml_context * ctx, const char * name); + + GGML_API struct ggml_tensor * ggml_set_zero(struct ggml_tensor * tensor); + GGML_API struct ggml_tensor * ggml_set_i32 (struct ggml_tensor * tensor, int32_t value); + GGML_API struct ggml_tensor * ggml_set_f32 (struct ggml_tensor * tensor, float value); + + // Converts a flat index into coordinates + GGML_API void ggml_unravel_index(const struct ggml_tensor * tensor, int64_t i, int64_t * i0, int64_t * i1, int64_t * i2, int64_t * i3); + + GGML_API int32_t ggml_get_i32_1d(const struct ggml_tensor * tensor, int i); + GGML_API void ggml_set_i32_1d(const struct ggml_tensor * tensor, int i, int32_t value); + + GGML_API int32_t ggml_get_i32_nd(const struct ggml_tensor * tensor, int i0, int i1, int i2, int i3); + GGML_API void ggml_set_i32_nd(const struct ggml_tensor * tensor, int i0, int i1, int i2, int i3, int32_t value); + + GGML_API float ggml_get_f32_1d(const struct ggml_tensor * tensor, int i); + GGML_API void ggml_set_f32_1d(const struct ggml_tensor * tensor, int i, float value); + + GGML_API float ggml_get_f32_nd(const struct ggml_tensor * tensor, int i0, int i1, int i2, int i3); + GGML_API void ggml_set_f32_nd(const struct ggml_tensor * tensor, int i0, int i1, int i2, int i3, float value); + + GGML_API void * ggml_get_data (const struct ggml_tensor * tensor); + GGML_API float * ggml_get_data_f32(const struct ggml_tensor * tensor); + + GGML_API GGML_CALL enum ggml_unary_op ggml_get_unary_op(const struct ggml_tensor * tensor); + + GGML_API const char * ggml_get_name (const struct ggml_tensor * tensor); + GGML_API struct ggml_tensor * ggml_set_name ( struct ggml_tensor * tensor, const char * name); + GGML_ATTRIBUTE_FORMAT(2, 3) + GGML_API struct ggml_tensor * ggml_format_name( struct ggml_tensor * tensor, const char * fmt, ...); + + // + // operations on tensors 
with backpropagation + // + + GGML_API struct ggml_tensor * ggml_dup( + struct ggml_context * ctx, + struct ggml_tensor * a); + + // in-place, returns view(a) + GGML_API struct ggml_tensor * ggml_dup_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_add( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + GGML_API struct ggml_tensor * ggml_add_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + GGML_API struct ggml_tensor * ggml_add_cast( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + enum ggml_type type); + + GGML_API struct ggml_tensor * ggml_add1( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + GGML_API struct ggml_tensor * ggml_add1_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + // dst = a + // view(dst, nb1, nb2, nb3, offset) += b + // return dst + GGML_API struct ggml_tensor * ggml_acc( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t nb1, + size_t nb2, + size_t nb3, + size_t offset); + + GGML_API struct ggml_tensor * ggml_acc_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t nb1, + size_t nb2, + size_t nb3, + size_t offset); + + GGML_API struct ggml_tensor * ggml_sub( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + GGML_API struct ggml_tensor * ggml_sub_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + GGML_API struct ggml_tensor * ggml_mul( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + GGML_API struct ggml_tensor * ggml_mul_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + GGML_API struct ggml_tensor * ggml_div( + struct ggml_context * ctx, + struct 
ggml_tensor * a, + struct ggml_tensor * b); + + GGML_API struct ggml_tensor * ggml_div_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + GGML_API struct ggml_tensor * ggml_sqr( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_sqr_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_sqrt( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_sqrt_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_log( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_log_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_sin( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_sin_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_cos( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_cos_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + // return scalar + GGML_API struct ggml_tensor * ggml_sum( + struct ggml_context * ctx, + struct ggml_tensor * a); + + // sums along rows, with input shape [a,b,c,d] return shape [1,b,c,d] + GGML_API struct ggml_tensor * ggml_sum_rows( + struct ggml_context * ctx, + struct ggml_tensor * a); + + // mean along rows + GGML_API struct ggml_tensor * ggml_mean( + struct ggml_context * ctx, + struct ggml_tensor * a); + + // argmax along rows + GGML_API struct ggml_tensor * ggml_argmax( + struct ggml_context * ctx, + struct ggml_tensor * a); + + // if a is the same shape as b, and a is not parameter, return a + // otherwise, return a new tensor: repeat(a) to fit in b + GGML_API struct ggml_tensor * ggml_repeat( + struct ggml_context * ctx, + struct ggml_tensor * a, + 
struct ggml_tensor * b); + + // sums repetitions in a into shape of b + GGML_API struct ggml_tensor * ggml_repeat_back( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + // concat a and b along dim + // used in stable-diffusion + GGML_API struct ggml_tensor * ggml_concat( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int dim); + + GGML_API struct ggml_tensor * ggml_abs( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_abs_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_sgn( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_sgn_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_neg( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_neg_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_step( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_step_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_tanh( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_tanh_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_elu( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_elu_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_relu( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_leaky_relu( + struct ggml_context * ctx, + struct ggml_tensor * a, float negative_slope, bool inplace); + + GGML_API struct ggml_tensor * ggml_relu_inplace( + struct ggml_context * ctx, + struct 
ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_sigmoid( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_sigmoid_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_gelu( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_gelu_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_gelu_quick( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_gelu_quick_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_silu( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_silu_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + // a - x + // b - dy + GGML_API struct ggml_tensor * ggml_silu_back( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + // hardswish(x) = x * relu6(x + 3) / 6 + GGML_API struct ggml_tensor * ggml_hardswish( + struct ggml_context * ctx, + struct ggml_tensor * a); + + // hardsigmoid(x) = relu6(x + 3) / 6 + GGML_API struct ggml_tensor * ggml_hardsigmoid( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_exp( + struct ggml_context * ctx, + struct ggml_tensor * a); + + GGML_API struct ggml_tensor * ggml_exp_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + // normalize along rows + GGML_API struct ggml_tensor * ggml_norm( + struct ggml_context * ctx, + struct ggml_tensor * a, + float eps); + + GGML_API struct ggml_tensor * ggml_norm_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + float eps); + + GGML_API struct ggml_tensor * ggml_rms_norm( + struct ggml_context * ctx, + struct ggml_tensor * a, + float eps); + + GGML_API struct ggml_tensor * ggml_rms_norm_inplace( + struct ggml_context * ctx, 
+ struct ggml_tensor * a, + float eps); + + // group normalize along ne0*ne1*n_groups + // used in stable-diffusion + GGML_API struct ggml_tensor * ggml_group_norm( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_groups, + float eps); + + GGML_API struct ggml_tensor * ggml_group_norm_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_groups, + float eps); + + // a - x + // b - dy + GGML_API struct ggml_tensor * ggml_rms_norm_back( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + float eps); + + // A: k columns, n rows => [ne03, ne02, n, k] + // B: k columns, m rows (i.e. we transpose it internally) => [ne03 * x, ne02 * y, m, k] + // result is n columns, m rows => [ne03 * x, ne02 * y, m, n] + GGML_API struct ggml_tensor * ggml_mul_mat( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + // change the precision of a matrix multiplication + // set to GGML_PREC_F32 for higher precision (useful for phi-2) + GGML_API void ggml_mul_mat_set_prec( + struct ggml_tensor * a, + enum ggml_prec prec); + + // indirect matrix multiplication + GGML_API struct ggml_tensor * ggml_mul_mat_id( + struct ggml_context * ctx, + struct ggml_tensor * as, + struct ggml_tensor * b, + struct ggml_tensor * ids); + + // A: m columns, n rows, + // B: p columns, n rows, + // result is m columns, p rows + GGML_API struct ggml_tensor * ggml_out_prod( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + // + // operations on tensors without backpropagation + // + + GGML_API struct ggml_tensor * ggml_scale( + struct ggml_context * ctx, + struct ggml_tensor * a, + float s); + + // in-place, returns view(a) + GGML_API struct ggml_tensor * ggml_scale_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + float s); + + // b -> view(a,offset,nb1,nb2,3), return modified a + GGML_API struct ggml_tensor * ggml_set( + struct ggml_context * ctx, + struct ggml_tensor 
* a, + struct ggml_tensor * b, + size_t nb1, + size_t nb2, + size_t nb3, + size_t offset); // in bytes + + // b -> view(a,offset,nb1,nb2,3), return view(a) + GGML_API struct ggml_tensor * ggml_set_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t nb1, + size_t nb2, + size_t nb3, + size_t offset); // in bytes + + GGML_API struct ggml_tensor * ggml_set_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t offset); // in bytes + + GGML_API struct ggml_tensor * ggml_set_1d_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t offset); // in bytes + + // b -> view(a,offset,nb1,nb2,3), return modified a + GGML_API struct ggml_tensor * ggml_set_2d( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t nb1, + size_t offset); // in bytes + + // b -> view(a,offset,nb1,nb2,3), return view(a) + GGML_API struct ggml_tensor * ggml_set_2d_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + size_t nb1, + size_t offset); // in bytes + + // a -> b, return view(b) + GGML_API struct ggml_tensor * ggml_cpy( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + GGML_API struct ggml_tensor * ggml_cast( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_type type); + + // make contiguous + GGML_API struct ggml_tensor * ggml_cont( + struct ggml_context * ctx, + struct ggml_tensor * a); + + // make contiguous, with new shape + GGML_API struct ggml_tensor * ggml_cont_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0); + + GGML_API struct ggml_tensor * ggml_cont_2d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1); + + GGML_API struct ggml_tensor * ggml_cont_3d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1, + int64_t ne2); + + GGML_API struct 
ggml_tensor * ggml_cont_4d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1, + int64_t ne2, + int64_t ne3); + + // return view(a), b specifies the new shape + // TODO: when we start computing gradient, make a copy instead of view + GGML_API struct ggml_tensor * ggml_reshape( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + // return view(a) + // TODO: when we start computing gradient, make a copy instead of view + GGML_API struct ggml_tensor * ggml_reshape_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0); + + GGML_API struct ggml_tensor * ggml_reshape_2d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1); + + // return view(a) + // TODO: when we start computing gradient, make a copy instead of view + GGML_API struct ggml_tensor * ggml_reshape_3d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1, + int64_t ne2); + + GGML_API struct ggml_tensor * ggml_reshape_4d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1, + int64_t ne2, + int64_t ne3); + + // offset in bytes + GGML_API struct ggml_tensor * ggml_view_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + size_t offset); + + GGML_API struct ggml_tensor * ggml_view_2d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1, + size_t nb1, // row stride in bytes + size_t offset); + + GGML_API struct ggml_tensor * ggml_view_3d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1, + int64_t ne2, + size_t nb1, // row stride in bytes + size_t nb2, // slice stride in bytes + size_t offset); + + GGML_API struct ggml_tensor * ggml_view_4d( + struct ggml_context * ctx, + struct ggml_tensor * a, + int64_t ne0, + int64_t ne1, + int64_t ne2, + int64_t ne3, + size_t nb1, // row stride in bytes + size_t nb2, // slice stride in bytes + size_t nb3, + 
size_t offset); + + GGML_API struct ggml_tensor * ggml_permute( + struct ggml_context * ctx, + struct ggml_tensor * a, + int axis0, + int axis1, + int axis2, + int axis3); + + // alias for ggml_permute(ctx, a, 1, 0, 2, 3) + GGML_API struct ggml_tensor * ggml_transpose( + struct ggml_context * ctx, + struct ggml_tensor * a); + + // supports 3D: a->ne[2] == b->ne[1] + GGML_API struct ggml_tensor * ggml_get_rows( + struct ggml_context * ctx, + struct ggml_tensor * a, // data + struct ggml_tensor * b); // row indices + + GGML_API struct ggml_tensor * ggml_get_rows_back( + struct ggml_context * ctx, + struct ggml_tensor * a, // gradients of ggml_get_rows result + struct ggml_tensor * b, // row indices + struct ggml_tensor * c); // data for ggml_get_rows, only used for its shape + + GGML_API struct ggml_tensor * ggml_diag( + struct ggml_context * ctx, + struct ggml_tensor * a); + + // set elements above the diagonal to -INF + GGML_API struct ggml_tensor * ggml_diag_mask_inf( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_past); + + // in-place, returns view(a) + GGML_API struct ggml_tensor * ggml_diag_mask_inf_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_past); + + // set elements above the diagonal to 0 + GGML_API struct ggml_tensor * ggml_diag_mask_zero( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_past); + + // in-place, returns view(a) + GGML_API struct ggml_tensor * ggml_diag_mask_zero_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + int n_past); + + GGML_API struct ggml_tensor * ggml_soft_max( + struct ggml_context * ctx, + struct ggml_tensor * a); + + // in-place, returns view(a) + GGML_API struct ggml_tensor * ggml_soft_max_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a); + + // fused soft_max(a*scale + mask*(ALiBi slope)) + // mask is optional + // max_bias = 0.0f for no ALiBi + GGML_API struct ggml_tensor * ggml_soft_max_ext( + struct ggml_context * ctx, + struct 
ggml_tensor * a, + struct ggml_tensor * mask, + float scale, + float max_bias); + + GGML_API struct ggml_tensor * ggml_soft_max_back( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + // in-place, returns view(a) + GGML_API struct ggml_tensor * ggml_soft_max_back_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + // rotary position embedding + // if (mode & 1) - skip n_past elements (NOT SUPPORTED) + // if (mode & GGML_ROPE_TYPE_NEOX) - GPT-NeoX style + // + // b is an int32 vector with size a->ne[2], it contains the positions + GGML_API struct ggml_tensor * ggml_rope( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int n_dims, + int mode); + + // in-place, returns view(a) + GGML_API struct ggml_tensor * ggml_rope_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int n_dims, + int mode); + + // custom RoPE + // c is freq factors (e.g. 
phi3-128k), (optional) + GGML_API struct ggml_tensor * ggml_rope_ext( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + int n_dims, + int mode, + int n_ctx_orig, + float freq_base, + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow); + + // in-place, returns view(a) + GGML_API struct ggml_tensor * ggml_rope_ext_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + int n_dims, + int mode, + int n_ctx_orig, + float freq_base, + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow); + + GGML_DEPRECATED(GGML_API struct ggml_tensor * ggml_rope_custom( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int n_dims, + int mode, + int n_ctx_orig, + float freq_base, + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow), + "use ggml_rope_ext instead"); + + GGML_DEPRECATED(GGML_API struct ggml_tensor * ggml_rope_custom_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int n_dims, + int mode, + int n_ctx_orig, + float freq_base, + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow), + "use ggml_rope_ext_inplace instead"); + + // compute correction dims for YaRN RoPE scaling + GGML_CALL void ggml_rope_yarn_corr_dims( + int n_dims, int n_ctx_orig, float freq_base, float beta_fast, float beta_slow, float dims[2]); + + // rotary position embedding backward, i.e compute dx from dy + // a - dy + GGML_API struct ggml_tensor * ggml_rope_back( + struct ggml_context * ctx, + struct ggml_tensor * a, // gradients of ggml_rope result + struct ggml_tensor * b, // positions + struct ggml_tensor * c, // freq factors + int n_dims, + int mode, + int n_ctx_orig, + float freq_base, + float freq_scale, + float ext_factor, + 
float attn_factor, + float beta_fast, + float beta_slow); + + // clamp + // in-place, returns view(a) + GGML_API struct ggml_tensor * ggml_clamp( + struct ggml_context * ctx, + struct ggml_tensor * a, + float min, + float max); + + // im2col + // converts data into a format that effectively results in a convolution when combined with matrix multiplication + GGML_API struct ggml_tensor * ggml_im2col( + struct ggml_context * ctx, + struct ggml_tensor * a, // convolution kernel + struct ggml_tensor * b, // data + int s0, // stride dimension 0 + int s1, // stride dimension 1 + int p0, // padding dimension 0 + int p1, // padding dimension 1 + int d0, // dilation dimension 0 + int d1, // dilation dimension 1 + bool is_2D, + enum ggml_type dst_type); + + GGML_API struct ggml_tensor * ggml_im2col_back( + struct ggml_context * ctx, + struct ggml_tensor * a, // convolution kernel + struct ggml_tensor * b, // gradient of im2col output + int64_t * ne, // shape of im2col input + int s0, // stride dimension 0 + int s1, // stride dimension 1 + int p0, // padding dimension 0 + int p1, // padding dimension 1 + int d0, // dilation dimension 0 + int d1, // dilation dimension 1 + bool is_2D); + + GGML_API struct ggml_tensor * ggml_conv_depthwise_2d( + struct ggml_context * ctx, + struct ggml_tensor * a, // convolution kernel + struct ggml_tensor * b, // data + int s0, // stride dimension 0 + int s1, // stride dimension 1 + int p0, // padding dimension 0 + int p1, // padding dimension 1 + int d0, // dilation dimension 0 + int d1); // dilation dimension 1 + + GGML_API struct ggml_tensor * ggml_conv_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, // convolution kernel + struct ggml_tensor * b, // data + int s0, // stride + int p0, // padding + int d0); // dilation + + // conv_1d with padding = half + // alias for ggml_conv_1d(a, b, s, a->ne[0]/2, d) + GGML_API struct ggml_tensor* ggml_conv_1d_ph( + struct ggml_context * ctx, + struct ggml_tensor * a, // convolution kernel + 
struct ggml_tensor * b, // data + int s, // stride + int d); // dilation + + GGML_API struct ggml_tensor * ggml_conv_transpose_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, // convolution kernel + struct ggml_tensor * b, // data + int s0, // stride + int p0, // padding + int d0); // dilation + + GGML_API struct ggml_tensor * ggml_conv_2d( + struct ggml_context * ctx, + struct ggml_tensor * a, // convolution kernel + struct ggml_tensor * b, // data + int s0, // stride dimension 0 + int s1, // stride dimension 1 + int p0, // padding dimension 0 + int p1, // padding dimension 1 + int d0, // dilation dimension 0 + int d1); // dilation dimension 1 + + + // kernel size is a->ne[0] x a->ne[1] + // stride is equal to kernel size + // padding is zero + // example: + // a: 16 16 3 768 + // b: 1024 1024 3 1 + // res: 64 64 768 1 + // used in sam + GGML_API struct ggml_tensor * ggml_conv_2d_sk_p0( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + // kernel size is a->ne[0] x a->ne[1] + // stride is 1 + // padding is half + // example: + // a: 3 3 256 256 + // b: 64 64 256 1 + // res: 64 64 256 1 + // used in sam + GGML_API struct ggml_tensor * ggml_conv_2d_s1_ph( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b); + + GGML_API struct ggml_tensor * ggml_conv_transpose_2d_p0( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int stride); + + enum ggml_op_pool { + GGML_OP_POOL_MAX, + GGML_OP_POOL_AVG, + GGML_OP_POOL_COUNT, + }; + + GGML_API struct ggml_tensor * ggml_pool_1d( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_op_pool op, + int k0, // kernel size + int s0, // stride + int p0); // padding + + // the result will have 2*p0 padding for the first dimension + // and 2*p1 padding for the second dimension + GGML_API struct ggml_tensor * ggml_pool_2d( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_op_pool op, + int k0, + int k1, 
+ int s0, + int s1, + float p0, + float p1); + + GGML_API struct ggml_tensor * ggml_pool_2d_back( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * af, // "a"/input used in forward pass + enum ggml_op_pool op, + int k0, + int k1, + int s0, + int s1, + float p0, + float p1); + + // nearest interpolate + // multiplies ne0 and ne1 by scale factor + // used in stable-diffusion + GGML_API struct ggml_tensor * ggml_upscale( + struct ggml_context * ctx, + struct ggml_tensor * a, + int scale_factor); + + // nearest interpolate + // nearest interpolate to specified dimensions + // used in tortoise.cpp + GGML_API struct ggml_tensor * ggml_upscale_ext( + struct ggml_context * ctx, + struct ggml_tensor * a, + int ne0, + int ne1, + int ne2, + int ne3); + + // pad each dimension with zeros: [x, ..., x] -> [x, ..., x, 0, ..., 0] + GGML_API struct ggml_tensor * ggml_pad( + struct ggml_context * ctx, + struct ggml_tensor * a, + int p0, + int p1, + int p2, + int p3); + + // unpad each dimension: [x, ..., x, y, ..., y] -> [x, ..., x] + GGML_API struct ggml_tensor * ggml_unpad( + struct ggml_context * ctx, + struct ggml_tensor * a, + int p0, + int p1, + int p2, + int p3); + + // Ref: https://github.com/CompVis/stable-diffusion/blob/main/ldm/modules/diffusionmodules/util.py#L151 + // timesteps: [N,] + // return: [N, dim] + GGML_API struct ggml_tensor * ggml_timestep_embedding( + struct ggml_context * ctx, + struct ggml_tensor * timesteps, + int dim, + int max_period); + + // sort rows + enum ggml_sort_order { + GGML_SORT_ORDER_ASC, + GGML_SORT_ORDER_DESC, + }; + + GGML_API struct ggml_tensor * ggml_argsort( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_sort_order order); + + GGML_API struct ggml_tensor * ggml_arange( + struct ggml_context * ctx, + float start, + float stop, + float step); + + // top k elements per row + GGML_API struct ggml_tensor * ggml_top_k( + struct ggml_context * ctx, + struct ggml_tensor * a, + int k); + +#define 
GGML_KQ_MASK_PAD 32 + + // q: [n_embd, n_batch, n_head, 1] + // k: [n_embd, n_kv, n_head_kv, 1] + // v: [n_embd, n_kv, n_head_kv, 1] !! not transposed !! + // mask: [n_kv, n_batch_pad, 1, 1] !! n_batch_pad = GGML_PAD(n_batch, GGML_KQ_MASK_PAD) !! + // res: [n_embd, n_head, n_batch, 1] !! permuted !! + GGML_API struct ggml_tensor * ggml_flash_attn_ext( + struct ggml_context * ctx, + struct ggml_tensor * q, + struct ggml_tensor * k, + struct ggml_tensor * v, + struct ggml_tensor * mask, + float scale, + float max_bias, + float logit_softcap); + + GGML_API void ggml_flash_attn_ext_set_prec( + struct ggml_tensor * a, + enum ggml_prec prec); + + // TODO: needs to be adapted to ggml_flash_attn_ext + GGML_API struct ggml_tensor * ggml_flash_attn_back( + struct ggml_context * ctx, + struct ggml_tensor * q, + struct ggml_tensor * k, + struct ggml_tensor * v, + struct ggml_tensor * d, + bool masked); + + GGML_API struct ggml_tensor * ggml_ssm_conv( + struct ggml_context * ctx, + struct ggml_tensor * sx, + struct ggml_tensor * c); + + GGML_API struct ggml_tensor * ggml_ssm_scan( + struct ggml_context * ctx, + struct ggml_tensor * s, + struct ggml_tensor * x, + struct ggml_tensor * dt, + struct ggml_tensor * A, + struct ggml_tensor * B, + struct ggml_tensor * C); + + // partition into non-overlapping windows with padding if needed + // example: + // a: 768 64 64 1 + // w: 14 + // res: 768 14 14 25 + // used in sam + GGML_API struct ggml_tensor * ggml_win_part( + struct ggml_context * ctx, + struct ggml_tensor * a, + int w); + + // reverse of ggml_win_part + // used in sam + GGML_API struct ggml_tensor * ggml_win_unpart( + struct ggml_context * ctx, + struct ggml_tensor * a, + int w0, + int h0, + int w); + + GGML_API struct ggml_tensor * ggml_unary( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_unary_op op); + + GGML_API struct ggml_tensor * ggml_unary_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_unary_op op); + + // used in 
sam + GGML_API struct ggml_tensor * ggml_get_rel_pos( + struct ggml_context * ctx, + struct ggml_tensor * a, + int qh, + int kh); + + // used in sam + GGML_API struct ggml_tensor * ggml_add_rel_pos( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * pw, + struct ggml_tensor * ph); + + GGML_API struct ggml_tensor * ggml_add_rel_pos_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * pw, + struct ggml_tensor * ph); + + GGML_API struct ggml_tensor * ggml_rwkv_wkv( + struct ggml_context * ctx, + struct ggml_tensor * k, + struct ggml_tensor * v, + struct ggml_tensor * r, + struct ggml_tensor * tf, + struct ggml_tensor * td, + struct ggml_tensor * state); + + // custom operators + + typedef void (*ggml_unary_op_f32_t) (const int, float *, const float *); + typedef void (*ggml_binary_op_f32_t)(const int, float *, const float *, const float *); + + typedef void (*ggml_custom1_op_f32_t)(struct ggml_tensor *, const struct ggml_tensor *); + typedef void (*ggml_custom2_op_f32_t)(struct ggml_tensor *, const struct ggml_tensor *, const struct ggml_tensor *); + typedef void (*ggml_custom3_op_f32_t)(struct ggml_tensor *, const struct ggml_tensor *, const struct ggml_tensor *, const struct ggml_tensor *); + + GGML_DEPRECATED(GGML_API struct ggml_tensor * ggml_map_unary_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + ggml_unary_op_f32_t fun), + "use ggml_map_custom1 instead"); + + GGML_DEPRECATED(GGML_API struct ggml_tensor * ggml_map_unary_inplace_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + ggml_unary_op_f32_t fun), + "use ggml_map_custom1_inplace instead"); + + GGML_DEPRECATED(GGML_API struct ggml_tensor * ggml_map_binary_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + ggml_binary_op_f32_t fun), + "use ggml_map_custom2 instead"); + + GGML_DEPRECATED(GGML_API struct ggml_tensor * ggml_map_binary_inplace_f32( + struct ggml_context * ctx, + struct 
ggml_tensor * a, + struct ggml_tensor * b, + ggml_binary_op_f32_t fun), + "use ggml_map_custom2_inplace instead"); + + GGML_DEPRECATED(GGML_API struct ggml_tensor * ggml_map_custom1_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + ggml_custom1_op_f32_t fun), + "use ggml_map_custom1 instead"); + + GGML_DEPRECATED(GGML_API struct ggml_tensor * ggml_map_custom1_inplace_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + ggml_custom1_op_f32_t fun), + "use ggml_map_custom1_inplace instead"); + + GGML_DEPRECATED(GGML_API struct ggml_tensor * ggml_map_custom2_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + ggml_custom2_op_f32_t fun), + "use ggml_map_custom2 instead"); + + GGML_DEPRECATED(GGML_API struct ggml_tensor * ggml_map_custom2_inplace_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + ggml_custom2_op_f32_t fun), + "use ggml_map_custom2_inplace instead"); + + GGML_DEPRECATED(GGML_API struct ggml_tensor * ggml_map_custom3_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + ggml_custom3_op_f32_t fun), + "use ggml_map_custom3 instead"); + + GGML_DEPRECATED(GGML_API struct ggml_tensor * ggml_map_custom3_inplace_f32( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + ggml_custom3_op_f32_t fun), + "use ggml_map_custom3_inplace instead"); + + // custom operators v2 + + typedef void (*ggml_custom1_op_t)(struct ggml_tensor * dst , const struct ggml_tensor * a, int ith, int nth, void * userdata); + typedef void (*ggml_custom2_op_t)(struct ggml_tensor * dst , const struct ggml_tensor * a, const struct ggml_tensor * b, int ith, int nth, void * userdata); + typedef void (*ggml_custom3_op_t)(struct ggml_tensor * dst , const struct ggml_tensor * a, const struct ggml_tensor * b, const struct ggml_tensor * c, int ith, int nth, void * userdata); + +#define 
GGML_N_TASKS_MAX (-1) + // n_tasks == GGML_N_TASKS_MAX means to use max number of tasks + + GGML_API struct ggml_tensor * ggml_map_custom1( + struct ggml_context * ctx, + struct ggml_tensor * a, + ggml_custom1_op_t fun, + int n_tasks, + void * userdata); + + GGML_API struct ggml_tensor * ggml_map_custom1_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + ggml_custom1_op_t fun, + int n_tasks, + void * userdata); + + GGML_API struct ggml_tensor * ggml_map_custom2( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + ggml_custom2_op_t fun, + int n_tasks, + void * userdata); + + GGML_API struct ggml_tensor * ggml_map_custom2_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + ggml_custom2_op_t fun, + int n_tasks, + void * userdata); + + GGML_API struct ggml_tensor * ggml_map_custom3( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + ggml_custom3_op_t fun, + int n_tasks, + void * userdata); + + GGML_API struct ggml_tensor * ggml_map_custom3_inplace( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + struct ggml_tensor * c, + ggml_custom3_op_t fun, + int n_tasks, + void * userdata); + + // loss function + + GGML_API struct ggml_tensor * ggml_cross_entropy_loss( + struct ggml_context * ctx, + struct ggml_tensor * a, // logits + struct ggml_tensor * b); // labels + + GGML_API struct ggml_tensor * ggml_cross_entropy_loss_back( + struct ggml_context * ctx, + struct ggml_tensor * a, // logits + struct ggml_tensor * b, // labels + struct ggml_tensor * c); // gradients of cross_entropy_loss result + + // AdamW optimizer step + // Paper: https://arxiv.org/pdf/1711.05101v3.pdf + // PyTorch: https://pytorch.org/docs/stable/generated/torch.optim.AdamW.html + GGML_API struct ggml_tensor * ggml_opt_step_adamw( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * grad, + float alpha, + 
float beta1, + float beta2, + float eps, + float wd); // weight decay + + // + // automatic differentiation + // + + GGML_API void ggml_set_param(struct ggml_context * ctx, struct ggml_tensor * tensor); + GGML_API void ggml_set_loss(struct ggml_tensor * tensor); + + GGML_API void ggml_build_forward_expand (struct ggml_cgraph * cgraph, struct ggml_tensor * tensor); + GGML_API void ggml_build_backward_expand(struct ggml_context * ctx, struct ggml_cgraph * gf, struct ggml_cgraph * gb, bool accumulate); + + GGML_API void ggml_build_opt_adamw( + struct ggml_context * ctx, + struct ggml_cgraph * gf, + struct ggml_cgraph * gb, + float alpha, + float beta1, + float beta2, + float eps, + float wd); // weight decay + + // graph allocation in a context + GGML_API struct ggml_cgraph * ggml_new_graph (struct ggml_context * ctx); // size = GGML_DEFAULT_GRAPH_SIZE, grads = false + GGML_API struct ggml_cgraph * ggml_new_graph_custom(struct ggml_context * ctx, size_t size, bool grads); + GGML_API struct ggml_cgraph * ggml_graph_dup (struct ggml_context * ctx, struct ggml_cgraph * cgraph); + GGML_API void ggml_graph_cpy (struct ggml_cgraph * src, struct ggml_cgraph * dst); + GGML_API void ggml_graph_reset (struct ggml_cgraph * cgraph); // set regular grads + optimizer momenta to 0, set loss grad to 1 + GGML_API void ggml_graph_clear (struct ggml_cgraph * cgraph); + + GGML_API int ggml_graph_size (struct ggml_cgraph * cgraph); + GGML_API struct ggml_tensor * ggml_graph_node (struct ggml_cgraph * cgraph, int i); // if i < 0, returns nodes[n_nodes + i] + GGML_API struct ggml_tensor ** ggml_graph_nodes (struct ggml_cgraph * cgraph); + GGML_API int ggml_graph_n_nodes(struct ggml_cgraph * cgraph); + + GGML_API void ggml_graph_add_node(struct ggml_cgraph * cgraph, struct ggml_tensor * tensor); + + GGML_API size_t ggml_graph_overhead(void); + GGML_API size_t ggml_graph_overhead_custom(size_t size, bool grads); + + GGML_API struct ggml_threadpool_params ggml_threadpool_params_default(int 
n_threads); + GGML_API void ggml_threadpool_params_init (struct ggml_threadpool_params * p, int n_threads); + GGML_API bool ggml_threadpool_params_match (const struct ggml_threadpool_params * p0, const struct ggml_threadpool_params * p1); + GGML_API struct ggml_threadpool * ggml_threadpool_new (struct ggml_threadpool_params * params); + GGML_API void ggml_threadpool_free (struct ggml_threadpool * threadpool); + GGML_API int ggml_threadpool_get_n_threads(struct ggml_threadpool * threadpool); + GGML_API void ggml_threadpool_pause (struct ggml_threadpool * threadpool); + GGML_API void ggml_threadpool_resume (struct ggml_threadpool * threadpool); + + // ggml_graph_plan() has to be called before ggml_graph_compute() + // when plan.work_size > 0, caller must allocate memory for plan.work_data + GGML_API struct ggml_cplan ggml_graph_plan( + const struct ggml_cgraph * cgraph, + int n_threads, /* = GGML_DEFAULT_N_THREADS */ + struct ggml_threadpool * threadpool /* = NULL */ ); + GGML_API enum ggml_status ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan); + + // same as ggml_graph_compute() but the work data is allocated as a part of the context + // note: the drawback of this API is that you must have ensured that the context has enough memory for the work data + GGML_API enum ggml_status ggml_graph_compute_with_ctx(struct ggml_context * ctx, struct ggml_cgraph * cgraph, int n_threads); + + GGML_API struct ggml_tensor * ggml_graph_get_tensor(struct ggml_cgraph * cgraph, const char * name); + + GGML_API void ggml_graph_export(const struct ggml_cgraph * cgraph, const char * fname); + GGML_API struct ggml_cgraph * ggml_graph_import(const char * fname, struct ggml_context ** ctx_data, struct ggml_context ** ctx_eval); + + // print info and performance information for the graph + GGML_API void ggml_graph_print(const struct ggml_cgraph * cgraph); + + // dump the graph into a file using the dot format + GGML_API void ggml_graph_dump_dot(const struct 
ggml_cgraph * gb, const struct ggml_cgraph * gf, const char * filename); + + // build gradient checkpointing backward graph gb for gf using provided checkpoints + // gb_tmp will contain original backward graph with rewritten backward process nodes, + // but without the second forward pass nodes. + GGML_API void ggml_build_backward_gradient_checkpointing( + struct ggml_context * ctx, + struct ggml_cgraph * gf, + struct ggml_cgraph * gb, + struct ggml_cgraph * gb_tmp, + struct ggml_tensor * * checkpoints, + int n_checkpoints); + // + // optimization + // + + // optimization methods + enum ggml_opt_type { + GGML_OPT_TYPE_ADAM, + GGML_OPT_TYPE_LBFGS, + }; + + // linesearch methods + enum ggml_linesearch { + GGML_LINESEARCH_DEFAULT = 1, + + GGML_LINESEARCH_BACKTRACKING_ARMIJO = 0, + GGML_LINESEARCH_BACKTRACKING_WOLFE = 1, + GGML_LINESEARCH_BACKTRACKING_STRONG_WOLFE = 2, + }; + + // optimization return values + enum ggml_opt_result { + GGML_OPT_RESULT_OK = 0, + GGML_OPT_RESULT_DID_NOT_CONVERGE, + GGML_OPT_RESULT_NO_CONTEXT, + GGML_OPT_RESULT_INVALID_WOLFE, + GGML_OPT_RESULT_FAIL, + GGML_OPT_RESULT_CANCEL, + + GGML_LINESEARCH_FAIL = -128, + GGML_LINESEARCH_MINIMUM_STEP, + GGML_LINESEARCH_MAXIMUM_STEP, + GGML_LINESEARCH_MAXIMUM_ITERATIONS, + GGML_LINESEARCH_INVALID_PARAMETERS, + }; + + typedef void (*ggml_opt_callback)(void * data, int accum_step, float * sched, bool * cancel); + typedef void (*ggml_log_callback)(enum ggml_log_level level, const char * text, void * user_data); + + // optimization parameters + // + // see ggml.c (ggml_opt_default_params) for default values + // + struct ggml_opt_params { + enum ggml_opt_type type; + + size_t graph_size; + + int n_threads; + + // delta-based convergence test + // + // if past == 0 - disabled + // if past > 0: + // stop if |f(x) - f(x_past)| < delta * max(1, |f(x)|) + // + int past; + float delta; + + // maximum number of iterations without improvement + // + // if 0 - disabled + // if > 0: + // assume convergence if no cost 
improvement in this number of iterations + // + int max_no_improvement; + + bool print_forward_graph; + bool print_backward_graph; + + int n_gradient_accumulation; + + // ADAM parameters + struct { + int n_iter; + + float sched; // schedule multiplier (fixed, decay or warmup) + float decay; // weight decay for AdamW, use 0.0f to disable + int decay_min_ndim; // minimum number of tensor dimension to apply weight decay + float alpha; // learning rate + float beta1; + float beta2; + float eps; // epsilon for numerical stability + float eps_f; // epsilon for convergence test + float eps_g; // epsilon for convergence test + float gclip; // gradient clipping + } adam; + + // LBFGS parameters + struct { + int m; // number of corrections to approximate the inv. Hessian + int n_iter; + int max_linesearch; + + float eps; // convergence tolerance + float ftol; // line search tolerance + float wolfe; + float min_step; + float max_step; + + enum ggml_linesearch linesearch; + } lbfgs; + }; + + struct ggml_opt_context { + struct ggml_context * ctx; + struct ggml_opt_params params; + + int iter; + int64_t nx; // number of parameter elements + + bool just_initialized; + + float loss_before; + float loss_after; + + struct { + struct ggml_tensor * g; // current gradient + struct ggml_tensor * m; // first moment + struct ggml_tensor * v; // second moment + struct ggml_tensor * pf; // past function values + float fx_best; + float fx_prev; + int n_no_improvement; + } adam; + + struct { + struct ggml_tensor * x; // current parameters + struct ggml_tensor * xp; // previous parameters + struct ggml_tensor * g; // current gradient + struct ggml_tensor * gp; // previous gradient + struct ggml_tensor * d; // search direction + struct ggml_tensor * pf; // past function values + struct ggml_tensor * lmal; // the L-BFGS memory alpha + struct ggml_tensor * lmys; // the L-BFGS memory ys + struct ggml_tensor * lms; // the L-BFGS memory s + struct ggml_tensor * lmy; // the L-BFGS memory y + float 
fx_best; + float step; + int j; + int k; + int end; + int n_no_improvement; + } lbfgs; + }; + + GGML_API struct ggml_opt_params ggml_opt_default_params(enum ggml_opt_type type); + + // optimize the function defined by the tensor f + GGML_API enum ggml_opt_result ggml_opt( + struct ggml_context * ctx, + struct ggml_opt_params params, + struct ggml_tensor * f); + + // initialize optimizer context + GGML_API void ggml_opt_init( + struct ggml_context * ctx, + struct ggml_opt_context * opt, + struct ggml_opt_params params, + int64_t nx); + + // continue optimizing the function defined by the tensor f + GGML_API enum ggml_opt_result ggml_opt_resume( + struct ggml_context * ctx, + struct ggml_opt_context * opt, + struct ggml_tensor * f); + + // continue optimizing the function defined by the tensor f + GGML_API enum ggml_opt_result ggml_opt_resume_g( + struct ggml_context * ctx, + struct ggml_opt_context * opt, + struct ggml_tensor * f, + struct ggml_cgraph * gf, + struct ggml_cgraph * gb, + ggml_opt_callback callback, + void * callback_data); + + // + // tensor flags + // + GGML_API void ggml_set_input(struct ggml_tensor * tensor); + GGML_API void ggml_set_output(struct ggml_tensor * tensor); + + // + // quantization + // + + // - ggml_quantize_init can be called multiple times with the same type + // it will only initialize the quantization tables for the first call or after ggml_quantize_free + // automatically called by ggml_quantize_chunk for convenience + // + // - ggml_quantize_free will free any memory allocated by ggml_quantize_init + // call this at the end of the program to avoid memory leaks + // + // note: these are thread-safe + // + GGML_API void ggml_quantize_init(enum ggml_type type); + GGML_API void ggml_quantize_free(void); + + // some quantization type cannot be used without an importance matrix + GGML_API bool ggml_quantize_requires_imatrix(enum ggml_type type); + + // calls ggml_quantize_init internally (i.e. 
can allocate memory) + GGML_API size_t ggml_quantize_chunk( + enum ggml_type type, + const float * src, + void * dst, + int64_t start, + int64_t nrows, + int64_t n_per_row, + const float * imatrix); + + // + // gguf + // + + enum gguf_type { + GGUF_TYPE_UINT8 = 0, + GGUF_TYPE_INT8 = 1, + GGUF_TYPE_UINT16 = 2, + GGUF_TYPE_INT16 = 3, + GGUF_TYPE_UINT32 = 4, + GGUF_TYPE_INT32 = 5, + GGUF_TYPE_FLOAT32 = 6, + GGUF_TYPE_BOOL = 7, + GGUF_TYPE_STRING = 8, + GGUF_TYPE_ARRAY = 9, + GGUF_TYPE_UINT64 = 10, + GGUF_TYPE_INT64 = 11, + GGUF_TYPE_FLOAT64 = 12, + GGUF_TYPE_COUNT, // marks the end of the enum + }; + + struct gguf_context; + + struct gguf_init_params { + bool no_alloc; + + // if not NULL, create a ggml_context and allocate the tensor data in it + struct ggml_context ** ctx; + }; + + GGML_API struct gguf_context * gguf_init_empty(void); + GGML_API struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_params params); + //GGML_API struct gguf_context * gguf_init_from_buffer(..); + + GGML_API void gguf_free(struct gguf_context * ctx); + + GGML_API const char * gguf_type_name(enum gguf_type type); + + GGML_API int gguf_get_version (const struct gguf_context * ctx); + GGML_API size_t gguf_get_alignment (const struct gguf_context * ctx); + GGML_API size_t gguf_get_data_offset(const struct gguf_context * ctx); + GGML_API void * gguf_get_data (const struct gguf_context * ctx); + + GGML_API int gguf_get_n_kv(const struct gguf_context * ctx); + GGML_API int gguf_find_key(const struct gguf_context * ctx, const char * key); + GGML_API const char * gguf_get_key (const struct gguf_context * ctx, int key_id); + + GGML_API enum gguf_type gguf_get_kv_type (const struct gguf_context * ctx, int key_id); + GGML_API enum gguf_type gguf_get_arr_type(const struct gguf_context * ctx, int key_id); + + // will abort if the wrong type is used for the key + GGML_API uint8_t gguf_get_val_u8 (const struct gguf_context * ctx, int key_id); + GGML_API int8_t gguf_get_val_i8 
(const struct gguf_context * ctx, int key_id); + GGML_API uint16_t gguf_get_val_u16 (const struct gguf_context * ctx, int key_id); + GGML_API int16_t gguf_get_val_i16 (const struct gguf_context * ctx, int key_id); + GGML_API uint32_t gguf_get_val_u32 (const struct gguf_context * ctx, int key_id); + GGML_API int32_t gguf_get_val_i32 (const struct gguf_context * ctx, int key_id); + GGML_API float gguf_get_val_f32 (const struct gguf_context * ctx, int key_id); + GGML_API uint64_t gguf_get_val_u64 (const struct gguf_context * ctx, int key_id); + GGML_API int64_t gguf_get_val_i64 (const struct gguf_context * ctx, int key_id); + GGML_API double gguf_get_val_f64 (const struct gguf_context * ctx, int key_id); + GGML_API bool gguf_get_val_bool(const struct gguf_context * ctx, int key_id); + GGML_API const char * gguf_get_val_str (const struct gguf_context * ctx, int key_id); + GGML_API const void * gguf_get_val_data(const struct gguf_context * ctx, int key_id); + GGML_API int gguf_get_arr_n (const struct gguf_context * ctx, int key_id); + GGML_API const void * gguf_get_arr_data(const struct gguf_context * ctx, int key_id); + GGML_API const char * gguf_get_arr_str (const struct gguf_context * ctx, int key_id, int i); + + GGML_API int gguf_get_n_tensors (const struct gguf_context * ctx); + GGML_API int gguf_find_tensor (const struct gguf_context * ctx, const char * name); + GGML_API size_t gguf_get_tensor_offset(const struct gguf_context * ctx, int i); + GGML_API char * gguf_get_tensor_name (const struct gguf_context * ctx, int i); + GGML_API enum ggml_type gguf_get_tensor_type (const struct gguf_context * ctx, int i); + + // removes key if it exists + GGML_API void gguf_remove_key(struct gguf_context * ctx, const char * key); + + // overrides existing values or adds a new one + GGML_API void gguf_set_val_u8 (struct gguf_context * ctx, const char * key, uint8_t val); + GGML_API void gguf_set_val_i8 (struct gguf_context * ctx, const char * key, int8_t val); + GGML_API void 
gguf_set_val_u16 (struct gguf_context * ctx, const char * key, uint16_t val); + GGML_API void gguf_set_val_i16 (struct gguf_context * ctx, const char * key, int16_t val); + GGML_API void gguf_set_val_u32 (struct gguf_context * ctx, const char * key, uint32_t val); + GGML_API void gguf_set_val_i32 (struct gguf_context * ctx, const char * key, int32_t val); + GGML_API void gguf_set_val_f32 (struct gguf_context * ctx, const char * key, float val); + GGML_API void gguf_set_val_u64 (struct gguf_context * ctx, const char * key, uint64_t val); + GGML_API void gguf_set_val_i64 (struct gguf_context * ctx, const char * key, int64_t val); + GGML_API void gguf_set_val_f64 (struct gguf_context * ctx, const char * key, double val); + GGML_API void gguf_set_val_bool(struct gguf_context * ctx, const char * key, bool val); + GGML_API void gguf_set_val_str (struct gguf_context * ctx, const char * key, const char * val); + GGML_API void gguf_set_arr_data(struct gguf_context * ctx, const char * key, enum gguf_type type, const void * data, int n); + GGML_API void gguf_set_arr_str (struct gguf_context * ctx, const char * key, const char ** data, int n); + + // set or add KV pairs from another context + GGML_API void gguf_set_kv(struct gguf_context * ctx, struct gguf_context * src); + + // manage tensor info + GGML_API void gguf_add_tensor(struct gguf_context * ctx, const struct ggml_tensor * tensor); + GGML_API void gguf_set_tensor_type(struct gguf_context * ctx, const char * name, enum ggml_type type); + GGML_API void gguf_set_tensor_data(struct gguf_context * ctx, const char * name, const void * data, size_t size); + + // writing gguf files can be done in 2 ways: + // + // - write the entire gguf_context to a binary file in a single pass: + // + // gguf_write_to_file(ctx, fname); + // + // - first prepare a file with a placeholder for the meta data, write the tensor data, then write the meta data: + // + // FILE * f = fopen(fname, "wb"); + // fseek(f, gguf_get_meta_size(ctx), 
SEEK_SET); + // fwrite(f, ...); + // void * data = gguf_meta_get_meta_data(ctx); + // fseek(f, 0, SEEK_SET); + // fwrite(f, data, gguf_get_meta_size(ctx)); + // free(data); + // fclose(f); + // + + // write the entire context to a binary file + GGML_API void gguf_write_to_file(const struct gguf_context * ctx, const char * fname, bool only_meta); + + // get the size in bytes of the meta data (header, kv pairs, tensor info) including padding + GGML_API size_t gguf_get_meta_size(const struct gguf_context * ctx); + GGML_API void gguf_get_meta_data(const struct gguf_context * ctx, void * data); + + // + // system info + // + + GGML_API int ggml_cpu_has_avx (void); + GGML_API int ggml_cpu_has_avx_vnni (void); + GGML_API int ggml_cpu_has_avx2 (void); + GGML_API int ggml_cpu_has_avx512 (void); + GGML_API int ggml_cpu_has_avx512_vbmi(void); + GGML_API int ggml_cpu_has_avx512_vnni(void); + GGML_API int ggml_cpu_has_avx512_bf16(void); + GGML_API int ggml_cpu_has_fma (void); + GGML_API int ggml_cpu_has_neon (void); + GGML_API int ggml_cpu_has_sve (void); + GGML_API int ggml_cpu_has_arm_fma (void); + GGML_API int ggml_cpu_has_metal (void); + GGML_API int ggml_cpu_has_f16c (void); + GGML_API int ggml_cpu_has_fp16_va (void); + GGML_API int ggml_cpu_has_wasm_simd (void); + GGML_API int ggml_cpu_has_blas (void); + GGML_API int ggml_cpu_has_cuda (void); + GGML_API int ggml_cpu_has_vulkan (void); + GGML_API int ggml_cpu_has_kompute (void); + GGML_API int ggml_cpu_has_gpublas (void); + GGML_API int ggml_cpu_has_sse3 (void); + GGML_API int ggml_cpu_has_ssse3 (void); + GGML_API int ggml_cpu_has_riscv_v (void); + GGML_API int ggml_cpu_has_sycl (void); + GGML_API int ggml_cpu_has_rpc (void); + GGML_API int ggml_cpu_has_vsx (void); + GGML_API int ggml_cpu_has_matmul_int8(void); + GGML_API int ggml_cpu_has_cann (void); + GGML_API int ggml_cpu_has_llamafile (void); + + // get the sve vector length in bytes + GGML_API int ggml_cpu_get_sve_cnt(void); + + // + // Internal types and functions 
exposed for tests and benchmarks + // + +#ifdef __cplusplus +// restrict not standard in C++ +#define GGML_RESTRICT +#else +#define GGML_RESTRICT restrict +#endif + typedef void (*ggml_to_float_t) (const void * GGML_RESTRICT x, float * GGML_RESTRICT y, int64_t k); + typedef void (*ggml_from_float_t)(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t k); + typedef void (*ggml_from_float_to_mat_t) + (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int64_t nr, int64_t k, int64_t bs); + typedef void (*ggml_vec_dot_t) (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT x, size_t bx, + const void * GGML_RESTRICT y, size_t by, int nrc); + typedef void (*ggml_gemv_t) (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT x, + const void * GGML_RESTRICT y, int nr, int nc); + typedef void (*ggml_gemm_t) (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT x, + const void * GGML_RESTRICT y, int nr, int nc); + + typedef struct { + const char * type_name; + int64_t blck_size; + int64_t blck_size_interleave; // interleave elements in blocks + size_t type_size; + bool is_quantized; + ggml_to_float_t to_float; + ggml_from_float_t from_float; + ggml_from_float_t from_float_ref; + ggml_from_float_to_mat_t from_float_to_mat; + ggml_vec_dot_t vec_dot; + enum ggml_type vec_dot_type; + int64_t nrows; // number of rows to process simultaneously + int64_t ncols; // number of columns to process simultaneously + ggml_gemv_t gemv; + ggml_gemm_t gemm; + } ggml_type_traits_t; + + GGML_API ggml_type_traits_t ggml_internal_get_type_traits(enum ggml_type type); + +#ifdef __cplusplus +} +#endif diff --git a/ml/backend/ggml/sgemm.h b/ml/backend/ggml/sgemm.h new file mode 100644 index 000000000..caf6dd556 --- /dev/null +++ b/ml/backend/ggml/sgemm.h @@ -0,0 +1,14 @@ +#pragma once +#include +#include +#ifdef __cplusplus +extern "C" { +#endif + +bool llamafile_sgemm(int64_t, int64_t, int64_t, const void *, int64_t, + const void *, 
int64_t, void *, int64_t, int, int, + int, int, int); + +#ifdef __cplusplus +} +#endif diff --git a/ml/nn/convolution.go b/ml/nn/convolution.go new file mode 100644 index 000000000..c1b72391e --- /dev/null +++ b/ml/nn/convolution.go @@ -0,0 +1,11 @@ +package nn + +import "github.com/ollama/ollama/ml" + +type Conv2D struct { + Weight ml.Tensor `ggml:"weight"` +} + +func (m *Conv2D) Forward(ctx ml.Context, t ml.Tensor, s0, s1, p0, p1, d0, d1 int) ml.Tensor { + return m.Weight.Conv2D(ctx, t, s0, s1, p0, p1, d0, d1) +} diff --git a/ml/nn/embedding.go b/ml/nn/embedding.go new file mode 100644 index 000000000..291cf675d --- /dev/null +++ b/ml/nn/embedding.go @@ -0,0 +1,11 @@ +package nn + +import "github.com/ollama/ollama/ml" + +type Embedding struct { + Weight ml.Tensor `ggml:"weight"` +} + +func (m *Embedding) Forward(ctx ml.Context, hiddenState ml.Tensor) ml.Tensor { + return m.Weight.Rows(ctx, hiddenState) +} diff --git a/ml/nn/linear.go b/ml/nn/linear.go new file mode 100644 index 000000000..cdc192163 --- /dev/null +++ b/ml/nn/linear.go @@ -0,0 +1,17 @@ +package nn + +import "github.com/ollama/ollama/ml" + +type Linear struct { + Weight ml.Tensor `ggml:"weight"` + Bias ml.Tensor `ggml:"bias"` +} + +func (m *Linear) Forward(ctx ml.Context, t ml.Tensor) ml.Tensor { + t = m.Weight.Mulmat(ctx, t) + if m.Bias != nil { + t = t.Add(ctx, m.Bias) + } + + return t +} diff --git a/ml/nn/normalization.go b/ml/nn/normalization.go new file mode 100644 index 000000000..8a7913290 --- /dev/null +++ b/ml/nn/normalization.go @@ -0,0 +1,33 @@ +package nn + +import ( + "github.com/ollama/ollama/ml" +) + +type LayerNorm struct { + Weight ml.Tensor `ggml:"weight"` + Bias ml.Tensor `ggml:"bias"` +} + +func (m *LayerNorm) Forward(ctx ml.Context, t ml.Tensor, eps float32) ml.Tensor { + t = t.Norm(ctx, eps).Mul(ctx, m.Weight) + if m.Bias != nil { + t = t.Add(ctx, m.Bias) + } + + return t +} + +type RMSNorm struct { + Weight ml.Tensor `ggml:"weight"` + Bias ml.Tensor `ggml:"bias"` +} + +func 
(m *RMSNorm) Forward(ctx ml.Context, t ml.Tensor, eps float32) ml.Tensor { + t = t.RMSNorm(ctx, eps).Mul(ctx, m.Weight) + if m.Bias != nil { + t = t.Add(ctx, m.Bias) + } + + return t +} diff --git a/model/cmd/main.go b/model/cmd/main.go new file mode 100644 index 000000000..53d8fab6e --- /dev/null +++ b/model/cmd/main.go @@ -0,0 +1,154 @@ +package main + +import ( + "errors" + "flag" + "fmt" + "image" + "io" + "log/slog" + "os" + "path/filepath" + + "github.com/ollama/ollama/cache" + "github.com/ollama/ollama/ml" + "github.com/ollama/ollama/model" + _ "github.com/ollama/ollama/model/llama" + _ "github.com/ollama/ollama/model/mllama" + "github.com/ollama/ollama/sample" +) + +var args struct { + n int + debug bool + image string + cache bool +} + +func temp() error { + flag.IntVar(&args.n, "n", 10, "number of samples") + flag.BoolVar(&args.debug, "debug", false, "enable debug logging") + flag.StringVar(&args.image, "image", "", "path to image file") + flag.BoolVar(&args.cache, "cache", false, "enable KV cache") + + flag.Parse() + + if len(flag.Args()) != 1 { + return fmt.Errorf("usage: %s path/to/file a_max { + return a_max + } + + return a +} + +func (p *ImageProcessor) getImageSizeFitToCanvas(imageSize, canvasSize image.Point, tileSize int) image.Point { + targetWidth := p.clip(imageSize.X, tileSize, canvasSize.X) + targetHeight := p.clip(imageSize.Y, tileSize, canvasSize.Y) + + scaleWidth := float64(targetWidth) / float64(imageSize.X) + scaleHeight := float64(targetHeight) / float64(imageSize.Y) + + var w, h int + + if scaleWidth < scaleHeight { + w = targetWidth + h = min(int(math.Floor(float64(imageSize.Y)*scaleWidth)), targetHeight) + } else { + w = min(int(math.Floor(float64(imageSize.X)*scaleHeight)), targetWidth) + h = targetHeight + } + + return image.Point{w, h} +} + +func (p *ImageProcessor) getOptimalTiledCanvas(imageSize image.Point, maxImageTiles, tileSize int) image.Point { + possibleTileArrangements := p.supportedAspectRatios(maxImageTiles) + 
possibleCanvasSizes := []image.Point{} + for _, pta := range possibleTileArrangements { + possibleCanvasSizes = append(possibleCanvasSizes, image.Point{pta.X * tileSize, pta.Y * tileSize}) + } + + scales := []float64{} + + for _, pcs := range possibleCanvasSizes { + scaleHeight := float64(pcs.Y) / float64(imageSize.Y) + scaleWidth := float64(pcs.X) / float64(imageSize.X) + + if scaleWidth > scaleHeight { + scales = append(scales, scaleHeight) + } else { + scales = append(scales, scaleWidth) + } + } + + var minUpscale float64 + var maxDownscale float64 + var upscale bool + + for _, s := range scales { + if s > 1.0 { + upscale = true + if minUpscale == 0 { + minUpscale = s + } else { + minUpscale = math.Min(minUpscale, s) + } + } else { + maxDownscale = math.Max(maxDownscale, s) + } + } + + selectedScale := maxDownscale + if upscale { + selectedScale = minUpscale + } + + var selectedCanvas image.Point + for n, pcs := range possibleCanvasSizes { + if scales[n] == selectedScale { + // choose the smallest possible canvas + if selectedCanvas.X == 0 && selectedCanvas.Y == 0 { + selectedCanvas = pcs + } else if pcs.X*pcs.Y < selectedCanvas.X*selectedCanvas.Y { + selectedCanvas = pcs + } + } + } + return selectedCanvas +} + +func (p *ImageProcessor) splitToTiles(img image.Image, numTilesSize image.Point) []image.Image { + b := img.Bounds() + width := b.Max.X - b.Min.X + height := b.Max.Y - b.Min.Y + tileHeight := height / numTilesSize.Y + tileWidth := width / numTilesSize.X + + images := []image.Image{} + + for h := range numTilesSize.Y { + for w := range numTilesSize.X { + rect := image.Rect(tileWidth*w, tileHeight*h, tileWidth*(w+1), tileHeight*(h+1)) + images = append(images, img.(interface { + SubImage(image.Rectangle) image.Image + }).SubImage(rect)) + } + } + + return images +} + +// remove the "alpha" channel by drawing over a prefilled image +// +// remove the "alpha" channel by drawing over a prefilled image +// +//nolint:unused +func (p *ImageProcessor) 
compositeImage(img image.Image) image.Image { + dst := image.NewRGBA(img.Bounds()) + + white := color.RGBA{255, 255, 255, 255} + draw.Draw(dst, dst.Bounds(), &image.Uniform{white}, image.Point{}, draw.Src) + draw.Draw(dst, dst.Bounds(), img, img.Bounds().Min, draw.Over) + + return dst +} + +func (p *ImageProcessor) resize(img image.Image, outputSize image.Point, maxImageTiles int) (image.Image, image.Point) { + b := img.Bounds() + tileSize := outputSize.Y + + canvasSize := p.getOptimalTiledCanvas(b.Max, maxImageTiles, tileSize) + aspectRatio := image.Point{canvasSize.X / tileSize, canvasSize.Y / tileSize} + newSize := p.getImageSizeFitToCanvas(b.Max, canvasSize, tileSize) + + dst := image.NewRGBA(image.Rect(0, 0, newSize.X, newSize.Y)) + + // scaling choices: + // NearestNeighbor fast, blocky output + // ApproxBiLinear fast, medium quality + // BiLinear slow, high quality + // CatmullRom very slow, very high quality + draw.BiLinear.Scale(dst, dst.Rect, img, b, draw.Over, nil) + + return dst, aspectRatio +} + +func (p *ImageProcessor) pad(img image.Image, outputSize, aspectRatio image.Point) image.Image { + paddedSize := image.Point{ + X: outputSize.X * aspectRatio.X, + Y: outputSize.Y * aspectRatio.Y, + } + + dst := image.NewRGBA(image.Rect(0, 0, paddedSize.X, paddedSize.Y)) + draw.Draw(dst, img.Bounds(), img, image.Point{0, 0}, draw.Over) + + return dst +} + +func (p *ImageProcessor) pack(img image.Image, aspectRatio image.Point, mean, std [3]float32) []float32 { + subImages := p.splitToTiles(img, aspectRatio) + + var pixelVals []float32 + + for _, subImg := range subImages { + bounds := subImg.Bounds() + var rVals, gVals, bVals []float32 + for y := bounds.Min.Y; y < bounds.Max.Y; y++ { + for x := bounds.Min.X; x < bounds.Max.X; x++ { + c := subImg.At(x, y) + r, g, b, _ := c.RGBA() + rVal := float32(r>>8) / 255.0 + gVal := float32(g>>8) / 255.0 + bVal := float32(b>>8) / 255.0 + + rVal = (rVal - mean[0]) / std[0] + gVal = (gVal - mean[1]) / std[1] + bVal = (bVal - 
mean[2]) / std[2] + + rVals = append(rVals, rVal) + gVals = append(gVals, gVal) + bVals = append(bVals, bVal) + } + } + pixelVals = append(pixelVals, rVals...) + pixelVals = append(pixelVals, gVals...) + pixelVals = append(pixelVals, bVals...) + } + + return pixelVals +} + +func (p ImageProcessor) ProcessImage(img image.Image) ([]float32, int, error) { + outputSize := image.Point{p.imageSize, p.imageSize} + + // clip values + mean := [3]float32{0.48145466, 0.4578275, 0.40821073} + std := [3]float32{0.26862954, 0.26130258, 0.27577711} + + newImage, aspectRatio := p.resize(img, outputSize, p.maxNumTiles) + newImage = p.pad(newImage, outputSize, aspectRatio) + + data := p.pack(newImage, aspectRatio, mean, std) + aspectRatioIndex := slices.Index(p.supportedAspectRatios(p.maxNumTiles), aspectRatio) + 1 + return data, aspectRatioIndex, nil +} diff --git a/model/mllama/process_text.go b/model/mllama/process_text.go new file mode 100644 index 000000000..88f9aea09 --- /dev/null +++ b/model/mllama/process_text.go @@ -0,0 +1,25 @@ +package mllama + +import ( + "github.com/ollama/ollama/ml" + "github.com/ollama/ollama/model" +) + +type TextProcessor struct { + model.BytePairEncoding +} + +func newTextProcessor(c ml.Config) TextProcessor { + return TextProcessor{ + BytePairEncoding: model.BytePairEncoding{ + Pretokenizer: c.String("tokenizer.ggml.pretokenizer", `(?i:'s|'t|'re|'ve|'m|'ll|'d)|[^\r\n\p{L}\p{N}]?\p{L}+|\p{N}{1,3}| ?[^\s\p{L}\p{N}]+[\r\n]*|\s*[\r\n]+|\s+(?!\S)|\s+`), + Vocabulary: &model.Vocabulary{ + Values: c.Strings("tokenizer.ggml.tokens"), + Types: c.Uints("tokenizer.ggml.token_type"), + Merges: c.Strings("tokenizer.ggml.merges"), + BOS: c.Uint("tokenizer.ggml.bos_token_id"), + EOS: c.Uint("tokenizer.ggml.eos_token_id"), + }, + }, + } +} diff --git a/model/mllama/process_text_test.go b/model/mllama/process_text_test.go new file mode 100644 index 000000000..9016312e1 --- /dev/null +++ b/model/mllama/process_text_test.go @@ -0,0 +1,82 @@ +package mllama + +import 
( + "encoding/json" + "errors" + "os" + "path/filepath" + "strconv" + "testing" + + "github.com/google/go-cmp/cmp" + + "github.com/ollama/ollama/model" +) + +func TestProcessText(t *testing.T) { + ours, err := model.New(filepath.Join("testdata", "model.bin")) + if errors.Is(err, os.ErrNotExist) { + t.Skip("no model.bin") + } else if err != nil { + t.Fatal(err) + } + + t.Run("decode", func(t *testing.T) { + f, err := os.Open(filepath.Join("testdata", "theirs.json")) + if errors.Is(err, os.ErrNotExist) { + t.Skip("no theirs.json") + } else if err != nil { + t.Fatal(err) + } + defer f.Close() + + var theirs [][]byte + if err := json.NewDecoder(f).Decode(&theirs); err != nil { + t.Fatal(err) + } + + for id := range theirs { + ids := []int32{int32(id)} + s, err := ours.(model.TextProcessor).Decode(ids) + if err != nil { + t.Fatal(err) + } + + if diff := cmp.Diff(string(theirs[id]), s); diff != "" { + t.Errorf("%d no match (-theirs +ours):\n%s", id, diff) + } + } + }) + + t.Run("encode", func(t *testing.T) { + f, err := os.Open(filepath.Join("..", "testdata", "inputs.json")) + if errors.Is(err, os.ErrNotExist) { + t.Skip("no inputs.json") + } else if err != nil { + t.Fatal(err) + } + defer f.Close() + + var inputs []struct { + Values []byte `json:"base64"` + IDs []int32 `json:"ids"` + } + + if err := json.NewDecoder(f).Decode(&inputs); err != nil { + t.Fatal(err) + } + + for i, input := range inputs { + t.Run(strconv.Itoa(i), func(t *testing.T) { + ids, err := ours.(model.TextProcessor).Encode(string(input.Values)) + if err != nil { + t.Fatal(err) + } + + if diff := cmp.Diff(input.IDs, ids); diff != "" { + t.Errorf("%s: no match (-theirs +ours):\n%s", input.Values, diff) + } + }) + } + }) +} diff --git a/model/mllama/testdata/model.bin b/model/mllama/testdata/model.bin new file mode 120000 index 000000000..34d6cfe11 --- /dev/null +++ b/model/mllama/testdata/model.bin @@ -0,0 +1 @@ +/Users/michaelyang/git/ollama/library/nltpt/Llama-3.2-11B-Vision-Instruct/merged.gguf \ No 
newline at end of file diff --git a/model/mllama/testdata/theirs.json b/model/mllama/testdata/theirs.json new file mode 100644 index 000000000..c6438cdc3 --- /dev/null +++ b/model/mllama/testdata/theirs.json @@ -0,0 +1 @@ +["IQ==","Ig==","Iw==","JA==","JQ==","Jg==","Jw==","KA==","KQ==","Kg==","Kw==","LA==","LQ==","Lg==","Lw==","MA==","MQ==","Mg==","Mw==","NA==","NQ==","Ng==","Nw==","OA==","OQ==","Og==","Ow==","PA==","PQ==","Pg==","Pw==","QA==","QQ==","Qg==","Qw==","RA==","RQ==","Rg==","Rw==","SA==","SQ==","Sg==","Sw==","TA==","TQ==","Tg==","Tw==","UA==","UQ==","Ug==","Uw==","VA==","VQ==","Vg==","Vw==","WA==","WQ==","Wg==","Ww==","XA==","XQ==","Xg==","Xw==","YA==","YQ==","Yg==","Yw==","ZA==","ZQ==","Zg==","Zw==","aA==","aQ==","ag==","aw==","bA==","bQ==","bg==","bw==","cA==","cQ==","cg==","cw==","dA==","dQ==","dg==","dw==","eA==","eQ==","eg==","ew==","fA==","fQ==","fg==","oQ==","og==","ow==","pA==","pQ==","pg==","pw==","qA==","qQ==","qg==","qw==","rA==","rg==","rw==","sA==","sQ==","sg==","sw==","tA==","tQ==","tg==","tw==","uA==","uQ==","ug==","uw==","vA==","vQ==","vg==","vw==","wA==","wQ==","wg==","ww==","xA==","xQ==","xg==","xw==","yA==","yQ==","yg==","yw==","zA==","zQ==","zg==","zw==","0A==","0Q==","0g==","0w==","1A==","1Q==","1g==","1w==","2A==","2Q==","2g==","2w==","3A==","3Q==","3g==","3w==","4A==","4Q==","4g==","4w==","5A==","5Q==","5g==","5w==","6A==","6Q==","6g==","6w==","7A==","7Q==","7g==","7w==","8A==","8Q==","8g==","8w==","9A==","9Q==","9g==","9w==","+A==","+Q==","+g==","+w==","/A==","/Q==","/g==","/w==","","AQ==","Ag==","Aw==","BA==","BQ==","Bg==","Bw==","CA==","CQ==","Cg==","Cw==","DA==","DQ==","Dg==","Dw==","EA==","EQ==","Eg==","Ew==","FA==","FQ==","Fg==","Fw==","GA==","GQ==","Gg==","Gw==","HA==","HQ==","Hg==","Hw==","IA==","fw==","gA==","gQ==","gg==","gw==","hA==","hQ==","hg==","hw==","iA==","iQ==","ig==","iw==","jA==","jQ==","jg==","jw==","kA==","kQ==","kg==","kw==","lA==","lQ==","lg==","lw==","mA==","mQ==","mg==","mw==","nA==","nQ==","ng==","nw==","o
A==","rQ==","ICA=","ICAgIA==","aW4=","IHQ=","ICAgICAgICA=","ZXI=","ICAg","b24=","IGE=","cmU=","YXQ=","c3Q=","ZW4=","b3I=","IHRo","Cgo=","IGM=","bGU=","IHM=","aXQ=","YW4=","YXI=","YWw=","IHRoZQ==","Owo=","IHA=","IGY=","b3U=","ID0=","aXM=","ICAgICAgIA==","aW5n","ZXM=","IHc=","aW9u","ZWQ=","aWM=","IGI=","IGQ=","ZXQ=","IG0=","IG8=","CQk=","cm8=","YXM=","ZWw=","Y3Q=","bmQ=","IGlu","IGg=","ZW50","aWQ=","IG4=","YW0=","ICAgICAgICAgICA=","IHRv","IHJl","LS0=","IHs=","IG9m","b20=","KTsK","aW0=","DQo=","ICg=","aWw=","Ly8=","IGFuZA==","dXI=","c2U=","IGw=","ZXg=","IFM=","YWQ=","ICI=","Y2g=","dXQ=","aWY=","Kio=","IH0=","ZW0=","b2w=","ICAgICAgICAgICAgICAgIA==","dGg=","KQo=","IHsK","IGc=","aWc=","aXY=","LAo=","Y2U=","b2Q=","IHY=","YXRl","IFQ=","YWc=","YXk=","ICo=","b3Q=","dXM=","IEM=","IHN0","IEk=","dW4=","dWw=","dWU=","IEE=","b3c=","ICc=","ZXc=","IDw=","YXRpb24=","KCk=","IGZvcg==","YWI=","b3J0","dW0=","YW1l","IGlz","cGU=","dHI=","Y2s=","4oA=","IHk=","aXN0","LS0tLQ==","LgoK","aGU=","IGU=","bG8=","IE0=","IGJl","ZXJz","IG9u","IGNvbg==","YXA=","dWI=","IFA=","ICAgICAgICAgICAgICAg","YXNz","aW50","Pgo=","bHk=","dXJu","ICQ=","OwoK","YXY=","cG9ydA==","aXI=","LT4=","bnQ=","Y3Rpb24=","ZW5k","IGRl","MDA=","aXRo","b3V0","dHVybg==","b3Vy","ICAgICA=","bGlj","cmVz","cHQ=","PT0=","IHRoaXM=","IHdo","IGlm","IEQ=","dmVy","YWdl","IEI=","aHQ=","ZXh0","PSI=","IHRoYXQ=","KioqKg==","IFI=","IGl0","ZXNz","IEY=","IHI=","b3M=","YW5k","IGFz","ZWN0","a2U=","cm9t","IC8v","Y29u","IEw=","KCI=","cXU=","bGFzcw==","IHdpdGg=","aXo=","ZGU=","IE4=","IGFs","b3A=","dXA=","Z2V0","IH0K","aWxl","IGFu","YXRh","b3Jl","cmk=","IHBybw==","Ow0K","CQkJCQ==","dGVy","YWlu","IFc=","IEU=","IGNvbQ==","IHJldHVybg==","YXJ0","IEg=","YWNr","aW1wb3J0","dWJsaWM=","IG9y","ZXN0","bWVudA==","IEc=","YWJsZQ==","IC0=","aW5l","aWxs","aW5k","ZXJl","Ojo=","aXR5","ICs=","IHRy","ZWxm","aWdodA==","KCc=","b3Jt","dWx0","c3Ry","Li4=","Iiw=","IHlvdQ==","eXBl","cGw=","IG5ldw==","IGo=","ICAgICAgICAgICAgICAgICAgIA==","IGZyb20=","IGV4","IE8=","MjA=","bGQ=","IFs="
,"b2M=","Ogo=","IHNl","IGxl","LS0tLS0tLS0=","LnM=","ewo=","Jyw=","YW50","IGF0","YXNl","LmM=","IGNo","PC8=","YXZl","YW5n","IGFyZQ==","IGludA==","4oCZ","X3Q=","ZXJ0","aWFs","YWN0","fQo=","aXZl","b2Rl","b3N0","IGNsYXNz","IG5vdA==","b2c=","b3Jk","YWx1ZQ==","YWxs","ZmY=","KCk7Cg==","b250","aW1l","YXJl","IFU=","IHBy","IDo=","aWVz","aXpl","dXJl","IGJ5","aXJl","IH0KCg==","LnA=","IHNo","aWNl","YXN0","cHRpb24=","dHJpbmc=","b2s=","X18=","Y2w=","IyM=","IGhl","YXJk","KS4=","IEA=","aWV3","CQkJ","IHdhcw==","aXA=","dGhpcw==","IHU=","IFRoZQ==","aWRl","YWNl","aWI=","YWM=","cm91","IHdl","amVjdA==","IHB1YmxpYw==","YWs=","dmU=","YXRo","b2lk","ID0+","dXN0","cXVl","IHJlcw==","KSk=","J3M=","IGs=","YW5z","eXN0","dW5jdGlvbg==","KioqKioqKio=","IGk=","IHVz","cHA=","MTA=","b25l","YWls","PT09PQ==","bmFtZQ==","IHN0cg==","IC8=","ICY=","YWNo","ZGl2","eXN0ZW0=","ZWxs","IGhhdmU=","ZXJy","b3VsZA==","dWxs","cG9u","IEo=","X3A=","ID09","aWdu","U3Q=","Lgo=","IHBs","KTsKCg==","Zm9ybQ==","cHV0","b3VudA==","fQoK","ZGQ=","aXRl","IGdldA==","cnI=","b21l","IOKA","YXJhbQ==","Y2M=","ICov","RVI=","SW4=","bGVz","X3M=","b25n","aWU=","IGNhbg==","IFY=","ZXJ2","cHI=","IHVu","cm93","YmVy","IGRv","bGw=","IGVs","IHNlbGY=","YXRlZA==","YXJ5","IC4=","J10=","dWQ=","IGVu","IFRo","ICAgICAgICAgICAgICAgICAgICAgICA=","dGU=","X2M=","dWN0","IGFi","b3Jr","LmdldA==","ICM=","YXc=","cmVzcw==","b2I=","TmFtZQ==","MjAx","YXBw","Wyc=","IGFsbA==","b3J5","aXRpb24=","YW5jZQ==","ZWFy","IGNvbnQ=","dmVudA==","aWE=","IHdpbGw=","SU4=","ICAgICAgICAg","cmV0dXJu","IDwv","ZGF0YQ==","KQoK","UmU=","cGxl","aWxk","dGhlcg==","IHlvdXI=","Igo=","KCQ=","IG91dA==","KSw=","IGhhcw==","U3RyaW5n","c28=","IHVw","YXg=","IGRlZg==","IGJv","Z2U=","YWxzZQ==","T04=","cGVy","MTI=","aWNo","IGJ1dA==","IAo=","IF8=","X20=","YWRk","cXVlc3Q=","b2RlbA==","c2VsZg==","ZXJ5","ZnQ=","ZW5z","Ly8vLw==","YWtl","LkM=","IGdv","IGZ1bmN0aW9u","IEs=","aXZhdGU=","IGlt","IGNvbnN0","LnQ=","ICovCg==","KTsNCg==","IHZvaWQ=","IHNldA==","IFN5c3RlbQ==","Y3Jp","KCkK","bGk=","CWlm","Lm0=","YWxseQ==","c2
V0","ZXA=","4oCZcw==","Ym8=","ZGVm","JywK","IG1l","ICE=","YXRjaA==","Ij4=","IiwK","ZWM=","IElu","cGg=","IHw=","X2Y=","IHZhcg==","ZW5jZQ==","SWQ=","cmVl","aW5r","bGVjdA==","dWc=","ZXRo","IGVsc2U=","LS0tLS0tLS0tLS0tLS0tLQ==","MTk=","Y29udA==","IHNv","YXRpYw==","IGxv","cHJv","dG9u","c3M=","b3du","YWJlbA==","b2ludA==","b3Vz","ZWxk","U1Q=","VGhl","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","UkU=","Ijo=","b2xvcg==","dHA=","ZWc=","a2V5","dWRl","IFN0","b3VuZA==","IGFy","Iik7Cg==","ZW5lcg==","c2Vy","MTE=","YmplY3Q=","ZXNzYWdl","ZmVy","IG1vcmU=","YXRpb25z","ZW50cw==","IGhpcw==","IHRoZXk=","LlM=","IFk=","dXNl","bmU=","aXNo","b2xk","X2Q=","aW8=","aWVsZA==","IHBlcg==","Q29udA==","aW5ncw==","IyMjIw==","IGRhdGE=","IHNh","ZWY=","Zm8=","IG9uZQ==","ZW5n","IGRpcw==","QVQ=","IG5hbWU=","IHRydWU=","dmFs","bGVk","LmY=","IG5l","IGVuZA==","MzI=","LlQ=","MTY=","Y3Jl","YXJr","bG9n","RXg=","ZXJyb3I=","X2lk","dXJyZQ==","YW5nZQ==","IG51bGw=","cnJheQ==","IG15","cGFu","aWN0","YXRvcg==","Vmlldw==","TGlzdA==","CXJldHVybg==","4oCd","IHByZQ==","IHg=","Y2x1ZGU=","YXJn","MTU=","b3Y=","Lmg=","ID4=","IHRoZWly","Jyk=","aXJzdA==","aWNr","Z2g=","TEU=","T1I=","IHByaXZhdGU=","dGVt","DQoNCg==","dXNlcg==","ICk=","Y29t","LkE=","IjsK","IGlk","cmVhZA==","IHdobw==","X2I=","Ij4K","IHRpbWU=","IG1hbg==","cnk=","PT09PT09PT0=","cm91cA==","cm9w","cHVibGlj","dmVs","dW1iZXI=","Ymxl","IHdoaWNo","KioqKioqKioqKioqKioqKg==","IGFueQ==","IGZhbHNl","d2U=","IHZhbHVl","IGxp","Iik=","bmRlcg==","Z3I=","IG5v","cGFyYW0=","MjU=","Zmln","LmNvbQ==","IGFwcA==","X2w=","aW9ucw==","LkQ=","IENo","IGFib3V0","IGFkZA==","IHN1","IHN0cmluZw==","SUQ=","IG92ZXI=","c3RyaW5n","Lmw=","b3VyY2U=","MDAw","X0M=","XQo=","IHF1","IFN0cmluZw==","Y2E=","U0U=","IHJv","c2g=","dWFs","VHlwZQ==","c29u","bmV3","ZXJu","IGFn","QVI=","XTsK","XS4=","ID8=","aWNhbA==","IGRlcw==","dXRo","aXg=","YXlz","IHR5cGU=","J3Q=","YXVsdA==","IGludGVy","dmFy","LmI=","IHBhcnQ=","LmQ=","dXJyZW50","SVQ=","RU4=","MzA=","ZW5j","KGY=","cmE=","dmFsdWU=","Y2hv","MTg=","dXR0b24=","b3Nl","MTQ=",
"ICE9","YXRlcg==","w6k=","cmVhdGU=","b2xs","cG9z","eWxl","bmc=","QUw=","dXNpbmc=","YW1lcw==","IHsNCg==","YXRlcw==","ZWx5","IHdvcms=","IGVt","aW5hbA==","IHNw","IHdoZW4=","LnNldA==","ICAgICAg","KToK","dG8=","cXVpcmU=","aW5kb3c=","bGVtZW50","cGVjdA==","YXNo","W2k=","IHVzZQ==","LkY=","cGVj","IGFk","b3Zl","Y2VwdGlvbg==","ZW5ndGg=","aW5jbHVkZQ==","YWRlcg==","ICAgICAgICAgICAgICAgICAgICAgICAgICAg","YXR1cw==","VGg=","aXRsZQ==","cml0","dm9pZA==","KCku","KAo=","IG9mZg==","IG90aGVy","ICYm","JzsK","bXM=","IGJlZW4=","IHRl","bWw=","Y28=","bmM=","MTM=","ZXJ2aWNl","ICU=","KioK","YW5u","YWRl","CgoKCg==","bG9jaw==","Y29uc3Q=","MTAw","cG9uc2U=","IHN1cA==","Kys=","ZGF0ZQ==","IGFjYw==","IGhhZA==","IGJ1","MjAw","IFJl","IHdlcmU=","IGZpbGU=","IHdvdWxk","IOKAnA==","dmVu","aXNz","IG91cg==","Y2xhc3M=","cmF3","IHllYXI=","RGF0YQ==","IHZhbA==","IHNvbWU=","ZnRlcg==","eXM=","IC8vLw==","cm91bmQ=","dmlldw==","IHBl","IHRoZXJl","IHNhaWQ=","ZHU=","b2Y=","bGluZQ==","Lyo=","ZHVjdA==","IGhlcg==","ICAgICAgICAgICAgIA==","UmVz","IGNv","IGNvbW0=","aXNl","bWlu","ICAgIAo=","I2luY2x1ZGU=","ZXRob2Q=","LlA=","dXRl","IGFzcw==","SW50","YXNr","bG9j","IGxpa2U=","b2R5","IGxldA==","bG9hZA==","IGFt","cm9s","IGdy","eXA=","IGFsc28=","IEl0","dXJs","aWZpYw==","b3Jz","X1A=","X24=","aWdo","IHRoYW4=","Q29t","QU4=","VUw=","YXRpbmc=","MTc=","IFRoaXM=","cmVm","X1M=","IHN0YXRpYw==","cm9sbA==","IGp1c3Q=","IHJlc3VsdA==","aWFu","aWR0aA==","IHRoZW0=","KSk7Cg==","ZGVy","cmVhaw==","Q29u","Oi8v","dWxl","Li4u","YXJjaA==","ZW1lbnQ=","IDw8","NTA=","dXNo","ZW5zZQ==","YXJy","IGludG8=","Y2Vzcw==","YW1w","aWVk","dW1lbnQ=","IFw=","XSw=","d28=","YWxz","IHdoYXQ=","YW5j","VmFsdWU=","PSc=","b2x1bQ==","IHBvcw==","YWdlcw==","YXllcg==","IHNj","dWVz","IikK","X1Q=","IGxpc3Q=","KHM=","IGNhc2U=","Q2g=","CQkJCQk=","Ly8vLy8vLy8=","cG9uZW50","IHo=","IGtu","bGV0","REU=","cmVk","IGZl","IH0sCg==","ICw=","KHQ=","IGZpcnN0","Jyk7Cg==","d29yZA==","IGltcG9ydA==","IGFjdA==","IGNoYXI=","Q1Q=","IFRy","b3BsZQ==","PXs=","CWY=","MjQ=","aWVudA==","Y2VudA==","Lmo=","bGVjdGlvbg
==","KSkK","IG9ubHk=","IHByaW50","bWVy","Llc=","b2Nr","IC0t","VGV4dA==","IG9w","YW5r","IGl0cw==","IGJhY2s=","WyI=","IG5lZWQ=","IGNs","IHN1Yg==","IGxh","KCg=","LiI=","T2JqZWN0","IHN0YXJ0","ZmlsZQ==","KHNlbGY=","bmVy","ZXk=","IHVzZXI=","IGVudA==","IENvbQ==","aXRz","IENvbg==","b3VibGU=","b3dlcg==","aXRlbQ==","dmVyeQ==","IFdl","NjQ=","bGljaw==","IFE=","cGhw","dHRw","Jzo=","aWNz","IHVuZGVy","ICoK","Lkw=","KTs=","aWNlcw==","IHJlZw==","KQ0K","CXB1YmxpYw==","U1M=","IHRoZW4=","cmVhdA==","aW91cw==","Lkc=","ZWs=","aXJlY3Q=","aGVjaw==","Y3JpcHQ=","bmluZw==","IFVu","IG1heQ==","IFdo","Qm8=","SXRlbQ==","c3RydWN0","LnN0","cmVhbQ==","aWJsZQ==","bG9hdA==","IG9yZw==","dW5k","c3Vt","X2lu","Li4v","X00=","IGhvdw==","cml0ZQ==","Jwo=","VG8=","NDA=","d3c=","IHBlb3BsZQ==","aW5kZXg=","Lm4=","aHR0cA==","KG0=","ZWN0b3I=","IGluZA==","IGphdg==","XSwK","IEhl","X3N0","ZnVs","b2xl","KXsK","IHNob3VsZA==","b3B5","ZWxw","aWVy","X25hbWU=","ZXJzb24=","SU9O","b3Rl","IHRlc3Q=","IGJldA==","cnJvcg==","dWxhcg==","44A=","INA=","YnM=","dGluZw==","IG1ha2U=","VHI=","IGFmdGVy","YXJnZXQ=","Uk8=","b2x1bW4=","cmM=","X3Jl","ZGVmaW5l","MjI=","IHJpZ2h0","cmlnaHQ=","ZGF5","IGxvbmc=","W10=","KHA=","dGQ=","Y29uZA==","IFBybw==","IHJlbQ==","cHRpb25z","dmlk","Lmc=","IGV4dA==","IF9f","JykK","cGFjZQ==","bXA=","IG1pbg==","c3RhbmNl","YWly","YWN0aW9u","d2g=","dHlwZQ==","dXRpbA==","YWl0","PD8=","SUM=","dGV4dA==","IHBo","IGZs","Lk0=","Y2Nlc3M=","YnI=","Zm9yZQ==","ZXJzaW9u","KSwK","LnJl","YXRlZw==","IGxvYw==","aW5z","LXM=","dHJpYg==","IEludA==","IGFycmF5","LCI=","UHJv","KGM=","ZXNzaW9u","PgoK","IHNoZQ==","Il0=","YXBo","IGV4cA==","ZXJ0eQ==","IFNl","IHBhcg==","dW5j","RVQ=","IHJlYWQ=","cHJpbnQ=","IHJlbA==","IGZvcm0=","IGRy","RXhjZXB0aW9u","aW5wdXQ=","IHRyYW5z","IyMjIyMjIyM=","b3JkZXI=","Qnk=","IGF3","aXRpZXM=","dWZm","cGxheQ==","LmFkZA==","IOKAkw==","IHdhbnQ=","IGNvbXA=","bWVudHM=","IHx8","YXo=","YmU=","IG51bWJlcg==","IHJlcXVpcmU=","IEV4","NjA=","IGNvbA==","IGtleQ==","ZW1iZXI=","IHR3bw==","IHNpemU=","IHdoZXJl","VVQ=","cmVzdWx0","ICAgICA
gICAgICAgICAgICAgICAgICAgICAgICAgIA==","b3VnaA==","b3JsZA==","b29k","dWNo","YXRpdmU=","Z2Vy","YXJlbnQ=","IC8q","IGFyZw==","IHdoaWxl","MjM=","KHRoaXM=","IHJlYw==","IGRpZg==","U3RhdGU=","IHNwZWM=","cmlkZQ==","X0Y=","IGxvb2s=","QU0=","aWxpdHk=","ZXRlcg==","4oCZdA==","CgoK","YXlvdXQ=","LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0=","YWdlcg==","IGNvdWxk","IGJy","ZW5kcw==","dXJlcw==","IGtub3c=","ZXRz","IElm","IFNo","Lnc=","YmFjaw==","IHNlcg==","ICs9","IGZy","KCkpOwo=","IGhhbmQ=","SW5k","VUxM","SW0=","KCk7Cgo=","IG1vc3Q=","IHRyeQ==","IG5vdw==","cm91Z2g=","Pg0K","YWNrYWdl","IGhpbQ==","Ll8=","aWZ5","IGJyZWFr","ICk7Cg==","cmVu","I2RlZmluZQ==","aXR0","IGFw","CWM=","KG4=","IFlvdQ==","OgoK","LW0=","IGV2ZXJ5","dXN0b20=","bGllbnQ=","b2N1bWVudA==","Y3JpcHRpb24=","RXJyb3I=","LWI=","0L4=","XVs=","OTk=","dHJhbnM=","IHBvaW50","IHN0ZA==","IGZpbA==","VGltZQ==","ODA=","IG1vZA==","IC0+","IGVycm9y","YWg=","IHRleHQ=","cm9sbGVy","bG9zZQ==","cWw=","IHBvbA==","Pjwv","IHNob3c=","VXNlcg==","YXNlZA==","IHsKCg==","IGZpbmQ=","0LA=","RUQ=","c3Bhbg==","ZW51","IGN1cnJlbnQ=","IHVzZWQ=","Y2VwdA==","Y2x1ZA==","IHBsYXk=","IGxvZw==","dXRpb24=","Zmw=","IHNlZQ==","aW5kb3dz","IGhlbHA=","IHRoZXNl","IHBhc3M=","IGRvd24=","IGV2ZW4=","YXNvbg==","dWlsZA==","ZnJvbQ==","KGQ=","IGJs","bGFiZWw=","ZWxzZQ==","0LU=","ICgh","aXplZA==","KCks","IG9i","IGl0ZW0=","dW1w","VVI=","b3Ju","IGRvbg==","U2U=","bWFu","Mjc=","YW1wbGU=","dG4=","PT09PT09PT09PT09PT09PQ==","SGU=","Z3JhbQ==","IGRpZA==","d24=","X2g=","aXZlcg==","IHNt","IHRocm91Z2g=","IEFu","Y2hl","IGludg==","b3VzZQ==","IGVz","IE5ldw==","ZXhwb3J0","bWFyeQ==","dXRv","bGVy","IGxhc3Q=","IGV2ZW50","dHJ5","77w=","aWx5","aWduZWQ=","aW5lcw==","b2xsb3c=","aWNlbnNl","c29sZQ==","bGVhcg==","KGludA==","IGFnYWlu","IGhpZ2g=","aHRtbA==","SW5kZXg=","dXRob3I=","IC8qKgo=","IGxpbmU=","RXZlbnQ=","X0Q=","IGRvZXM=","aXRpYWw=","IGNy","YXJz","Mjg=","IHRlbQ==","Y2F1c2U=","ZmFjZQ==","IGA=","X0E=","QnV0dG9u","YXR1cmU=","ZWN0ZWQ=","RVM=","aXN0ZXI=","CQo=","IGJlZm9yZQ==","YWxl","b3RoZXI=","IGJlY2F1c2U=","cm
9pZA==","IGVk","aWs=","cmVn","IERl","IGRpc3Q=","fSwK","IHN0YXRl","IGNvbnM=","cmludA==","YXR0","IGhlcmU=","aW5lZA==","IGZpbmFs","ICIi","S2V5","TE8=","IGRlbA==","cHR5","dGhpbmc=","MjY=","IEFuZA==","IHJ1bg==","IFg=","eW0=","LmFwcA==","IHZlcnk=","Y2Vz","X04=","YXJlZA==","d2FyZA==","bGlzdA==","aXRlZA==","b2xvZw==","aXRjaA==","Qm94","aWZl","MzM=","IGFj","IG1vZGVs","IG1vbg==","IHdheQ==","bGV0ZQ==","IGNhbGw=","IGF0dA==","IGNhbA==","dmVydA==","IGRlYw==","bGVhc2U=","b3Vu","IH0pOwo=","ZnI=","Zm9ybWF0aW9u","ZXRhaWw=","IG51bQ==","YWo=","cXVlcnk=","IHdlbGw=","IG9iamVjdA==","IEFz","IHllYXJz","Q29sb3I=","SVM=","IGRlZmF1bHQ=","V2g=","IGlucw==","YWludA==","IGphdmE=","IHNpbQ==","IEFy","bW9u","dGls","KCk7DQo=","KTo=","U2V0","Mjk=","YXR0ZXI=","IHZpZXc=","IHByZXM=","YXJyYXk=","V2U=","QXQ=","IGJlbA==","IG1hbnk=","MjE=","TWFu","ZW5kZXI=","IGJlaW5n","IGdvb2Q=","CQkJCQkJ","YXRpb25hbA==","d2FyZQ==","LmxvZw==","ew0K","IHVzaW5n","X0I=","IDo9","X3c=","aXN0cw==","bGlzaA==","IHN0dWQ=","IEFs","IGd1","Y29uZmln","dXJpbmc=","dGltZQ==","b2tlbg==","YW1lc3BhY2U=","IHJlcXVlc3Q=","IGNoaWxk","IMM=","bG9i","IHBhcmFt","IH0NCg==","MDE=","IGVjaG8=","ZnVuY3Rpb24=","KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio=","cHM=","RWxlbWVudA==","YWxr","bGljYXRpb24=","Ynk=","U2l6ZQ==","cmF3aW5n","IHBlcnNvbg==","ICAgICAgICAgICAgICAgICA=","XG4=","b2JqZWN0","aW5jZQ==","RW4=","RmlsZQ==","dWY=","ZmZlY3Q=","QUM=","IHN0eWxl","c3VtbWFyeQ==","IHF1ZQ==","X3I=","ICgk","TW9kZWw=","aWRlbnQ=","IG1ldGhvZA==","SUw=","b3R0","bGVzcw==","SU5H","ICgp","IGV4cGVjdA==","eW5j","cGFja2FnZQ==","MzU=","dXJz","IHByb3Q=","Li8=","cHJl","ICkK","bWE=","IHN1cg==","IGZvdW5k","SW5mbw==","cGFy","aW1lcw==","LmU=","YWlucw==","IHBvc3Q=","LWQ=","NDU=","b2xlYW4=","IHNs","UEU=","IHN1Y2g=","c2VsZWN0","YWluZXI=","IHRoaW5r","IGRpZmZlcg==","LnI=","LyoqCg==","RkY=","b29s","cGxhdGU=","cXVhbA==","IEZvcg==","IG11Y2g=","dWM=","KG5ldw==","b2R1bGU=","IHNvbQ==","IGh0dHA=","IExpc3Q=","IGNvdW50","IGluc3Q=","Y2hhcg==","bWl0","Lmlk","YWtpbmc=","IGdlbmVy","cHg=","dmljZQ==","Mzc=","X2
RhdGE=","IE5VTEw=","fQ0K","aWRk","44CC","IG1lZA==","b3Jn","aWRlcg==","YWNoZQ==","d29yaw==","IGNoZWNr","d2Vlbg==","ICgo","dGhl","YW50cw==","Pjw=","LkI=","LWM=","IG9wZW4=","IGVzdA==","ICAgICAgICAK","IG5leHQ=","SU0=","0YI=","T1Q=","w7M=","IGZvbGxvdw==","Y29udGVudA==","ICAgICAgICAgICAg","IGluY2x1ZA==","SEU=","IFJlcw==","IGhyZWY=","0Lg=","IGNhcg==","eXBlcw==","aW1hZ2U=","VW4=","IGJvb2w=","QUQ=","IGdhbWU=","LkZvcm0=","cm93cw==","Ki8=","dmVsb3A=","LkRyYXdpbmc=","IHBhdGg=","aXNpb24=","IGVhY2g=","IFBs","X3R5cGU=","UGF0aA==","bmVjdGlvbg==","IGF2","Jyku","IHN1cHBvcnQ=","RU5U","cmVt","Iiku","IG93bg==","IGNvcg==","Y291bnQ=","bWlzcw==","dWFsbHk=","IG1lbQ==","c3Rk","aWVuY2U=","c2VhcmNo","IgoK","Rm9ybQ==","IHNleA==","ZW5hbWU=","IHNpZ24=","IGV0","ICAgICAgICAgIA==","Jywn","IEFwcA==","IHRob3Nl","b2Zm","IGVycg==","IHN5c3RlbQ==","IGJlc3Q=","Y29kZQ==","IHNhbWU=","IGRp","dXNz","IGNyZWF0ZQ==","YXRoZXI=","QXJyYXk=","Lmlu","ZmU=","U2VydmljZQ==","VU4=","YXRz","IFo=","YWx0aA==","IG1hZGU=","dHJ1ZQ==","QUI=","IG1hcms=","cmlk","aWZpZWQ=","LA0K","eW4=","cHJlc3M=","IGdyb3Vw","IGZpbg==","IExpY2Vuc2U=","RmllbGQ=","ZWdlcg==","IHdvcmxk","aW5lc3M=","dHk=","IHByb2Nlc3M=","KGI=","IGNyZQ==","YXJu","aXZlcw==","IG1haW4=","aWRlbw==","MzY=","X2c=","QUc=","dmFsaWQ=","aW1n","UEk=","IGNvbG9y","IHJlcG9ydA==","IHRha2U=","cmli","T00=","IGRheQ==","UmVxdWVzdA==","IHNr","YmVycw==","CXM=","LkFkZA==","b290","SW1hZ2U=","IGNvbXBsZQ==","b2xsZWN0aW9u","IHRvcA==","IGZyZWU=","QVM=","RGU=","IE9u","SUc=","OTA=","ZXRh","RGF0ZQ==","IGFjdGlvbg==","MzQ=","T3Zlcg==","aXRvcg==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","bm90","IGluZGV4","aGVy","aWNvbg==","T24=","Ow0KDQo=","aXZpdHk=","bWFuZA==","LldpbmRvd3M=","T0w=","IHJlYWw=","IG1heA==","bGFuZA==","Li4uLg==","cmFwaA==","IGJ1aWxk","bGVn","YXNzd29yZA==","PwoK","4oCm","b29r","dWNr","IG1lc3NhZ2U=","dGVzdA==","aXZlcnM=","Mzg=","IGlucHV0","IGFydA==","IGJldHdlZW4=","R2V0","ZW50ZXI=","Z3JvdW5k","ZW5l","w6E=","Lmxlbmd0aA==","Tm9kZQ==","KGk=","Q2xhc3M=","Zm9y","IOKAlA==","dGVu","b2lu",
"IGtl","dWk=","IElO","IHRhYmxl","c3Vi","IExl","IGhlYWQ=","IG11c3Q=","Ly8vLy8vLy8vLy8vLy8vLw==","LnV0aWw=","Q29udGV4dA==","IG9yZGVy","IG1vdg==","b3Zlcg==","IGNvbnRpbg==","IHNheQ==","c3RhdGlj","LlRleHQ=","IGNsYXNzTmFtZQ==","cGFueQ==","IHRlcg==","aGVhZA==","cmc=","IHByb2R1Y3Q=","VGhpcw==","LuKAnQ==","IEJ1dA==","NzA=","bG95","IGRvdWJsZQ==","c2c=","IHBsYWNl","Lng=","bWVzc2FnZQ==","IGluZm9ybWF0aW9u","cHJpdmF0ZQ==","IG9wZXI=","Y2Vk","ZGI=","Ij48Lw==","UGFyYW0=","aWNsZQ==","IHdlZWs=","IHByb3A=","dGFibGU=","aWRnZXQ=","cGxhY2U=","UHJvcA==","IEFsbA==","ZWxz","Ym94","LgoKCgo=","LlI=","IFRv","aXRlcg==","U2g=","dXJhdGlvbg==","b2xkZXI=","X2xpc3Q=","Y29tZQ==","IHN3","aXphdGlvbg==","CWZvcg==","Ymw=","IHByb2dyYW0=","KGU=","YXBl","Y2hlY2s=","LkZvcm1z","IHVuZA==","YXRlZ29yeQ==","NzU=","YWdz","IHJlc3BvbnNl","VVM=","cmVxdWVzdA==","IHN0cnVjdA==","ZXNjcmlwdGlvbg==","IGNvZGU=","X0g=","dWZmZXI=","IHdpdGhvdXQ=","bG9iYWw=","TWFuYWdlcg==","aWx0ZXI=","UE8=","CXRoaXM=","b3B0aW9u","IHNvbA==","ID09PQ==","YWtlcw==","Q29udHJvbGxlcg==","NDQ=","TWVzc2FnZQ==","IHJlZg==","ZXZlcg==","IFNv","YWluaW5n","LmFwcGVuZA==","IHN0aWxs","IHByb3ZpZA==","IGFzc2VydA==","bWVk","IGNhcA==","dXNpbmVzcw==","IHJlcA==","dGluZ3M=","dmVk","Lk4=","YXBp","T0Q=","IGZpZWxk","aXZlbg==","b3Rv","4oCc","Y29s","KHg=","Z2h0","UmVzdWx0","Q29kZQ==","Lmlz","bGluaw==","IGNvdXI=","QW4=","IHRlYW0=","CWludA==","aWZ0","NTU=","IHNlY29uZA==","IGdvaW5n","IHJhbmdl","X0U=","bmVzcw==","Mzk=","IGZhbQ==","IG5pbA==","IENvbnQ=","YWlsYWJsZQ==","dXRlcw==","YXRhYg==","IGZhY3Q=","IHZpcw==","KCY=","IEFO","MzE=","QWw=","dGl0bGU=","IGFuZHJvaWQ=","Q0U=","XCI=","aXJ0","IHdyaXQ=","0L0=","CW0=","ZnR3YXJl","b25k","IHJldA==","b3NpdGlvbg==","IGhvbWU=","IGxlZnQ=","YXJncw==","bWVyaWM=","NDg=","IGRpcmVjdA==","b2Np","UGw=","QXM=","cmV0","YWRv","T2Y=","Y2hu","IEdldA==","ZWU=","cm9zcw==","KCk7","X19fXw==","LnBo","SXQ=","b3V0ZQ==","IGV4cGVy","Y2hvb2w=","d3d3","fSw=","IGFsbG93","IMI=","KCkp","c2l6ZQ==","aXNt","YWk=","dHJhY3Q=","YW5l","Li4uCgo=","Y29udGV4dA==","IGJlZw==","Q0g="
,"IHBhZ2U=","aGlw","bm8=","Y29yZQ==","c3A=","IGRpZmZlcmVudA==","aWFibGU=","IE1l","X0lO","YnV0dG9u","IElz","ZXJ2aWNlcw==","IGNh","IGFyb3VuZA==","QXBw","cmF0aW9u","IHJlY2U=","IHJlYWxseQ==","IGltYWdl","IHRhcmdldA==","IGRlcA==","b3B5cmlnaHQ=","dHJh","aW5nbGU=","aXRhbA==","TGF5b3V0","IGJvdGg=","T3ZlcnJpZGU=","YXJt","PT4=","YXRlcmlhbA==","aWxlZA==","IHB1dA==","UXU=","0YA=","dW5n","bWFw","CQkJCQkJCQk=","IGxldmVs","Q29tcG9uZW50","Ym9vaw==","Y3JlZW4=","X1JF","IGNvbmZpZw==","44E=","T3I=","LmRhdGE=","IGRvY3VtZW50","Iiwi","dHJpYnV0ZQ==","dXg=","TG9n","ZmVyZW5jZQ==","cG9zdA==","X2U=","IGxvY2Fs","YW5kb20=","YXNzZXJ0","VmFs","bGVjdGVk","aW5h","YXRhYmFzZQ==","QWRk","IGNvbnRlbnQ=","LnByaW50","c2lnbmVk","cmlj","LiIKCg==","IGZh","IQoK","LWY=","aXZlZA==","IHF1ZXN0","LmV4","IGZsb2F0","IGRldmVsb3A=","0L7Q","TWFw","YWRpbmc=","IHBvc3M=","VUU=","bmFtZXNwYWNl","X08=","CWI=","LkdldA==","Pig=","anNvbg==","ZXRhaWxz","NjY=","IHRvbw==","IGV4dGVuZHM=","IE5vbmU=","IGZvcmU=","KFN0cmluZw==","Zm9ybWF0","IGdyZWF0","aW50ZXI=","Y2FsZQ==","0YE=","cm9u","aXZpbmc=","RW50","ZW5jeQ==","eHQ=","b3k=","MDU=","IG1vbnRo","IGhhcHA=","IHN1cGVy","YmFy","ZGVmYXVsdA==","X2Rl","b3Jkcw==","bG4=","KHsK","IEluZA==","YXNlcw==","IHRpdGxl","IGNvbnRleHQ=","MDg=","b2g=","LXA=","RW0=","IG1ldA==","VGVzdA==","IGxpZmU=","X3Y=","IFVT","VUk=","b2NhdGlvbg==","bWQ=","IFsK","IF0=","c3c=","IGluY3Jl","c2NyaXB0","ZW50aWFs","d2F5cw==","LmRl","IHNyYw==","IGNhdGNo","IEFtZXJpYw==","Ly8K","ICAgICAgICAgICAgICA=","IHBheQ==","cGxpdA==","4oCU","IGNvdW4=","b2Jq","LnBocA==","IGNoYW5nZQ==","ZXRoaW5n","J3Jl","YXN0ZXI=","bG9z","bGF0aW9u","ICAK","TGU=","w6Q=","KHs=","cmVhZHk=","IE5v","IHBvc2l0aW9u","IG9sZA==","IGJvb2s=","YWJsZWQ=","YnVn","MjAy","SGFuZA==","fTsKCg==","aXNwbGF5","YXZpbmc=","MDQ=","IGdvdmVy","IHZlcnNpb24=","U3lzdGVt","bmVjdA==","cmVzcG9uc2U=","U3R5bGU=","VXA=","YW5ndQ==","IHRocmVl","aW5pdA==","ZXJv","IGxhdw==","ZW5kaWY=","IGJhc2U=","ZW1haWw=","KGw=","X1Y=","IGNvbmY=","QVRF","IGR1cmluZw==","dGVz","IGNvbnNvbGU=","IFBy","IHNwZQ==","dmVz","NjU=
","cGF0aA==","aWFsb2c=","ZGl0aW9u","X3Rv","YXJkcw==","IGFnYWluc3Q=","ZXR3b3Jr","IFBo","X0w=","Y3Vy","aW1pdA==","V2l0aA==","IHBvd2Vy","aXVt","JzsKCg==","IHdvbQ==","bGVmdA==","b3VyY2Vz","YXRyaQ==","IElt","IE1hbg==","b3J0aA==","JHs=","ODg=","cXVhbHM=","ZXNl","X3NpemU=","IGlzcw==","b3RhbA==","LWc=","aXF1ZQ==","cmFtZQ==","IHdpZHRo","ZXJn","KSg=","aXR0bGU=","VFI=","IFRoZXk=","ZW5jZXM=","MDI=","cmw=","b25z","IGxhYmVs","Lnk=","LXQ=","dXBkYXRl","YW5lbA==","c2M=","LnRv","IHByb2plY3Q=","w7w=","IGVsZW1lbnQ=","IHN1Y2Nlc3M=","CQkK","LnNo","cmFt","Y2hlZA==","KCkpCg==","ICgK","IGRhdGU=","IHRvdA==","X1NU","QWxs","aWZpY2F0aW9u","CXZhcg==","IHRyaQ==","Y2hlbQ==","bXk=","IGJpZw==","IEFk","IEF0","b3Rz","bnVt","QWN0","IG1hcA==","ZXJh","Y29wZQ==","LiQ=","LOKAnQ==","IHBvcA==","IGZldw==","IGxlbg==","dWlk","ZXRlcnM=","dWxlcw==","w60=","c291cmNl","aHR0cHM=","IGRlbQ==","IGVhcg==","IyMjIyMjIyMjIyMjIyMjIw==","IG1hdGNo","b3JpZXM=","NDk=","YWNlcw==","IENs","IG5vZGU=","Nzg=","aXJj","bG9jYWw=","dW5pdHk=","fTsK","IGFub3RoZXI=","PDw=","b2dsZQ==","IHNpdA==","ZXdvcms=","VEU=","Lkk=","TlM=","b2xvZ3k=","b3VnaHQ=","LkNvbnQ=","Pj4=","IGNhcmU=","c3RhdGU=","CXByaXZhdGU=","IGVmZmVjdA==","Kysp","X2ZpbGU=","ZW5kaW5n","TGluZQ==","Rm9y","aW9y","IFNj","IGZ1bg==","LlNpemU=","CWVsc2U=","XSk=","c3RhcnQ=","dmlvdXM=","IH0s","b3Vycw==","IGxlZw==","IHNlcnZpY2U=","IHNpbmNl","aXJvbg==","TGFiZWw=","IG5vbg==","IGxvcw==","aWN0aW9u","IGZ1bGw=","YWN0ZXI=","Ym9hcmQ=","Z3Jlc3M=","IHR1cm4=","aXRoZXI=","MDk=","LnNpemU=","IGJvZHk=","cmVzaA==","ZXR1cm4=","MTk5","KF8=","eWxlcw==","b3JtYWw=","cGk=","IHNvbWV0aGluZw==","IS0t","dWludA==","IHByb2R1","IHN0YW5k","IHByb2JsZQ==","IGF2YWlsYWJsZQ==","bXQ=","IEJs","IC4uLg==","IGJsb2Nr","SW5wdXQ=","IGtlZXA=","Q291bnQ=","b3Blbg==","IFsn","IHRocm93","dWlsZGVy","QWN0aW9u","IHRoaW5ncw==","VHJ1ZQ==","IHVybA==","IEJv","cHJpbnRm","IHJlZA==","anM=","LmNyZWF0ZQ==","IE9y","U3RhdHVz","SW5zdGFuY2U=","IGNvbnRyb2w=","IGNvbWU=","IGN1c3RvbQ==","bG9jYXRpb24=","MDc=","bW9kZWw=","IA0K","IHNvdXJjZQ==","IGVhcw==","Lm91d
A==","XQoK","b25leQ==","IGF3YWl0","IHBhcnRpYw==","QVA=","dWJsaXNo","b2Rlcw==","X3Bybw==","cGx5","cml0ZXI=","IHByb3Y=","IG1pbGw=","SFQ=","XSkK","IGNoYW5n","IGFzaw==","ICAgICAgICAgICAgICAgICAgICAg","IG91dHB1dA==","IGVtYWls","Njg=","LnB1c2g=","IH0NCg0K","aW5hdGlvbg==","NDc=","YXRyaXg=","VGFibGU=","dWNjZXNz","XSk7Cg==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","IGRpc2M=","KFs=","IGJ1c2luZXNz","aGVpZ2h0","Lmh0bWw=","dGE=","ZmllbGQ=","IHJlcXVpcmVk","X1I=","IGdvdmVybg==","fQ0KDQo=","bGV4","NTAw","Liw=","IFNldA==","dXJjaA==","Ly8v","dHM=","YWY=","IG1pZ2h0","aXN0b3J5","U3Ry","IG5ldmVy","UmVzcG9uc2U=","YXJzZQ==","YWRh","IEhvdw==","ICop","IDs=","IGhhcmQ=","QWQ=","IGludGVybg==","dXNlZA==","KGRhdGE=","bW9k","YW5uZWw=","IG5w","dWdn","IC8+Cg==","IGNhbGxlZA==","Ym9keQ==","IGNobw==","KHI=","X3NldA==","aXJk","ID49","IH07Cg==","IG9wdGlvbnM=","IEdlbmVy","IGhlaWdodA==","UG9pbnQ=","WW91","ZXR5","Q2xpY2s=","IHNtYWxs","IGlkZQ==","IGFjY2Vzcw==","YW5ndWFnZQ==","IHByb3RlY3RlZA==","IGpvYg==","IFRoZXJl","RGVm","IGFkZHJlc3M=","IHVpbnQ=","Tm90","b28=","YXBz","PGRpdg==","YWluZWQ=","YXR1cg==","IHN1bQ==","LXc=","IERhdGU=","IGxpdHRsZQ==","IGZyaQ==","WVBF","IHBvcnQ=","ZWg=","cHJpbmc=","X3BhdGg=","IHN0YXR1cw==","MDY=","YWlt","Ym9vbA==","IGFwcGU=","IG9z","Lm5hbWU=","ZW5zaW9u","X0c=","IHVwZGF0ZQ==","Q29uZmln","YWZm","RVJS","IDw9","YXRlbHk=","I2lm","dWN0aW9u","OTU=","IFRl","IGxpbms=","IFVzZXI=","LmZpbmQ=","Lm9yZw==","bWU=","IGdpdmVu","T3V0","I2VuZGlm","IGJldHRlcg==","UGFnZQ==","IGZlZWw=","ZW5u","TUw=","IGFscmVhZHk=","IGluY2x1ZGluZw==","b29nbGU=","cnU=","aWNhbGx5","cHJvcA==","bGVhbg==","b3V0ZXI=","IGFsd2F5cw==","b3JkaW5n","SWY=","b3JhZ2U=","IHBhcmVudA==","dmlz","CQkJCQkJCQ==","IGdvdA==","c3RhbmQ=","IGxlc3M=","L3M=","IEFzcw==","YXB0","aXJlZA==","IEFkZA==","IGFjY291bnQ=","cGxveQ==","IGRlcg==","cmVzZW50","IGxvdA==","IHZhbGlk","CWQ=","IGJpdA==","cG9uZW50cw==","IGZvbGxvd2luZw==","X2V4","U09O","IHN1cmU=","b2NpYWw=","IHByb20=","ZXJ0aWVz","aGVhZGVy","LnBybw==","IGJvb2xlYW4=","IHNlYXJjaA==","a2Vu","IG9
yaWc=","IGVy","RWQ=","RU0=","YXV0","bGluZw==","YWxpdHk=","QnlJZA==","YmVk","CWNhc2U=","NDY=","ZXRoZXI=","cG9zaXQ=","IGludmVzdA==","IE9S","IHNheXM=","bWlzc2lvbg==","QU1F","IHRlbXA=","b2Fk","IHJlc3Q=","aW5mbw==","IGludGVyZXN0","QXJn","IHBlcmZvcm0=","cG9ucw==","IFZpZXc=","IHZlcg==","bGli","KGNvbnN0","VXRpbA==","TGlzdGVuZXI=","YXJnZQ==","Nzc=","IG11bHQ=","IGRpZQ==","IHNpdGU=","Li4vLi4v","RUw=","IHZhbHVlcw==","IH0pCg==","cGVu","Tm8=","aWNybw==","IGJlaA==","ICcuLw==","YWN5","cmVj","KCktPg==","CSAgIA==","Iikp","Q29udGVudA==","X1c=","cGxlbWVudA==","IHdvbg==","IHZpZGVv","YWRp","cG9pbnQ=","JSU=","MDM=","IGds","ZXJ2ZWQ=","dmlyb24=","SUY=","dXRlZA==","44M=","J20=","IGNlcnQ=","IHByb2Y=","IGNlbGw=","YXJp","IHBsYXllcg==","YWlz","IGNvc3Q=","IGh1bQ==","KFI=","IG9mZmlj","a3M=","LnRleHQ=","YXR1cmVz","IHRvdGFs","ICovCgo=","b3Bl","IHN0YXQ=","VU0=","IGxvYWQ=","aWdodHM=","IGNsZWFy","dXJv","IHRlY2hu","dXBwb3J0","SVI=","IHJvdw==","IHNlZW0=","IHE=","IHNob3J0","IE5vdA==","aXBw","R3JvdXA=","c2VjdGlvbg==","bWF4","aXJs","IG92ZXJyaWRl","IGNvbXBhbnk=","IGRvbmU=","Iik7DQo=","IGdyZQ==","LlJl","IGJlbGll","cmlzdA==","IGhlYWx0aA==","QU5U","KCkKCg==","IEJl","LnZhbHVl","IEdy","b3R0b20=","IGFyZ3M=","UFQ=","c3RhdHVz","ZnVuYw==","dW1lbnRz","LWg=","TnVtYmVy","Og0K","IExvZw==","ZXJ2ZXI=","ICksCg==","YW1lbnQ=","IG9iag==","aW5j","IGNoaWxkcmVu","aWN5","SVo=","YW5kcw==","YWJseQ==","IGRpc3RyaWI=","IGN1cg==","ZXJpYWw=","IGRheXM=","cmVhdGVk","cmVjdA==","LWw=","aXJt","aWRkZW4=","b21i","IGluaXRpYWw=","Lmpz","IOI=","UXVlcnk=","IG9ubGluZQ==","aW1hbA==","LmNvbg==","YXU=","VXJs","Y29udHJvbA==","aXJlY3Rpb24=","IGluc3RhbmNl","T1JU","IEZy","d2hlcmU=","IGphdmF4","IG9yZ2Fu","YXB0ZXI=","IHJlYXNvbg==","b3B0aW9ucw==","NTk=","IE1hcg==","KGE=","IHdpdGhpbg==","LuKAnQoK","T0RF","X0RF","YWRtaW4=","ZW5kZWQ=","IGRlc2lnbg==","IERhdGE=","dW5l","IEZpbGU=","cm9vdA==","IGNlbnQ=","IGFycg==","X2FkZA==","bGVu","cGFnZQ==","LCc=","X3N0cg==","IGJybw==","YWJpbGl0eQ==","b3V0aA==","NTg=","L2M=","cG9zZQ==","aXJ0dWFs","ZWFyY2g=","X3VybA==","YXJnaW4=","
SHR0cA==","IHNjaG9vbA==","YXZh","IGNvbnNpZGVy","LmxhYmVs","IEFycmF5","NDI=","d2Vi","b3B0","LnByaW50bG4=","dWxhdGlvbg==","IGZ1bmM=","UEw=","ICJc","IFRleHQ=","YWN0b3J5","KGZ1bmN0aW9u","bnVsbA==","IGVuZw==","ZG93bg==","IGluY2x1ZGU=","IEVu","IERy","IGRi","ISE=","c2lkZQ==","IGluaXQ=","cXVpcmVk","IFNoZQ==","Q29sdW1u","cmVhY3Q=","IGFubg==","IHN0b3A=","IGxhdGVy","IFRoYXQ=","ZW50aW9u","ZGY=","VUc=","SUxF","IGNsaWVudA==","cmFmdA==","ZmZlcg==","UE9TVA==","ZWxwZXI=","IGxvdmU=","cXVvdGU=","b3Vk","IGpzb24=","IGFibGU=","IG1lbg==","QVg=","IENvcHlyaWdodA==","w7Y=","YXZpZw==","cmVx","Q2xpZW50","fSk7Cg==","LkNvbQ==","ZXJj","aWx0","cGVjaWFs","X2NvbQ==","cm9vbQ==","Lk5hbWU=","IGdpdmU=","YW1i","aWtl","IGNvbmRpdGlvbg==","Y2xpZW50","YXRvcnM=","OiI=","IGNvcHk=","dXR1cmU=","aXZlcnNpdHk=","ZXJuYWw=","e3s=","IENhbg==","b3VuYw==","ZG8=","IG9jYw==","IGFwcHJv","dGhlcnM=","emU=","IGVpdGhlcg==","IEZs","IGltcG9ydGFudA==","IGxlYWQ=","YXR0cg==","QVJU","RXF1YWw=","IGRh","ZXRjaA==","ZW50aXR5","IGZhbWlseQ==","YWRkaW5n","IG9wdGlvbg==","IGV4aXN0","aWNh","IE9iamVjdA==","Njk=","J3Zl","dmVycw==","aXRpb25hbA==","Njc=","b3V0cHV0","IFRydWU=","IE9G","X3RpbWU=","IG9mZmVy","IH0pOwoK","SEVS","ZWdpbg==","IiI=","IHdhdGVy","IGNoZQ==","IE15","b3JlZA==","IHN0ZXA=","YW5jZXM=","Q0s=","QVk=","4Lg=","c3RydWN0aW9u","KEM=","MzAw","b3VjaA==","U3RyZWFt","YWN0aXZl","YW1h","RW50aXR5","cHJvZHVjdA==","KCl7Cg==","IGdvdmVybm1lbnQ=","IElE","YWpvcg==","QW5k","IGRpc3BsYXk=","0Ls=","IHRpbWVz","IGZvdXI=","IGZhcg==","IHByZXNlbnQ=","IE5T","IFwK","dWVzdA==","IGJhcw==","ZWNobw==","Y2hpbGQ=","aWZpZXI=","SGFuZGxlcg==","IGxpYg==","UHJvcGVydHk=","dHJhbnNsYXRpb24=","IHJvb20=","IG9uY2U=","IFtd","Y2VudGVy","PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0=","IHJlc3VsdHM=","IGNvbnRpbnVl","IHRhbGs=","X2dldA==","IGdyb3c=","LnN3","ZWI=","IFB1YmxpYw==","T1A=","ZWN1dGU=","b2xz","ICoq","Iik7Cgo=","IG1hc3M=","dXJlZA==","LmNsYXNz","b21pYw==","IG1lYW4=","aXBz","IGF1dA==","KTsNCg0K","IHVudGls","IG1hcmtldA==","IGFyZWE=","dWl0","IGxlbmd0aA==","IFdpdGg=","c3RydWN0b3I="
,"ZXZlbnQ=","Ij48","IFNw","SVY=","IG11cw==","aWZm","IGtpbmQ=","YXV0aG9y","b3VuZHM=","bWI=","X2tleQ==","NDE=","d2lkdGg=","cG9zaXRvcnk=","IGxpZ2h0","dWs=","Um93","b2hu","YWxm","dmlyb25tZW50","YXBwZXI=","b2xsZWN0aW9ucw==","IHNpZGU=","X2luZm8=","IGV4YW1wbGU=","aW1hcnk=","IHdy","IGNhbXA=","Y3JpYmU=","MjU1","Ii8=","IG1pc3M=","d2F5","IGJhc2Vk","IHBsYW4=","Vmlz","b21haW4=","dW5r","IGF3YXk=","VVA=","PFQ=","T1M=","aW9k","IE1vbg==","4oCZcmU=","IGxpaw==","w6c=","aXZlbHk=","LnY=","aW1lcg==","aXplcg==","U3Vi","IGJ1dHRvbg==","IFVw","IGV4cGVyaWVuY2U=","Q0w=","IHJlbmRlcg==","X3ZhbHVl","IG5lYXI=","VVJM","YWx0","IGNvdW50cnk=","aWJpbGl0eQ==","NTc=","KCksCg==","ZWFk","IGF1dGhvcg==","IHNwZWNpZmlj","YmFzZQ==","KG5hbWU=","b25lcw==","IERv","IGFsb25n","eWVhcg==","IGV4cHJlc3M=","Lic=","ZW52","IGJlZ2lu","IHNvZnR3YXJl","IGltcA==","IHdpbg==","w7Nu","IHRoaW5n","VHJhbnM=","IFRIRQ==","IDw/","IHdoeQ==","IGRvZXNu","aWo=","Z2luZw==","CWc=","IHNpbmdsZQ==","b2Zmc2V0","YXJuaW5n","b2dyYXBo","bGV5","X2NvdW50","IGFuYWw=","Y3JlYXRl","L20=","IFJlZw==","OTg=","dW5jaA==","PSQ=","aXNr","IHJpZ2h0cw==","KE0=","ICIiIgo=","YXBlcg==","Lm1vZGVs","IHBv","ZW1wdHk=","YXJ0bWVudA==","IGFudA==","IFdoZW4=","IHdvbWVu","IEVk","IHNlYXNvbg==","IGRlc3Q=","w6M=","KGg=","IHBvc3NpYmxl","IHNldmVy","IGJ0bg==","IGRpZG4=","IHNlbnQ=","IGVuYw==","IGNvbW1hbmQ=","IF0sCg==","X3g=","IHJlY2VudA==","b2x1dGlvbg==","dmVjdG9y","IEJ5","IE1heQ==","IEFjdA==","u78=","IG1vbmV5","SU5U","YnNpdGU=","CXA=","Lg0K","77u/","c2w=","YXR0ZXJu","IENsYXNz","IHRvbGQ=","dWRpbw==","Y3VycmVudA==","IGVxdQ==","IGF1dG8=","IFN0YXRl","ZGE=","bXNn","KSk7Cgo=","IHdvcmtpbmc=","IHF1ZXJ5","IEJy","IHdpbmRvdw==","YXV0aA==","b25seQ==","CXQ=","IGxlYXN0","YWdu","IGV4cGw=","aXR0ZXI=","YXJpbmc=","IGNvbHVtbg==","IEdlbmVyYWw=","Ijoi","ZXJhbA==","cmlvcg==","IHJlY29yZA==","SUI=","RVg=","IGRhdA==","IG1ha2luZw==","dWVk","IENhcg==","ZW1w","Ii4=","IE1lZA==","IGNsb3Nl","IHBlcmNlbnQ=","IHBhc3Q=","KGc=","Oig=","IHdyaXRl","IG1vdmU=","IHBhdA==","Q29udHJvbA==","LlRv","IHZp","Ki8K","aW5hdGU=","J2xs"
,"YWdlZA==","TnVsbA==","IHNwZWNpYWw=","SVpF","IGNpdHk=","LyoK","IEVuZw==","aXhlZA==","aW5hcnk=","cHk=","IGVmZg==","YXJpbw==","IHRlbGw=","YXZvcg==","IHNlbGVjdA==","bGV2ZWw=","aW11bQ==","b3Blcg==","QnVpbGRlcg==","SVA=","JyksCg==","ZXNj","IGZvbnQ=","IjsKCg==","IEFt","aXNoZWQ=","aWxscw==","SW50ZXI=","T1c=","IGNvdXJzZQ==","IGxhdGU=","aWRkbGU=","NDM=","IGFtb3VudA==","IGFzeW5j","aW5v","Y3Vs","IOw=","YW5kbGU=","X3VzZXI=","IGJlbg==","IENhbA==","ICRf","IFJlcA==","IGVub3VnaA==","VG9rZW4=","LnVzZXI=","KGo=","U2M=","V2lkdGg=","bm93","YXRmb3Jt","IGxvb2tpbmc=","IGhvbGQ=","TW9kdWxl","SVRZ","dm8=","aXNvbg==","LkRhdGE=","eWM=","IHBvdA==","IFRydW1w","aWR1YWw=","aWRlcw==","cnQ=","IHByb3BlcnR5","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","YW1ld29yaw==","Z28=","IGxvdw==","IHBhcmE=","IHByaWNl","dXJ5","IHRvZGF5","cm95","ICcv","IHBvbGl0","ICcn","eW1i","UGg=","IGFkdg==","IGF0dGFjaw==","IFN0ZQ==","Uk9N","NDAw","YW5h","IG1lYW5z","IHN0b3J5","aWRz","YWtlbg==","IG1lZXQ=","IG1vbQ==","IOKAmA==","ID8+","IGRlbg==","b2JpbGU=","Y2hhbmdl","ICAgICAgICAgICAgCg==","aWNp","bmE=","IEZvcm0=","IHNvcnQ=","U2VsZWN0","cGFyZQ==","IHRob3VnaHQ=","X2Nvbg==","IHRhc2s=","b2N1cw==","IERF","IE1pbg==","IG9wdA==","CWJyZWFr","dW1lcg==","S0U=","dGhlbg==","IGRldA==","IFRlc3Q=","cG9ydHM=","IHJldmlldw==","KCcv","bW92ZQ==","IHN3aXRjaA==","RVJU","cGF0Y2g=","YW5ub3Q=","44I=","IGFib3Zl","aXRpdmU=","NTY=","IHF1ZXN0aW9u","IFF1","44CCCgo=","Z2xl","IHdvcmQ=","IHByb3ZpZGU=","IFJldHVybg==","IHJlc2VhcmNo","w6Nv","dXN0cg==","IHB1Ymxpc2g=","Y2hlbWE=","fX0=","IENPTg==","LWlu","YWxsYmFjaw==","IGNvdmVy","XFw=","Y29sb3I=","IElT","IHdoZXRoZXI=","aW1hdGU=","aXNj","QmFy","IGRpdg==","QmU=","b3Vybg==","IGhhdmluZw==","bGVt","cGxheWVy","YWJz","YW1lcmE=","bmV5","IGV4Yw==","Z2V0aGVy","cGxpZWQ=","YW8=","WyQ=","ICsr","aXBl","c2hvdw==","L2Q=","Wzo=","YWdlbWVudA==","bGV2","X0lE","OTc=","cmFyeQ==","YWRlcw==","X3Nl","YXVzZQ==","IGVtcGxveQ==","ICovDQo=","IGZyZQ==","ICdA","IGNvbXBsZXQ=","IGxhcmdl","cmFs","XHg=","IGZhYw==","PFN0cmluZw==","IGNy
ZWF0ZWQ=","dXBlcg==","LnN0YXRl","IGhvc3Q=","ZW5lcmlj","L2I=","KCE=","d2hpbGU=","aWFz","QlVH","ICk7Cgo=","IHJvbGU=","UmVn","IENvbG9y","U3RhcnQ=","IHBvcm4=","dG9w","IHdlYg==","IGRldg==","IGRlYWw=","KyspCg==","SW50ZWdlcg==","cG9zaXRpb24=","Lm9u","ICgi","5Lg=","IHByb2JsZW0=","c3Y=","IHByZXNz","QUJMRQ==","QVRJT04=","IFNlZQ==","YW5jaA==","IHRob3VnaA==","bGVlcA==","IDwhLS0=","IHBvaW50cw==","ICAgICAgICAgICAgICAgICAgICAgICAgIA==","Lko=","IDo6","cHRy","REI=","Kys7Cg==","LnBuZw==","bm9kZQ==","c29mdA==","cG9uZA==","IGV2ZXI=","LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ==","TWVudQ==","KCcj","IHNlcnZpY2Vz","cGc=","fSkK","cGFyYW1z","IGFjdHVhbGx5","ICIv","RW1wdHk=","TWV0aG9k","IGlkZW50","dW5pYw==","IG1pbGxpb24=","IGFmZg==","c3R5bGU=","IGNvbmM=","aW9z","aWdubWVudA==","VUxU","UHI=","IjsNCg==","IHVuZGVyc3RhbmQ=","dWFyeQ==","IGhhcHBlbg==","IHNlcnZlcg==","IENv","U0M=","IGxlcw==","IGZpbGVz","R3JpZA==","c3Fs","IG9mdGVu","IGluZm8=","X3Ry","c3Jj","b255","IHNwYWNl","dW1i","IHBhc3N3b3Jk","IHN0b3Jl","LAoK","IFdoYXQ=","Z2Vk","IEZhbHNl","VXM=","c3dlcg==","X2luZGV4","IGZvcm1hdA==","bW9zdA==","c20=","TmV3","IGRldGFpbHM=","IHByb2I=","IEFORA==","KCkNCg==","aWxhcg==","ICR7","cnlwdA==","LkNvbGxlY3Rpb25z","JHRoaXM=","IEZyZWU=","X29m","KGZhbHNl","ZGF0ZWQ=","ID4+","IGZhY2U=","Q1RJT04=","IHNhdmU=","IHR5cA==","ZGV2","KCIj","QUdF","Y29udGFpbmVy","ZWRpdA==","UUw=","IGl0ZW1z","IHNvY2lhbA==","aWVu","IFJlYWN0","KS4KCg==","IG1hcg==","IHJlZHU=","IFJF","LnB1dA==","IG1ham9y","Q2VsbA==","bmV4dA==","IGV4cGVjdGVk","IHlldA==","IGluZGl2","dHJpYnV0ZXM=","YXRpcw==","YW1lZA==","IGZvb2Q=","U291cmNl","KHN0cmluZw==","ICsK","aXRlcw==","ZHI=","IG1lbWJlcnM=","IGNvbWI=","aXRlbXM=","IFBlcg==","VEg=","PVRydWU=","IGJhcg==","X1NF","Y29tbQ==","KHc=","KQoKCg==","IHNlbmQ=","IGluYw==","dW5zaWduZWQ=","RkE=","IHBhcmFtcw==","YXBwaW5n","cm9z","dWdpbg==","ZmE=","IGNvbm5lY3Rpb24=","IH07Cgo=","IGJlY29tZQ==","TW9kZQ==","IGV2","IGRpZmY=","IFVuaXRlZA==","SGVpZ2h0","ZnVsbHk=","aW1hZ2Vz","IG1ha2Vz","IGdsb2J
hbA==","IGNvbnRhY3Q=","JzoK","IGFicw==","0LDQ","ZmxvYXQ=","IGV4Y2VwdA==","IFBvbA==","Q2hpbGQ=","dHlw","IGNlcnRhaW4=","acOzbg==","T1VU","IGltcHJv","aWxlcw==","IC0tPgo=","IFBhcnQ=","dmFsdWVz","b3Nz","Lyoq","aWxpdA==","IEV2ZW50","Y3VyaXR5","c3Rlcg==","IGNoYXJhY3Rlcg==","MTk4","IG5ld3M=","ICIs","IGRldmljZQ==","Y2Vs","bG9naW4=","aGVldA==","RGVmYXVsdA==","QCI=","CSA=","Y2xpY2s=","KHZhbHVl","IEFi","IHByZXZpb3Vz","RVJST1I=","b2NhbA==","IG1hdGVyaWFs","IGJlbG93","IENocmlzdA==","IG1lZGlh","Y292ZXI=","IFVJ","IGZhaWw=","IGJsYWNr","IGNvbXBvbmVudA==","IEFtZXJpY2Fu","IGFkZGVk","IGJ1eQ==","c3RpdA==","IGNhbWU=","IGRlbGV0ZQ==","cHJvcGVydHk=","b2Rpbmc=","IGNhcmQ=","cm9wcw==","IGh0dHBz","IHJvb3Q=","IGhhbmRsZQ==","Q0M=","QmFjaw==","ZW1wbGF0ZQ==","IGdldHRpbmc=","X2J5","bWFpbA==","X3No","LmFzc2VydA==","IERlYw==","KHRydWU=","IGNvbXB1dA==","IGNsYWlt","Jz0+","IFN1Yg==","IGFpcg==","b3Bz","bmF2","ZW1lbnRz","KGlk","IGVudGVy","YW5nZWQ=","RW5k","IGxvY2F0aW9u","IG5pZ2h0","IGRvaW5n","IFJlZA==","bGlu","fQoKCg==","dmlkZXI=","IHBpY2s=","IHdhdGNo","ZXNzYWdlcw==","IGh1bWFu","IGRhbQ==","cGVuZA==","ZGly","IHRheA==","IGdpcmw=","cmVldA==","IGJveA==","IHN0cm9uZw==","KHY=","cmVs","IGludGVyZmFjZQ==","IG1zZw==","ZmVjdA==","X2F0","IGhvdXNl","IHRyYWNr","Jyk7Cgo=","amU=","IEpvaG4=","aXN0cg==","KFM=","dWJl","IGNl","aXR0ZWQ=","VkVS","Kik=","cGFyZW50","IGFwcGxpY2F0aW9u","YW55","LnN3aW5n","IHBhY2s=","XHU=","IHByYWN0","IHNlY3Rpb24=","Y3R4","IHVuc2lnbmVk","LlBvaW50","IE9uZQ==","xLE=","aXBsZQ==","YWlk","0YM=","VmVjdG9y","Ynl0ZQ==","IHdhaXQ=","IMOg","w6U=","IHRvZ2V0aGVy","IHRocm93cw==","Rk8=","Jykp","aG9zdA==","aXNpbmc=","LnZpZXc=","IHRlcm1z","ZnJhbWV3b3Jr","LXI=","IGFwcGx5","IHNlc3Npb24=","T3B0aW9ucw==","dWdnZXN0","IG90aGVycw==","d2l0dGVy","IGZ1bmQ=","SW5pdA==","X18o","ZW5zb3I=","R0VU","IHNldmVyYWw=","aWk=","W2o=","SU8=","IHRlbXBsYXRl","UG9zaXRpb24=","IGVjb24=","YWNoaW5l","IGls","LnNwcmluZw==","bWFpbg==","ZWx0","aW1lbnQ=","UmVj","bW0=","IFVuaXZlcnNpdHk=","dXJzb3I=","ICAgICAgICAgICAgICAgICAgICA=","R0w=","aWN0dXJl","aXRodWI
=","Y2Vy","Y2FzdA==","RnJvbQ==","YWxlcw==","IHN1YmplY3Q=","cGFzc3dvcmQ=","bnk=","IGVzYw==","LndyaXRl","77yM","V2hhdA==","Lkg=","IGhpc3Rvcnk=","IEZl","IGluZGl2aWR1YWw=","dW5pdA==","IC0tPg==","IGR1","SVNU","IHVzZXJz","ZnM=","ZmFsc2U=","dW50","VGl0bGU=","IG1vdA==","IGZ1dHVyZQ==","YWNoZWQ=","IHN0YXJ0ZWQ=","IG1vZGU=","ICc8","X2FycmF5","IGF4","J107Cg==","aXJlcw==","VGhlcmU=","dWdodA==","dG1s","cG9zZWQ=","aWN1bHQ=","IHRvb2s=","IGdhbWVz","IH19","ID8+Cg==","IHByb2R1Y3Rz","SXM=","IGJhZA==","IERlcw==","LnBhdGg=","JwoK","IFBvc3Q=","YXZlbA==","KDo=","MTUw","IG5lZWRz","IGtub3du","Rmw=","IGV4ZWM=","IHNlZW4=","NTE=","dW1l","IGJvcmRlcg==","IGxpdmU=","dGVtcA==","UGVy","IHZhcmlhYmxl","aWV0","IERlZg==","IGdl","ZW1l","X2JhY2s=","Zmlyc3Q=","IHByb3ZpZGVk","Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8=","IGZpbGVuYW1l","IGhvcGU=","dWx5","YXV0bw==","ZmluZA==","X3N0cmluZw==","YnRu","aXR1ZGU=","QXR0cmlidXRl","IHlvdW5n","LnR4dA==","IHdlYnNpdGU=","IFByb3A=","IGV5","PigpOwo=","aW9uYWw=","QVJS","aWN0aW9uYXJ5","dXJ0aGVy","Ljwv","QUxM","IHN0dWR5","aWxp","IG5ldHdvcms=","eWw=","aXN0YW5jZQ==","T0s=","TlU=","cmVzdA==","IFNU","aWNyb3NvZnQ=","IGxpbWl0","IGN1dA==","KCk6Cg==","IGNvdQ==","b2du","IHNpemVvZg==","aXZhbA==","IHdlbnQ=","Lno=","TGluaw==","IGZpcmU=","IGFjcm9zcw==","IGNvbW11bml0eQ==","cmVnaW9u","TkU=","UmVm","IG9mZmljaWFs","IHZpc2l0","b2x2ZQ==","IHJlY2VpdmVk","IHRva2Vu","IG1vbnRocw==","IGFuaW0=","IHBhcnRpY3VsYXI=","c3R5bGVz","aWNv","IGVzcw==","ODc=","LkNvbnRyb2w=","IMOp","YmFsbA==","IGxlYXJu","aW5kaW5n","VmFy","IGRlY2w=","KGVycg==","TEVDVA==","T25l","cGhh","IH4=","Zm9ydA==","YXN1cmU=","IG1pbmQ=","IEVuZA==","Q2hlY2s=","IHF1aWNr","Iiks","QU5E","dXRpb25z","QmFzZQ==","X19fX19fX18=","IGNvbW1lbnQ=","SU5F","4oCZdmU=","QnV0","IEVs","IFVz","IGFkbWlu","bWFyaw==","IE5hbWU=","YAo=","IFR5cGU=","YW1pYw==","cGM=","bG9vcg==","RlQ=","IG9wcA==","Y2tldA==","KS0+","dHg=","IHB1cg==","dWVs","eW1ib2w=","dWF0aW9u","YW5nZXI=","IGJhY2tncm91bmQ=","ZWNlc3M=","ZWZpbmVk","Li4uLi4uLi4=","IGRlc2NyaXB0aW9u","IHJlcHJlc2VudA==","IikpO
wo=","cHJlc3Npb24=","cm93c2Vy","IHNlcmllcw==","d2FyZHM=","NTI=","KCRf","YWlzZQ==","IGhvdA==","YWNpdHk=","cmllcw==","YWN0aW9ucw==","Q3JlYXRl","YWRpbw==","YW1wbGVz","IG9yaWdpbmFs","ZW5zaXZl","Zm9udA==","c3RyZWFt","77u/dXNpbmc=","LnNwcmluZ2ZyYW1ld29yaw==","MDAx","c2VydmVy","IGJpbGw=","QUNL","aWxlbmFtZQ==","IGZyYW1l","ID0K","RWRpdA==","YWRpdXM=","IGRyYXc=","YW5rcw==","IGRldGVy","IGNvbWVz","X2ludA==","IGZvcmVhY2g=","YW5nbGU=","IGVsZWN0","cGVjdGVk","SGVhZGVy","aXN0cmF0aW9u","RmFsc2U=","IEdhbWU=","IGZpbHRlcg==","QWN0aXZpdHk=","IGxhcmc=","aW5pdGlvbg==","ICI8","MjU2","aXNlZA==","IHJlbW92ZQ==","IFRyYW5z","bWV0","c2Vl","Rm9ybWF0","Q29tbWFuZA==","IEVY","Tm9uZQ==","IGZyb250","QVNF","IFJlYw==","b3VuZGF0aW9u","IHZv","OTY=","PVwi","KCo=","Q2hhbmdl","LldyaXRl","Z3JvdXA=","aWVudHM=","dXk=","KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg==","IGRpZw==","aHI=","KC0=","IGdlbg==","bnVtYmVy","dmVj","dXJvcGU=","ZW50cnk=","TEw=","IHN0ZQ==","VmFsaWQ=","J10s","X3BhcmFt","IHNlbGVjdGVk","IGFjY29yZGluZw==","IERpcw==","IHV0aWw=","QnVmZmVy","X2Vycm9y","IGFzc29jaQ==","X1NJWkU=","IHdvcg==","IHByaW50Zg==","cmFn","wqA=","REQ=","IFZhbA==","IGFjdGl2","RW5n","ZXRpbWU=","IHZpcnR1YWw=","YWlnbg==","YXVy","IFByZXM=","IEV4Y2VwdGlvbg==","IGFueXRoaW5n","IE9mZg==","IGhvdXJz","IHdhcg==","QXJncw==","YWdpbmc=","IG1vZGVscw==","IFRpbWU=","T2I=","YW1z","am95","IGVhcmx5","LnJlYWQ=","ODY=","IGNlbnRlcg==","IEluaXRpYWw=","IGxhbmd1YWdl","bGVuZ3Ro","eHk=","IHNu","IGluZg==","UG9zdA==","IGFnbw==","IGVhc3k=","X2NvZGU=","IEFOWQ==","X2No","IGRvd25sb2Fk","KFQ=","YXZlZA==","4oCT","IHN0dWRlbnRz","IGZpZw==","bGlnaHQ=","eHg=","IGJ1ZmZlcg==","IERlcA==","IE1hdGg=","SVRI","IHZhcmk=","IGR1ZQ==","RmFjdG9yeQ==","IHBvcg==","IGVw","b3R5cGU=","IGNhbm5vdA==","IHdoaXRl","PGludA==","dGVybg==","IHJlZ2lzdGVy","IHByZWQ=","Y2x1cw==","X2RhdGU=","IC8qKg==","IGF1dGg=","IFtdCg==","IHBlcmlvZA==","bm93bg==","IHZvdA==","IHNjcmVlbg==","J2Q=","VHlwZXM=","IHRtcA==","0LXQ","dXJhbA==","IGJlbmVm","X3k=","IG5ldA==","IFN0YXRl
cw==","J11bJw==","IE5l","IE5PVA==","IG5lZw==","MTAy","IGNvbW1vbg==","c2NvcGU=","IGNyZWQ=","Z2Vz","X1RZUEU=","IHN1Z2dlc3Q=","b29t","LgoKCg==","IGFjY2VwdA==","IHJhbmRvbQ==","ZXJt","IFZlY3Rvcg==","d2l0aA==","VEVS","KHN0cg==","IHJlc3BvbnM=","IGhpdA==","LlNldA==","Z3JpZA==","cmlh","IGNsaWNr","dW5kbGU=","Q2FzZQ==","aW5zZXJ0","VXRpbHM=","ICIiIg==","IGltcGxlbWVudA==","YXRhbA==","dGVtcHQ=","dGVtcGxhdGU=","b2Ny","cmV0dXJucw==","IHBsYXllcnM=","dXNlcnM=","ZWRlZg==","IFRoZXNl","IGFtb25n","IGRlYg==","aGE=","LmdldEVsZW1lbnQ=","IGNpcmM=","IGFuc3dlcg==","IHdhbGs=","IHRyZWF0","IEdl","IENyZWF0ZQ==","IGFnZQ==","IHJlcQ==","T1NU","YW5ndWxhcg==","0Y8=","IGZpdmU=","NTM=","IGRpc3RyaWJ1dGVk","IGZyaWVuZA==","VFA=","IGNsZWFu","b3dz","LkNvbnRyb2xz","ZGlz","IHdvcmRz","Lmlv","enk=","IGhlYWRlcg==","IENoZWNr","4oCZbQ==","anVzdA==","aG9sZGVy","PSI8Pw==","IEdOVQ==","IENvbA==","aW1lc3Q=","ZW50aWM=","ewoK","IHRyZQ==","bGFzdA==","bGE=","IFlvcms=","TG8=","IGRpc2N1c3M=","IEdvZA==","IGlzc3Vl","cmV3","V2luZG93","IGxhbmQ=","MTIw","IHN0cmVhbQ==","IFBhcg==","IHF1YWxpdHk=","UGFy","X251bQ==","NTQ=","IHNhbA==","ZWx2ZXM=","T1JE","KHVzZXI=","IHdvcmtz","IGhhbGY=","ZW5zZXM=","dmFz","IHBvbGljZQ==","KCIv","dWE=","IHNpbXBsZQ==","QWRkcmVzcw==","IGVtcHR5","ZXNo","MTI4","VXBkYXRl","IENyZWF0ZWQ=","KCcu","KS4K","ICAgICAgICAgICAgICAgICAg","IGFncmU=","IEZST00=","IGNvb2s=","IGV2ZXJ5dGhpbmc=","aWxpdGllcw==","LnN0YXR1cw==","IHJlbGF0aW9ucw==","ZXh0ZXJu","IG5vdGhpbmc=","IHJ1bm5pbmc=","CXZvaWQ=","Ukk=","X2E=","X0NPTg==","cG9y","LnN1Yg==","cmVxdWlyZQ==","IENpdHk=","IFdlc3Q=","IG1vcg==","c3RvcmU=","RXF1YWxz","b2Rlcg==","IG5h","IFtb","ICgn","IERvbg==","RVJT","L3A=","Lmpzb24=","YWJvcg==","IHNvbWVvbmU=","X3RleHQ=","LmNzcw==","LlRhYg==","IFNvbWU=","YXRv","ZG91Ymxl","IHNoYXJl","KHZvaWQ=","X2Rpcg==","IHVy","U3RhY2s=","IFdvcmxk","Llg=","c3RyYWN0","SG93","LkdlbmVyaWM=","aWNsZXM=","IGVudHJ5","IGNoYW5nZXM=","IHBlcnNvbmFs","KEE=","IG9mZnNldA==","X3B0cg==","IHBpZQ==","IEphbg==","LWdyb3Vw","bW9kdWxl","SXRlbXM=","IEhvd2V2ZXI=","dmVyYWdl","LkZvbnQ="
,"IGV2ZW50cw==","Lm1pbg==","IGludm9s","emE=","IHdob2xl","IG5lZWRlZA==","IGxpa2VseQ==","cmllZg==","T1JN","dmVyc2lvbg==","IGZpZ2h0","IGVpbg==","RnJhbWU=","MTk3","Z2Vu","IE91dA==","YXZpZ2F0aW9u","TGVuZ3Ro","aWxsZWQ=","cXVlbmNl","ICE9PQ==","IFNvZnR3YXJl","IHdyaXRpbmc=","IHJhdGU=","J10sCg==","UGFuZWw=","aW5uZXI=","IFsi","IHR3","Y2Q=","IDsK","X3N0YXRl","IFNt","IE1hcms=","KSkKCg==","cHJvdA==","IE1y","bWV0aG9k","dXN0b21lcg==","SWNvbg==","IGNvcnJlY3Q=","KG9iamVjdA==","IE1vcmU=","IGZhbGw=","IHZvbA==","IGRldmVsb3BtZW50","ZW50bHk=","IHNp","bWVkaQ==","dmluZw==","UFA=","YWtlcg==","IGluZHU=","IGVsaWY=","IHByZXQ=","IGJlbGlldmU=","bnM=","b21ldA==","MTIz","IEludGVybg==","UmVjdA==","U28=","LmVycm9y","UmVhZA==","IGZlYXR1cmVz","IG1pbnV0ZXM=","LS0t","YXNpbmc=","Y3JldA==","Ij4NCg==","LmFubm90","IGNvbGxlY3Rpb24=","Jy4=","IHNpbWlsYXI=","IHRha2Vu","KCIl","T3JkZXI=","J10K","LW1k","IFRI","YWNlZA==","IGlzbg==","L2o=","IHNvbg==","Z3JhcGg=","IEludGVnZXI=","IG5lY2Vzcw==","cmVlbg==","IHVt","IFw8","IG1vbWVudA==","IGJyaW5n","IGluZGlj","eXNpcw==","TGV2ZWw=","dmVyc2U=","dXJyZW5j","X3Rlc3Q=","IGVudGlyZQ==","RG93bg==","IH0KCgo=","KHJlc3VsdA==","IFJlYWQ=","w6g=","TW9k","IHRyeWluZw==","IiksCg==","IG1lbWJlcg==","IENvcg==","T0RP","LWNvbnRyb2w=","dW50aW1l","IFNpbQ==","RGlhbG9n","cGxvdA==","X29u","IHBoeXM=","fS8=","IG5hbWVzcGFjZQ==","CQ0K","YWNj","UGxheWVy","QVJF","ODk=","IGZvb3Q=","IGJvYXJk","cGFydA==","IHN1cw==","d2lzZQ==","IE1j","IHB1c2g=","QVRB","IHBsZWFzZQ==","cmllZA==","d2VldA==","Yml0","aWRlZA==","VkU=","IFN3","VUI=","IHR5cGVz","ZWRpYQ==","IGNsb3M=","YWNlYm9vaw==","V2hlbg==","IGVkaXQ=","aWdnZXI=","IGVuZXJn","Q29udGFpbmVy","IHBob3Q=","IENvdW50","IEV1cm9wZQ==","Lklz","IFJ1c3M=","cGVlZA==","IFN0cg==","IHB5","IGN1bHQ=","IGRlZmluZWQ=","Y2NvdW50","IG9idA==","LkxvY2F0aW9u","IHRocmVhZA==","aWxsZQ==","IGluc3RlYWQ=","c3Ryb25n","IFNlYw==","VVJF","IGlkZWE=","LnNl","ZW15","c2VsZWN0ZWQ=","Q29ubmVjdGlvbg==","YWNpbmc=","dGhyZWFk","Lm5leHQ=","IGNvbGw=","IGZpbG0=","aXN0aWM=","IGNvbXBldA==","IGNvbm4=","dGhvdWdo","IGNvbXBh
bg==","b2NrZXQ=","IHRlYWNo","PSg=","IHBob25l","IGFjdGl2ZQ==","Nzk=","ZGVsZXRl","MTAx","dHJpZXM=","IG1v","IGRlYXRo","fSk7Cgo=","b2NvbA==","V2lkZ2V0","IGFydGljbGU=","cm9kdQ==","YW5kaWQ=","0Ys=","IENy","a2E=","KCk6","bG9vZA==","CQkJCg==","IGFsbW9zdA==","IHNlbGw=","ZXJ2bGV0","cmlw","VW5pdA==","IGFwcGxpYw==","IGNvbm5lY3Q=","IGZlYXR1cmU=","IHZpYQ==","Jyks","IGxpbQ==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","IEd1","RW5naW5l","IGVucw==","IGVudmlyb25tZW50","YmxvY2s=","SEVSRQ==","TlVMTA==","Z3k=","dGFn","KSku","ZXhw","IGNvbXBs","IGluc3RhbGw=","IGNvbXBsZXRl","cXVldWU=","YXR1cmFs","IGdlbmVyYWw=","dGhvbg==","IGFza2Vk","b3Jlcw==","KHJlcw==","IHJlc2VydmVk","U1A=","IOKApg==","xYI=","IHNpZ25pZmlj","T2Zm","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","IEFn","IEp1c3Q=","IEVycm9y","IGluZmw=","YWRhdGE=","IGljb24=","YXNrcw==","Jyc=","X0xP","Py4=","YWNjb3VudA==","ICgq","JykKCg==","cmFw","X3Zhcg==","IEZPUg==","IHBhcnR5","IFlvdXI=","Y2F0","c3RyeQ==","Lm5ldw==","Ym9vdA==","IE5vdg==","IHZlY3Rvcg==","IG5vcm1hbA==","IGZ1cnRoZXI=","UmVwb3NpdG9yeQ==","ODAw","IGRhdGFiYXNl","YXR0bGU=","IG11c2lj","IHNwZWVk","IGRvYw==","cHJvY2Vzcw==","SUdIVA==","LnBhcnNl","IHRha2luZw==","IHZpb2w=","Y2VlZA==","IEFmdGVy","IGZvcndhcmQ=","IGNyaXQ=","Ii8+Cg==","cm90","IGZhaWxlZA==","ZWZvcmU=","IGNvbmNlcm4=","b2U=","YmE=","IHNlbmRlcg==","IHRlcm0=","aGFz","PSIj","IHBvdGVudGlhbA==","TnVt","IHB1Ymxpc2hlZA==","LmNsb3Nl","IEltYWdl","c3RyYWludA==","VUQ=","IE9i","IHByb2JhYmx5","bGlt","IjoK","b2x1bWU=","IGNvbnN1bQ==","NzY=","YWd1ZQ==","ZW5zaW9ucw==","IGludmVzdGln","LXllYXI=","Jyk7","LXNt","IGVuam95","b3JpZw==","ZXJpbmc=","Y3A=","bGVhc2Vk","cGxlbWVudHM=","IHJldHVybnM=","cGF0","Qk8=","IEhvdXNl","LkxhYmVs","IHdlaWdodA==","aWdoYg==","IGNvbmRpdGlvbnM=","IGV4Y2VwdGlvbg==","ZGVzY3JpcHRpb24=","IHRyYWQ=","LXRv","IHt9","IG1vZHVsZQ==","RU5E","LmFw","LnByb3Bz","IGNvbnN0cnVjdG9y","YXZlcw==","IGZhdm9y","IE5vdw==","O2k=","IE1haW4=","X2s=","ZXJpZXM=","4oCZbGw=","dHJhbnNmb3Jt","aW1lc3RhbXA=","UHJl","IG1lcg==","LnJlcw==","c3
RhbnQ=","TG9jYXRpb24=","X05BTUU=","IGxvc3M=","IAoK","bmV0","IGVuZ2luZQ==","QmxvY2s=","IGlzc3Vlcw==","IHBhcnNl","IEJhcg==","IHN0YXk=","IEpTT04=","IGRvbQ==","YWlycw==","d25lcg==","IGxvd2Vy","IiwNCg==","IERlbQ==","dWZhY3Q=","IHBz","IHBlcmZlY3Q=","Ukw=","IGVkdWM=","bHM=","ZW1vcnk=","QVJSQU5U","dWdl","IGV4YWN0","LmtleQ==","YWxsZWQ=","ZWNo","aWVm","XC8=","b2tl","IGZvcm1lcg==","YWxsb2M=","IHNpeA==","aWRh","IG1hcmdpbg==","IGhlYXJ0","YWxk","cGFjaw==","LmdldEVsZW1lbnRCeUlk","IFdBUlJBTlQ=","IHJhdGhlcg==","IGJ1aWxkaW5n","ZXJtYW4=","bGljZQ==","IHF1ZXN0aW9ucw==","aXplcw==","bGVnZQ==","aXJlY3Rvcnk=","IGpl","IGNhcw==","cHJvcHM=","dXRm","IHNlY3VyaXR5","IGhvd2V2ZXI=","d2VpZ2h0","IGluc2lkZQ==","IHByZXNpZGVudA==","Q2hhcg==","IFdJVEg=","Lm1hcA==","IGdyYXBo","IHRhZw==","X3N0YXR1cw==","IGF0dGVtcHQ=","b3Bw","dXNlcw==","CWNvbnN0","IHJvdW5k","LCQ=","IGZyaWVuZHM=","RW1haWw=","Pz4=","UmVzb3VyY2U=","S0VZ","b3Nw","LnF1ZXJ5","IE5vcnRo","YWJsZXM=","aXN0cmli","X2NsYXNz","ZWxsbw==","VGhhdA==","0Lo=","cGVjaWFsbHk=","IFByZXNpZGVudA==","IGNhbXBhaWdu","IGFsdA==","YXJlYQ==","IGNoYWxs","IG9wcG9ydA==","LkNvbg==","IGVuZXJneQ==","bGlrZQ==","LnN0cmluZw==","aW5ndG9u","KSo=","eXk=","IHByb2Zlc3Npb24=","aXJ0aA==","IHNlZw==","5pw=","IGhvcg==","aWVycw==","Y2Fu","IGJlaGluZA==","UHJvZHVjdA==","Zmc=","IFNr","LmpwZw==","Pzo=","XTsKCg==","IGNhbGxiYWNr","IEh0dHA=","0Yw=","bG9uZw==","TVM=","QVRI","IHJhaXNl","IHdhbnRlZA==","cm93bg==","dXRvcg==","bHQ=","XT0=","ZWxpbmU=","TUE=","IHNlcGFy","Y3M=","c2VtYg==","RGlz","YnNlcnY=","IFdpbGw=","IHBvbGljeQ==","IHRoaXJk","cGhvbmU=","IGJlZA==","L2c=","Ll9f","IEluYw==","aXppbmc=","LnJlbW92ZQ==","aW5zdGFuY2U=","LnR5cGU=","IHNlcnY=","RWFjaA==","IGhhcg==","IE1lc3NhZ2U=","KGtleQ==","U0VMRUNU","UG9z","KSk7DQo=","IHJlY29tbQ==","IHRyYWluaW5n","IEVudA==","IENoYXI=","aWNodA==","KGZpbGU=","IHByaW9y","R2FtZQ==","IGV4aXQ=","UGFyYW1z","LmNvcmU=","UEM=","bmVz","YW5jZWQ=","KHJlcXVlc3Q=","UGFzc3dvcmQ=","fT4K","IG1hZw==","IHJlbGVhc2U=","IHNoYWxs","dWRlbnQ=","IFNvdXRo","YW5kbw==","Oic=","LlRhYkluZGV4","c2
s=","YW5uZXI=","aXNzZXQ=","IG91dHNpZGU=","bGVkZ2U=","IOU=","IFJvYg==","IGltbQ==","IQo=","IFdlYg==","RGVz","QkM=","YW5jaWFs","Um91dGU=","RGVj","ZmVyZW5jZXM=","IHB1cmNo","IE1vZGVs","Y3Rvcg==","Z24=","X3N0YXJ0","X3Vu","Lio=","aXNlcw==","IGdyb3VuZA==","IHVuaXF1ZQ==","IGJlYXV0","eyI=","IHBvdXI=","IE9jdA==","IHRyZWU=","c2V0cw==","X3Jlcw==","JyktPg==","X3JlZw==","KCJc","IGJ5dGU=","Qmw=","IGRhdGluZw==","IG1hdHRlcg==","IFJlbQ==","ICcuLi8=","IEF1Zw==","IExh","ICQo","b3VybmFs","MTEx","aWFt","IHNob3dz","d3JpdGU=","IGJhbGw=","IHNpbXBseQ==","IGZhc3Q=","IG1lbW9yeQ==","QVNT","IE9m","b3ZlZA==","YW50ZQ==","YXVs","aXN0cnk=","KSkpOwo=","IGZpdA==","PHN0cmluZw==","IHBvbGl0aWNhbA==","YW5jZWw=","Xy4=","Y2FyZA==","LmN1cnJlbnQ=","b2No","X2ltYWdl","XHQ=","Iwo=","KEw=","IGluZHVzdHJ5","Y29taW5n","IGV4dHJh","NjAw","IHJlcG9ydGVk","LnN0YXJ0","IHJlc291cmNlcw==","IGltZw==","Zmxvdw==","X0VY","KG51bGw=","IFByZQ==","IHdyb25n","aW50ZXJmYWNl","UGFyYW1ldGVy","bmVycw==","4bs=","dHVyZQ==","ZXJzaXN0","b3VudHJ5","IHNlZW1z","YWxhbmNl","ZGVzdA==","CVN0cmluZw==","IG1haW50","IHVuaXQ=","YWN0ZXJz","IFRS","aWZ1bA==","ZXhwb3J0cw==","cHJvamVjdA==","QXBwbGljYXRpb24=","bGVnYXRl","IHRha2Vz","dGVybQ==","IGV0Yw==","dXN0ZXI=","IGFwcGVhcg==","YWRkcmVzcw==","IGZlbQ==","aHM=","IGhvbQ==","LC0=","IGRpZmZpY3VsdA==","IGNvbWluZw==","T3Blbg==","IHNldHRpbmdz","IFdhcg==","IFRoZW4=","IGF1dG9t","IEZvdW5kYXRpb24=","IHF1aXRl","RGVzY3JpcHRpb24=","IGJsb2c=","aXF1","UFM=","MTEw","X2ZpZWxk","SnNvbg==","U1NJT04=","IFNjaA==","IExP","IGRlc2NyaQ==","IGV2ZXJ5b25l","IHByZXR0eQ==","IGxvbmdlcg==","IG1lbnU=","IGN1cnJlbnRseQ==","c2Vj","IHJlbGF0aW9uc2hpcA==","IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyM=","IE1hcA==","YXNldA==","IHBhcmFtZXRlcnM=","IGNydXNo","Ig0K","SUxJVFk=","aWdyYXRpb24=","IGNvdXQ=","dG90YWw=","IG5hbWVz","bmRlZg==","Iik7","cmllbmQ=","eW5hbWlj","IGVmZm9ydA==","IGFjdHVhbA==","IGZpZWxkcw==","T1VO","dGVycw==","MjUw","IGZpeA==","X21vZGVs","IGNhc2Vz","Q0E=","TXk=","SW50ZXJmYWNl","IFNF","MTk2","XV0=","YWxsZQ==","IE5hdGlvbmFs","IEFycmF5TGlzdA=="
,"aW5saW5l","LlY=","YXJh","cmVmaXg=","YXNj","UmVhZGVy","INC/","YXN0aWM=","KCgp","Q2w=","LmFubm90YXRpb24=","IHBlcmZvcm1hbmNl","YWlseQ==","LnRvU3RyaW5n","Lm5ldA==","dmlld3M=","LmVuZA==","YXllcnM=","bGF0ZQ==","IEFwcg==","ZWRlcmFs","J10p","LmJvZHk=","IGhpZ2hlcg==","X2Zs","Y3I=","YWxlcnQ=","X25vZGU=","IEdvb2dsZQ==","IGl0c2VsZg==","QXV0aA==","dXJyZW5jeQ==","IHNpZ25pZmljYW50","YXBwZW5k","IHJlc3BlY3Q=","c3RyYXA=","IHVuYQ==","cml0ZXJpYQ==","UE9SVA==","LmFwYWNoZQ==","T3V0cHV0","IHByb2dyZXNz","IG1pZA==","IE1pY3Jvc29mdA==","IHJlc291cmNl","YWJsaXNo","IGRpbQ==","LmxvYWQ=","LkFwcA==","IGRpcmVjdGlvbg==","IGFkZGl0aW9uYWw=","ICAgICAgICAgICAgICAgICAgICAgICAg","IG51bWJlcnM=","IGNvbXBhbmllcw==","LlRo","IHNvdW5k","dXNlcm5hbWU=","IHN0YXRlbWVudA==","IGFsZXJ0","IGNvbnRyYWN0","aG9tZQ==","X2xlbmd0aA==","LkNvbXBvbmVudA==","ZXY=","LkV4","77ya","Ijs=","IEhpZ2g=","ICkKCg==","IFBvaW50","b3Bo","IGxpbmVz","LT5f","IikKCg==","b3g=","YXBwbGljYXRpb24=","IF0K","CgoKCgoK","MTgw","IHNvb24=","Y3Rpb25z","aW5nZXI=","IGpvaW4=","IFBl","IOs=","IGxhcw==","LkU=","Y3Nz","L29y","IFN0YXJ0","IFRP","IHN1YnM=","Y29ubg==","Y29tcG9uZW50cw==","REVCVUc=","cXVhcmU=","RnVuY3Rpb24=","ZW5kYXI=","LmluZGV4","IGZpbGw=","xJk=","IGNob29zZQ==","aG93","IEFtZXJpY2E=","YXNzZXRz","LS0tLS0tLS0tLS0t","IFZhbHVl","IG9mZmljZQ==","IHZlaA==","IHRyYW5zZm9ybQ==","IEFydA==","IGluZGU=","IGZu","IGltcGxlbWVudHM=","YW5nbw==","cGxldGU=","KyI=","dG1w","YW1pbHk=","IGhhc2g=","bWlzc2lvbnM=","RVNU","Z3Q=","UHJvdmlkZXI=","ICAgICAgICAgICAgICAgICAgICAgIA==","IGZsYWc=","IHBhcnRpY2lw","ZGVu","IFJldHVybnM=","IG5vdGU=","w7xy","cG0=","aWRlb3M=","IHNwZWNpZmllZA==","IEVO","ZXN0ZXI=","b2xpZA==","IHVwb24=","KHN0ZA==","CXY=","ICdc","dXo=","IHZlcnQ=","IHZpY3Q=","CXNlbGY=","ICIk","ODU=","Lms=","IGdyb3Vwcw==","Z2l0aHVi","bGFuZw==","IG11dA==","VE8=","IHZl","IFBsZWFzZQ==","OwoKCg==","YWNjZXNz","IHsi","cmVh","IHJpc2s=","aWNrZXI=","b2dnbGU=","CXdoaWxl","QU5H","LnNlbmQ=","NzI=","IHdvbWFu","IGdldHM=","IGlnbg==","IElk","X2xvZw==","T05F","IGV2aWQ=","IEhhcg==","X3N1Yg==","IGVuZGw="
,"IGluY2x1ZGVk","KCkpOwoK","IEFw","aWdy","IHNlbQ==","IEJsYWNr","ZG9j","X3RhYmxl","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","LXVw","IGNhdXNl","IC4u","IHZhbg==","X2RpY3Q=","IGZvY3Vz","SU5E","Q0VTUw==","LkxvZw==","IG11bHRpcGxl","aWRv","IHJlZ2FyZA==","LU0=","YW5kbGVy","b3Vyc2U=","IGRlZw==","LlU=","IGFkZGl0aW9u","IHZhcmlvdXM=","IHJlY2VpdmU=","0LXQvQ==","IEhU","T2Jq","REY=","IGluY3JlYXNl","IE9wZW4=","XTs=","IGNvbW1pdA==","Pwo=","YXRlZ29yaWVz","YXRvcnk=","c2hpcA==","IE1pY2g=","IGh0bWw=","cm9taXNl","IGxlYXZl","IHN0cmF0ZWc=","YXZlbg==","IENvbnNvbGU=","a25vd24=","LW4=","X0xF","LmNvbXBvbmVudA==","IGJyZQ==","U2Vzc2lvbg==","aWFuY2U=","IGFsaWdu","dHlwZWRlZg==","X3Jlc3VsdA==","IFdIRVJF","LnNwbGl0","IHJlYWRpbmc=","RkFVTFQ=","IGNsbw==","IG5vdGljZQ==","X3By","YXJ0ZXI=","IGxvY2s=","IHN0YW5kYXJk","ZXRpYw==","ZWxsb3c=","IHBhZGRpbmc=","IEhpcw==","IHN0YXRlcw==","X2Nhc3Q=","KFA=","YWE=","IGludGVybmFs","ZWFu","IFBSTw==","IEtleQ==","IGVzcGVjaWFsbHk=","bWluZw==","IGNyb3Nz","IG5hdGlvbmFs","X29iamVjdA==","ZmlsdGVy","IHNjcmlwdA==","LnVwZGF0ZQ==","X2k=","IEFzc2VydA==","L2NvcmU=","JSUlJQ==","IHByb2JsZW1z","aXN0b3I=","IC49","IGFyY2g=","IHdyaXR0ZW4=","IG1pbGl0","TUVOVA==","LmNo","Y2FwZQ==","IE11cw==","X2NvbmZpZw==","IEFQSQ==","Zm9vdA==","IGltYWdlcw==","ZW5kbA==","Lklu","Rmlyc3Q=","IHBsYXRmb3Jt","LnByb3Q=","T3B0aW9u","c3Rl","IFRPRE8=","IGZvcmNl","LmNvbnQ=","CWVjaG8=","IERhdg==","UHRy","KEI=","UlQ=","IEJhc2U=","XVsn","IGFubm91bmM=","Y29uc29sZQ==","IFB5","ZHM=","LmFz","IHByZXZlbnQ=","YXBhbg==","IHsn","fTwv","IFNlcnZpY2U=","IFNlbg==","YWRvcg==","cHJvZmlsZQ==","VG9w","IGl0ZXI=","cG8=","SUVT","SlNPTg==","SUU=","aWFudA==","44CB","X2o=","IFNlcHQ=","X21hcA==","YnVt","KGNvbnRleHQ=","IEhvbWU=","aWFucw==","R0I=","NjM=","IGxpdmluZw==","IHBhdHRlcm4=","KGlucHV0","aWNpZW50","OTk5","Q29yZQ==","IGVudGl0eQ==","IGludGVn","Q2hhbmdlZA==","IHVzZWZ1bA==","LmluZm8=","IHRvb2w=","KGl0ZW0=","IG9r","IGZlZWQ=","SVg=","w6lz","IE5ld3M=","cmVtb3Zl","ZXJyeQ==","CQkJCQkJCQkJ","aXBtZW50
","YXJlcw==","RG8=","Q3VycmVudA==","LmNvbnRlbnQ=","Lkdyb3Vw","dXN0cmFs","INGB","fSk=","IHBvcHVsYXI=","IHN0cmU=","IG1ldGhvZHM=","X0VSUk9S","TGVmdA==","Y2Fs","YnNw","LlRvU3RyaW5n","IGRpcg==","IGFsbG93ZWQ=","IGltcGFjdA==","IildCg==","NjI=","LmNvbmZpZw==","IGVsZW1lbnRz","IHByb3Rl","IHRyYWlu","LnRy","cnM=","IFJlcHVibGlj","IFRhc2s=","NjE=","YXJpZXM=","KEQ=","KGdldA==","4oCmCgo=","IHJlbGF0ZWQ=","IHZlcnM=","IHNpbA==","ICIiOwo=","IGNtZA==","IHRlY2hub2xvZ3k=","LndpZHRo","RmxvYXQ=","IFVzZQ==","Qm9keQ==","c2hvdWxk","LmpvaW4=","Rm9udA==","bGx1bQ==","eWNsZQ==","IEJyaXQ=","IG1pdA==","IHNjYWxl","IChf","ZXJuZWw=","IikpCg==","IHNjb3Jl","L3Y=","IHN0dWRlbnQ=","VUM=","LnNob3c=","IGF2ZXJhZ2U=","RW5hYmxlZA==","KGV4","Y29tbW9u","aW1hdGlvbg==","OkAi","Y2hpZQ==","IC4uLgoK","cml2ZXI=","IE1hcmNo","Y2F0ZWdvcnk=","Zmlu","IGNvdXJ0","0LI=","U2VydmVy","IGNvbnRhaW5lcg==","LXN0","X2Zvcg==","IHBhcnRz","IGRlY2lzaW9u","b2Jz","b3Vi","bWl0dGVk","ICQoJyM=","IHNhdw==","IGFwcHJvYWNo","SUNF","IHNheWluZw==","IGFueW9uZQ==","bWV0YQ==","U0Q=","IHNvbmc=","ZGlzcGxheQ==","T3Blcg==","b3V0ZXM=","IGNoYW5uZWw=","IGNoYW5nZWQ=","w6o=","IGZpbmFsbHk=","X251bWJlcg==","UGxlYXNl","4KQ=","b3Jpbmc=","LXJl","IGtpbGw=","IGRydWc=","d2luZG93","IGNvbnZlcnQ=","b21icmU=","IHdheXM=","SGVscGVy","IEZpcnN0","KF9f","dXJpdHk=","IFdpbmRvd3M=","ZWVz","IG1hdA==","cmFwcGVy","IHBsdXM=","YW5nZXM=","Il0u","YXpvbg==","L3Q=","bGF0","YXN0ZQ==","IHByb2ZpbGU=","IHJlYWR5","I2lmbmRlZg==","cm90ZQ==","IHNlbnNl","R2VuZXI=","IENvbmZpZw==","b215","IEp1bmU=","IGxhdGVzdA==","IHNhZg==","IHJlZ2lvbg==","IGRlZXA=","d2l0Y2g=","IFBhcms=","fWA=","IEZyb20=","SUk=","IGN2","IHJlYWNo","IGNvdW50ZXI=","IFdvcms=","IFVSTA==","IFVwZGF0ZQ==","JywNCg==","IGltbWVkaQ==","Y2xvc2U=","YWRvcw==","ZmVycmVk","IHdlZWtz","dXJn","IGRhbWFnZQ==","IGxvc3Q=","YW5p","X2xv","IGhpbXNlbGY=","IGRvZw==","KV0K","778=","cGly","dHQ=","IHBhcGVy","IHRoZW1z","c2Vjb25k","IHN0YWZm","IElucHV0","Iis=","IEZhY2Vib29r","IGFsbG9j","IHNjaGVk","QUNF","IHRoZW1zZWx2ZXM=","IENvbXBvbmVudA==","IGRyaXZlcg==","amE=","KHBhd
Gg=","IGNhdGVnb3J5","YWxscw==","cHU=","bGx1bWluYXRl","IEFjdGlvbg==","LmJ1dHRvbg==","IEdM","aXN0aWNz","IG9pbA==","IHN0b2Nr","Pic=","IGRlYWQ=","VkFM","UVVF","KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq","IGNoYXJn","UmV0dXJu","IGZ1bA==","ZG9t","IHJ1bGVz","IG1vZGlmeQ==","IGV2YWw=","aGFt","YXRlbWVudA==","XDw=","dWxh","PUZhbHNl","UkE=","IGNvbnRhaW5z","NzQ=","IHN0YWNr","bWFy","IHt9Cg==","IHVuZGVmaW5lZA==","QXNz","IENoaW5h","dmV5","Kgo=","IHBsYXlpbmc=","KS8=","YWN0b3I=","IGJvdHRvbQ==","bGllcg==","IE51bWJlcg==","IGNvdXBsZQ==","REM=","IFNP","Z29y","LnNldFRleHQ=","c3VjY2Vzcw==","Y29tbWFuZA==","RmlsdGVy","IE91cg==","X2l0ZW0=","IGN0eA==","IHJvYWQ=","VmVyc2lvbg==","Y2FzZQ==","dXJ0","YXZpb3I=","eWNo","c2VtYmx5","IFByb2R1Y3Q=","IGhlbGQ=","YWZl","IGluY2x1ZGVz","PHF1b3Rl","IGF2b2lk","IEZpbg==","IE1vZA==","IHRhYg==","YW5v","w7E=","aXBwaW5n","LWU=","IGluc2VydA==","dGFyZ2V0","Y2hhbg==","Lk1vZGVs","SU1F","XAo=","IG1hY2hpbmU=","YXZ5","IE5P","IEludGVy","IG9wZXJhdGlvbg==","bW9kYWw=","VGFn","XTo=","IHByb2R1Y3Rpb24=","IGFyZWFz","IHJlbg==","X2Zyb20=","bmJzcA==","IG9wZXJhdG9y","bWVu","YXBwZWQ=","X3Blcg==","emVu","KCIu","LnNhdmU=","PSJ7ew==","IHRvcg==","KHJlc3BvbnNl","IGNhbmRpZA==","IGNvbnY=","YWlsZWQ=","IExpYg==","Y29tcA==","dXJh","77+9","IEhlcmU=","IGFyZ3VtZW50","aG9vZA==","IGVzdGFibGlzaA==","b2dyYXBoeQ==","IG9uQ2xpY2s=","YW1iZGE=","IHNjaA==","IG1vdmll","IHNlYw==","IGFjdGl2aXR5","2Kc=","IHNxbA==","X2FsbA==","aW5jaXA=","IHByb3ZpZGVz","IHN5cw==","YWNrZXQ=","IHdhc24=","IHVzZXM=","IEZ1bmN0aW9u","Lmdvb2dsZQ==","IFJlc3VsdA==","ODQ=","VmlzaWJsZQ==","YWdtYQ==","ZWxjb21l","IFN5","IENlbnQ=","QUxTRQ==","YWNpw7Nu","RVhU","IGxpY2Vuc2U=","IExvbmc=","IGFjY29t","IGFiaWxpdHk=","LmhlaWdodA==","QWN0aXZl","b2xvZ2ljYWw=","b2x5","KSks","LlNl","IHBhcmFtZXRlcg==","cHJpdGU=","QUJJTElUWQ==","LnNlcnZpY2U=","IEdyb3Vw","X3F1ZXJ5","IEl0ZW0=","aW5pbmc=","IGp1ZA==","aW1z","Zml4","aW5kZXI=","YWdyYW0=","IGZ1bmN0aW9ucw==","IGV4cGVyaQ==","IEVt","IHJvdA==","IHBlbg==","LmJ0bg=="
,"IEFT","I2lmZGVm","IGNob2ljZQ==","IFBhZ2U=","X1BSTw==","UVU=","5Y8=","YW50aXR5","wq0=","d29yZHM=","IHJlYWRvbmx5","IGZsZXg=","cHJvdGVjdGVk","IEFueQ==","IGNoYXJhY3RlcnM=","ZW5jZWQ=","IEp1bHk=","aWxlcg==","Q2FyZA==","dXJhbmNl","IHJldg==","LmV2ZW50","YWx5","MTMw","IHdvbmRlcg==","IFBvcnQ=","IGxlZ2Fs","cm9sZQ==","IHRlbg==","IGdvZXM=","TVA=","d2hpdGU=","KToNCg==","KSkNCg==","IHJlZmVyZW5jZQ==","IG1pcw==","IFByb2plY3Q=","aWNrcw==","PiY=","Q09O","IHJlcGw=","IHJlZ3VsYXI=","U3RvcmFnZQ==","cmFtZXdvcms=","IGdvYWw=","IHRvdWNo","LndpZGdldA==","IGJ1aWx0","ZGVz","UGFydA==","KHJl","IHdvcnRo","aGli","Z2FtZQ==","OTE=","MTky","INCy","YWNpb24=","IFdoaXRl","KHR5cGU=","KGA=","ODE=","IG5hdHVyYWw=","IGluag==","IGNhbGN1bA==","IEFwcmls","Lkxpc3Q=","IGFzc29jaWF0ZWQ=","CVN5c3RlbQ==","fn4=","PVs=","IHN0b3JhZ2U=","IGJ5dGVz","IHRyYXZlbA==","IHNvdQ==","IHBhc3NlZA==","IT0=","YXNjcmlwdA==","Lm9wZW4=","IGdyaWQ=","IGJ1cw==","IHJlY29nbg==","QWI=","IGhvbg==","IENlbnRlcg==","IHByZWM=","YnVpbGQ=","NzM=","SFRNTA==","IFNhbg==","IGNvdW50cmllcw==","YWxlZA==","dG9rZW4=","a3Q=","IHF1YWw=","TGFzdA==","YWRvdw==","IG1hbnVmYWN0","aWRhZA==","amFuZ28=","TmV4dA==","eGY=","LmE=","IHBvcm5v","IFBN","ZXJ2ZQ==","aXRpbmc=","X3Ro","Y2k=","PU5vbmU=","Z3M=","IGxvZ2lu","YXRpdmVz","J10pOwo=","xIU=","IGlsbA==","SUE=","Y2hpbGRyZW4=","RE8=","IGxldmVscw==","IHt7","IGxvb2tz","ICIj","VG9TdHJpbmc=","IG5lY2Vzc2FyeQ==","ICAgCg==","Y2VsbA==","RW50cnk=","ICcj","IGV4dHJlbQ==","U2VsZWN0b3I=","IHBsYWNlaG9sZGVy","TG9hZA==","IHJlbGVhc2Vk","T1JF","RW51bWVy","IFRW","U0VU","aW5x","UHJlc3M=","IERlcGFydG1lbnQ=","IHByb3BlcnRpZXM=","IHJlc3BvbmQ=","U2VhcmNo","YWVs","IHJlcXU=","IEJvb2s=","Lwo=","KHN0","IGZpbmFuY2lhbA==","aWNrZXQ=","X2lucHV0","IHRocmVhdA==","KGlu","U3RyaXA=","7J0=","w6fDo28=","NzE=","IGV2aWRlbmNl","KSk7","IEJybw==","IFtdOwo=","IG91","YnVm","U2NyaXB0","ZGF0","IHJ1bGU=","I2ltcG9ydA==","PSIv","U2VyaWFs","IHN0YXJ0aW5n","W2luZGV4","YWU=","IGNvbnRyaWI=","c2Vzc2lvbg==","X25ldw==","dXRhYmxl","b2Jlcg==","ICIuLw==","IGxvZ2dlcg==","IHJlY2VudGx5","IHJ
ldHVybmVk","DQ0K","KSkpCg==","aXRpb25z","IHNlZWs=","IGNvbW11bmlj","ICIu","IHVzZXJuYW1l","RUNU","RFM=","IG90aGVyd2lzZQ==","IEdlcm1hbg==","LmF3","QWRhcHRlcg==","aXhlbA==","IHN5c3RlbXM=","IGRyb3A=","ODM=","IHN0cnVjdHVyZQ==","ICQoIiM=","ZW5jaWVz","YW5uaW5n","IExpbms=","IFJlc3BvbnNl","IHN0cmk=","xbw=","IERC","5pc=","YW5kcm9pZA==","c3VibWl0","b3Rpb24=","OTI=","KEA=","LnRlc3Q=","ODI=","CgoKCgoKCgo=","XTsNCg==","IGRpcmVjdGx5","ICIl","cmlz","ZWx0YQ==","QUlM","KXsNCg==","bWluZQ==","ICAgICAgICAgICAgICAgICAgICAgICAgICA=","KGs=","Ym9u","YXNpYw==","cGl0ZQ==","X19f","TWF4","IGVycm9ycw==","IFdoaWxl","IGFyZ3VtZW50cw==","IGVuc3VyZQ==","UmlnaHQ=","LWJhc2Vk","V2Vi","IC09","IGludHJvZHU=","IEluc3Q=","IFdhc2g=","b3JkaW4=","am9pbg==","RGF0YWJhc2U=","IGdyYWQ=","IHVzdWFsbHk=","SVRF","UHJvcHM=","Pz4K","IEdv","QE92ZXJyaWRl","UkVG","IGlw","IEF1c3RyYWw=","IGlzdA==","Vmlld0J5SWQ=","IHNlcmlvdXM=","IGN1c3RvbWVy","LnByb3RvdHlwZQ==","b2Rv","Y29y","IGRvb3I=","IFdJVEhPVVQ=","IHBsYW50","IGJlZ2Fu","IGRpc3RhbmNl","KCkpLg==","IGNoYW5jZQ==","IG9yZA==","Y2FtZQ==","cHJhZ21h","IHByb3RlY3Q=","cmFnbWVudA==","IE5vZGU=","ZW5pbmc=","0Yc=","IHJvdXRl","IFNjaG9vbA==","aGk=","IG5laWdoYg==","QWZ0ZXI=","bGljaXQ=","IGNvbnRy","IHByaW1hcnk=","QUE=","LldyaXRlTGluZQ==","dXRpbHM=","IGJp","UmVk","LkxpbnE=","Lm9iamVjdA==","IGxlYWRlcnM=","dW5pdGllcw==","IGd1bg==","b250aA==","IERldg==","RklMRQ==","IGNvbW1lbnRz","X2xlbg==","YXJyb3c=","YW1vdW50","UmFuZ2U=","c2VydA==","R3JpZFZpZXc=","IHVwZGF0ZWQ=","IE1v","IGluZm9ybQ==","b2NpZXR5","YWxh","QWNjZXNz","IGhhYg==","IGNyZWF0","X2FyZw==","IEphbnVhcnk=","IERheQ==","IikNCg==","dXBsZQ==","ZG9jdW1lbnQ=","Z29yaXRo","bWVudQ==","IE92ZXI=","YmI=","LnRpdGxl","X291dA==","IGxlZA==","dXJp","ID8+PC8=","Z2w=","IGJhbms=","YXltZW50","CXByaW50Zg==","TUQ=","IHNhbXBsZQ==","IGhhbmRz","IFZlcnNpb24=","dWFyaW8=","IG9mZmVycw==","aXR5RW5naW5l","IHNoYXBl","IHNsZWVw","X3BvaW50","U2V0dGluZ3M=","IGFjaGll","IHNvbGQ=","b3Rh","LmJpbmQ=","QW0=","IHNhZmU=","U3RvcmU=","IHNoYXJlZA==","IHByaXY=","X1ZBTA==","IHNlbnM=","KXs=","IH
JlbWVtYmVy","c2hhcmVk","ZWxlbWVudA==","IHNob290","VmVydA==","Y291dA==","IGVudg==","X2xhYmVs","ID4K","cnVu","IHNjZW5l","KGFycmF5","ZGV2aWNl","X3RpdGxl","YWdvbg==","XQ0K","YWJ5","IGJlY2FtZQ==","Ym9vbGVhbg==","IHBhcms=","IENvZGU=","dXBsb2Fk","cmlkYXk=","IFNlcHRlbWJlcg==","RmU=","IHNlbg==","Y2luZw==","Rkw=","Q29s","dXRz","X3BhZ2U=","aW5u","IGltcGxpZWQ=","YWxpbmc=","IHlvdXJzZWxm","LkNvdW50","Y29uZg==","IGF1ZA==","X2luaXQ=","Lik=","IHdyb3Rl","MDAz","Tkc=","LkVycm9y","5Ls=","LmZvcg==","IGVxdWFs","IFJlcXVlc3Q=","IHNlcmlhbA==","IGFsbG93cw==","WFg=","IG1pZGRsZQ==","Y2hvcg==","MTk1","OTQ=","w7g=","ZXJ2YWw=","LkNvbHVtbg==","cmVhZGluZw==","IGVzY29ydA==","IEF1Z3VzdA==","IHF1aWNrbHk=","IHdlYXA=","IENH","cm9wcmk=","aG8=","IGNvcA==","KHN0cnVjdA==","IEJpZw==","IHZz","IGZyZXF1","LlZhbHVl","IGFjdGlvbnM=","IHByb3Blcg==","IGlubg==","IG9iamVjdHM=","IG1hdHJpeA==","YXZhc2NyaXB0","IG9uZXM=","Lmdyb3Vw","IGdyZWVu","IHBhaW50","b29scw==","eWNs","ZW5jb2Rl","b2x0","Y29tbWVudA==","LmFwaQ==","RGly","IHVuZQ==","aXpvbnQ=","LnBvc2l0aW9u","IGRlc2lnbmVk","X3ZhbA==","YXZp","aXJpbmc=","dGFi","IGxheWVy","IHZpZXdz","IHJldmU=","cmFlbA==","IE9O","cmljcw==","MTYw","bnA=","IGNvcmU=","KCkpOw0K","TWFpbg==","IGV4cGVydA==","CQkNCg==","X2Vu","IC8+","dXR0ZXI=","SUFM","YWlscw==","IEtpbmc=","Ki8KCg==","IE1ldA==","X2VuZA==","YWRkcg==","b3Jh","IGly","TWlu","IHN1cnBy","IHJlcGU=","IGRpcmVjdG9yeQ==","UFVU","LVM=","IGVsZWN0aW9u","aGFwcw==","LnByZQ==","Y20=","VmFsdWVz","ICIK","Y29sdW1u","aXZpbA==","TG9naW4=","aW51ZQ==","OTM=","IGJlYXV0aWZ1bA==","IHNlY3JldA==","KGV2ZW50","IGNoYXQ=","dW1z","IG9yaWdpbg==","IGVmZmVjdHM=","IG1hbmFnZW1lbnQ=","aWxsYQ==","dGs=","IHNldHRpbmc=","IENvdXI=","IG1hc3NhZ2U=","CWVuZA==","IGhhcHB5","IGZpbmlzaA==","IGNhbWVyYQ==","IFZlcg==","IERlbW9jcg==","IEhlcg==","KFE=","Y29ucw==","aXRh","ICcu","e30=","CUM=","IHN0dWZm","MTk0","IDoK","IEFS","VGFzaw==","aGlkZGVu","ZXJvcw==","SUdO","YXRpbw==","IEhlYWx0aA==","b2x1dGU=","RW50ZXI=","Jz4=","IFR3aXR0ZXI=","IENvdW50eQ==","c2NyaWJl","ID0+Cg==","IGh5","Zml0","IG1pbGl0YX
J5","IHNhbGU=","cmVxdWlyZWQ=","bm9u","Ym9vdHN0cmFw","aG9sZA==","cmlt","LW9sZA==","IERvd24=","IG1lbnRpb24=","Y29udGFjdA==","X2dyb3Vw","b2RheQ==","IHRvd24=","IHNvbHV0aW9u","dWF0ZQ==","ZWxsaW5n","XS0+","b3Rlcw==","ZW50YWw=","b21lbg==","b3NwaXRhbA==","IFN1cA==","X0VO","IHNsb3c=","U0VTU0lPTg==","IGJsdWU=","YWdv","IGxpdmVz","IF4=","LnVu","aW5zdA==","ZW5nZQ==","IGN1c3RvbWVycw==","IGNhc3Q=","dWRnZXQ=","77yB","aWNlbnM=","IGRldGVybWlu","U2VsZWN0ZWQ=","X3Bs","dWV1ZQ==","IGRhcms=","Ly8KCg==","c2k=","dGhlcm4=","IEphcGFu","L3c=","UFU=","IEVhc3Q=","b3ZpZQ==","IHBhY2thZ2U=","IG5vcg==","IGFwaQ==","Ym90","Il07Cg==","X3Bvc3Q=","dWxhdGU=","IGNsdWI=","JykpOwo=","IGxvb3A=","UElP","aW9uZQ==","c2hvdA==","SW5pdGlhbA==","IHBsYXllZA==","cmVnaXN0ZXI=","cm91Z2h0","X21heA==","YWNlbWVudA==","bWF0Y2g=","cmFwaGljcw==","QVNU","IGV4aXN0aW5n","IGNvbXBsZXg=","REE=","LkNo","LmNvbW1vbg==","bW8=","ICcuLi8uLi8=","aXRv","IGFuYWx5c2lz","IGRlbGl2ZXI=","ICAgICAgICAgICAgICAgIAo=","aWR4","w6A=","b25nbw==","IEVuZ2xpc2g=","PCEtLQ==","IGNvbXB1dGVy","RU5TRQ==","IHBhcw==","IHJhaXM=","SGFzaA==","IG1vYmlsZQ==","IG93bmVy","RklH","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","dGhlcw==","IGF0dHI=","d2Q=","LnRpbWU=","YXdu","IHRyZWF0bWVudA==","IEFj","LlZpZXc=","aW1wbA==","bW9yZQ==","cGFzcw==","IGhh","LmZyb20=","IGxlYWRpbmc=","RkZGRg==","KGVycm9y","LnVp","YXRhcg==","YWRlcnM=","ZGF0ZXM=","IHp1","IGZsb3c=","VGFyZ2V0","IGludm9sdmVk","IGlv","cGFyc2U=","JF8=","aGVzdA==","LmludA==","LWl0ZW0=","YXN5","U3A=","IHNoaWZ0","TlQ=","IHRm","X1RS","LndlYg==","Q1M=","IH0p","IGV5ZXM=","MTI1","MTA1","X3o=","Jyk7DQo=","aWZvcm4=","IHtA","IG5pY2U=","Lmxpc3Q=","ICAgIA0K","IGZsb29y","IHJlZGlyZWN0","IFVL","KFsn","IHdpc2g=","IGNhcHQ=","bGVnYWw=","IElP","IHN0YWdl","LlN0cmluZw==","IEFmcg==","aWdlbg==","IFNI","RGVsZXRl","ZWxscw==","IHNvbGlk","IG1lZXRpbmc=","IHdvcmtlZA==","IGVkaXRvcg==","aW55","0Lw=","X3JlYWQ=","Lklk","ZWZm","T2Zmc2V0","Y2hh","VVNFUg==","CQkgICA=","aXBwZWQ=","IGRpY3Q=","IFJ1bg==","LmhwcA==","IGFuZw==","eG1s","aW1
wbGU=","IG1lZGljYWw=","X3Rva2Vu","Y29ubmVjdA==","IGhvdXI=","IGNvbnRyb2xsZXI=","X21lc3NhZ2U=","VUlE","R3I=","YW5kZWQ=","X0NI","IGJvb2tz","IHNwZWFr","YW1pbmc=","IG1vdW50","UmVjb3Jk","CXN0cnVjdA==","LldlYg==","b25kb24=","IC8vCg==","IGZlbHQ=","LkF1dG8=","aWRnZQ==","X3Bvcw==","UFI=","IG1vZGVybg==","Q29sbGVjdGlvbg==","X21zZw==","Q0Q=","IExv","IHNlY29uZHM=","aWJseQ==","LmVxdWFscw==","IGludGVybmF0aW9uYWw=","I3ByYWdtYQ==","b290aA==","V3JpdGVy","aWF0ZQ==","IGNlbGU=","IEJpdA==","aXZv","aXZlcnk=","cmQ=","SEVDSw==","IGNhY2hl","LmNvdW50","IHJvbGw=","LlJlYWQ=","MTA4","UkVE","IHNldHVw","aXpvbnRhbA==","bW9kZWxz","YXJndg==","IGNvbnNpZGVyZWQ=","PSIuLi8=","c2V0dGluZ3M=","IFJlbA==","IGdyb3d0aA==","IG1peA==","IFdhc2hpbmd0b24=","IHBsdA==","IElN","4bo=","IHR1cm5lZA==","IERhdGVUaW1l","IFdlZA==","KHVybA==","ICIt","IGxldHRlcg==","QXN5bmM=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","IE9jdG9iZXI=","X2xpbmU=","IGF0dGVudGlvbg==","IGNvbGxlY3Q=","IEhhc2g=","IGltYWc=","VHJlZQ==","IHNpdHVhdGlvbg==","ZXR0ZQ==","X25v","SVZF","IHZvbg==","LnRhcmdldA==","IGtub3dsZWRnZQ==","IGRyaXZl","LnBvc3Q=","IGJsb29k","IGNpdA==","cHJpbWFyeQ==","IGNvbmZpZ3VyYXRpb24=","dGVl","IHBob3Rv","aXNvZGU=","VHJhY2U=","IGdhdmU=","IHNob3Q=","IEFpcg==","IG1vdGhlcg==","cHJpY2U=","IG1vcm5pbmc=","KSl7Cg==","LXg=","IHRyYWRl","IGRlc2M=","ICYmCg==","IHBhcmVudHM=","QXBp","5Yg=","dGVk","d2Vy","IOY=","IHN5","IEtl","UGFyc2Vy","5YU=","YW5jeQ==","IHBpZWNl","aWZvcm5pYQ==","dG9TdHJpbmc=","cmFu","aWRpbmc=","UFRJT04=","Y29tZXM=","L2xpYw==","LmNsaWVudA==","RWw=","TG9uZw==","IHByb2Zlc3Npb25hbA==","cnVwdA==","dmE=","IGNvbXBsZXRlbHk=","IHByYWN0aWNl","MDAy","IHNlbGVjdGlvbg==","UmVt","aW5p","IGNhbQ==","UkVF","IHNpdGVz","cGE=","QVRVUw==","0YHRgg==","YXJyYW50","Kig=","X0tFWQ==","IEJ1dHRvbg==","IEZyaWRheQ==","c2VxdQ==","IHJlYWRlcg==","IG1lc3NhZ2Vz","6K8=","IGJ1Zg==","S2U=","IG5vdg==","SFA=","TXNn","YWxpZ24=","YXJpbHk=","ICcs","X3dpdGg=","IGRhcw==","IGhlYXJk","YXRvbWlj","cmlhbA==","KVs=","IGRpc2U=","QGVuZA==","IGdvbGQ=","IGZhaXI=","IHNhbGVz","LkJ1dHRvbg==","
c3RyaWN0","c2F2ZQ==","IG1lYXN1cmU=","ICIr","ZWNhdXNl","Vmlld0NvbnRyb2xsZXI=","IFRhYmxl","LnBhcmFt","IGRlY2lkZWQ=","KCgo","SU5GTw==","IG9wcG9ydHVuaXR5","VGU=","SUNFTlNF","Y2NvcmRpbmc=","a2k=","IFVO","IGNvbnRhaW4=","IG1hbmFnZXI=","IHBhaW4=","IEZpcmU=","cm9tZQ==","IHBsYW5z","Rm91bmQ=","bGF5","IERlY2VtYmVy","IGluZmx1","w7o=","cmVuY2g=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","YXppbmc=","YnJpZWY=","Y2FsbA==","d29vZA==","IGxvYWRlZA==","IGdyYW5k","L2Y=","aW1w","X1U=","MTI3","U1RS","4oCi","IGNyZWRpdA==","LkNvbG9y","b3JnZQ==","UVVFU1Q=","IGRpZmZlcmVuY2U=","IFBD","d2FyZ3M=","IHB1Yg==","dW5kYXk=","IGZyYQ==","Lm1heA==","IHRyaWVk","YW5uZWxz","c2VuZA==","IHJlcG9ydHM=","IGFkdWx0","5Lo=","IGNvbnNpc3Q=","IFN0cmVldA==","IFByb2dyYW0=","U1FM","TWF0cml4","b3VuY2ls","LUE=","CXc=","IHdob3Nl","IHJlbGln","IFNleA==","IGdpdmVz","bm9uZQ==","Lm1lc3NhZ2U=","KEc=","LmF3dA==","LXJpZ2h0","IE5vdmVtYmVy","ZWxsaWc=","MzYw","dXRpdmU=","xIM=","b3Zlcm4=","IGVhc2lseQ==","IGlkZWFz","MTA0","INC9","L2Nzcw==","bHlpbmc=","ZWxsZQ==","Q2Fu","X2NvbG9y","0L7Qsg==","IHBhaXI=","bmd0aA==","IHNwbGl0","MTQw","ZHJvcA==","YXJ0eQ==","b25h","IGNhcGl0YWw=","IGhlYXI=","IGV4aXN0cw==","CWxvZw==","ZW1v","UnVu","b2k=","IHBhcnNlcg==","IE1ldGhvZA==","IGVkdWNhdGlvbg==","W2s=","IGxpYnJhcnk=","PiI7Cg==","X1VO","CXN0ZA==","b2RlZA==","IGNhbGxz","aGVyZQ==","UmVs","IGJyYW5k","YmFja2dyb3VuZA==","Z2E=","X2FkZHJlc3M=","X3BhcmFtcw==","Q2F0ZWdvcnk=","MTAz","IEluZGlh","X2V2ZW50","IGluZw==","UmVuZGVy","LmNs","dW1weQ==","IHBldA==","RkM=","IEFudA==","RXh0","IGNoYXJnZQ==","ZW5lZA==","Z3JhZA==","RU8=","IGRlcGVuZA==","IC4KCg==","ZnJhbWU=","IGRm","IGh1Z2U=","IFBBUlQ=","ZWRz","Ozs=","IEFN","IGJhc2lj","IExldA==","bGljaA==","IGFybQ==","IHN0YXI=","IGZlZGVyYWw=","V29yaw==","IGNhcnJ5","IElzcmFlbA==","KG9iag==","PXt7","IHNhdmVk","IHN5bg==","IGNvbnN0YW50","VkVOVA==","IHBvc2l0aXZl","IGNvbmR1Y3Q=","IHNraW4=","IGVhcmxpZXI=","IGxheW91dA==","IElQ","T1VS","IHRpbQ==","c3R5bGVzaGVldA==","X2Ns","IENhcmQ=","Kyspewo=","IHRlbXBlcg==","IERhdmlk","CXRyeQ==","LmRhcn
Q=","IHdhbnRz","IHBpY3R1cmU=","IHZpZGVvcw==","IENvbW0=","aXNpb25z","X01BWA==","TWFwcGluZw==","LWNvbnRlbnQ=","IEVhcg==","LWRl","IHByZW0=","YnJ1YXJ5","IGNvbXBvbmVudHM=","IHRocm91Z2hvdXQ=","IHB1bGw=","IHBhZ2Vz","ZW50ZQ==","cmVzcG9uZA==","IGdhcw==","Y3JpcHRvcg==","IGVkZ2U=","IGJvdW5k","QUNU","KioqKioq","IGNyZWF0aW5n","IENI","IG51bGxwdHI=","QnI=","Kyc=","LmNv","Pjo6","IGxlYXJuaW5n","Lkxlbmd0aA==","X1NI","IHBhdGllbnRz","QUlO","IGtpZHM=","IGNvbWZvcnQ=","IHNob3du","dWdpbnM=","IEJhY2s=","ZWxsYQ==","X0NM","IGxhdA==","IGRpc3BhdGNo","IGNsYXNzZXM=","LmF0","LmJlZ2lu","IHN1Y2Nlc3NmdWw=","YmFu","IG9idGFpbg==","IFNs","IGxhY2s=","aXRlcmF0b3I=","VGhyZWFk","KHNpemU=","IG5vbmU=","Lmhhcw==","X1g=","c29ydA==","bmFw","cGV0","Ymlu","NzAw","IENhbmFkYQ==","VGhleQ==","IGRhbnM=","IE1hdA==","PHRk","IGhhaXI=","ICcnLAo=","IGN1","IGxhd3M=","bGV0ZWQ=","cGVk","IHBvdw==","IGtuZXc=","X0NPTQ==","Xyw=","IE1hZw==","aWRlbnRz","KHJlcQ==","ICks","LWNlbnRlcg==","MTkw","IHdpZGU=","IEF1dGhvcg==","c3RhbnRz","IGpvYnM=","IG1hdGg=","ZXRpbWVz","Qm9vbGVhbg==","IHNjb3Bl","X2lz","IG1lYXM=","IGtleXM=","ZWxheQ==","IGV4YWN0bHk=","Jz0+Jw==","IFBhdWw=","bWFz","CXByaW50","KGxlbg==","ZmQ=","ICk7","LkV2ZW50","cWxp","aXJpdA==","aWVsZHM=","b21hbg==","IFRvcA==","IHZvdGU=","IG1hc2s=","IHRoZW1l","LQo=","IHByb3Bz","IGZpbmU=","IHdyaXRlcg==","X29mZnNldA==","Y2Fy","IGFsdGVybg==","IGNvcHlyaWdodA==","IGRlc3Ryb3k=","cHBlcg==","IGdlbmVyYXRl","cHBlZA==","4oCZZA==","ICAgICAgCg==","bWFrZQ==","IFNob3c=","IGJyb3dzZXI=","IGZhdm9yaXRl","IGNhcmVlcg==","IGhhcHBlbmVk","KGNoYXI=","IHJlY29tbWVuZA==","IGxpdGVy","LmZpbHRlcg==","Z3JhZGU=","IMKj","UGhvbmU=","b21z","IG5hbWVk","LWxhYmVs","aXBv","IE90aGVy","IHBhbmVs","IHJvY2s=","U2NhbGU=","CWFzc2VydA==","0LQ=","IHRydXN0","ZnJvbnQ=","IGRlbW9u","QXI=","TmV0","IGVjb25vbWlj","Zm9vdGVy","IHJhY2U=","KG5vZGU=","IE9wdGlvbg==","c3BsaXQ=","IHBoeXNpY2Fs","aWZlc3Q=","IHJlbW92ZWQ=","Lmh0dHA=","KSksCg==","IGxvb2tlZA==","Jzs=","ZGluZw==","Z2VzdA==","YXR1cmRheQ==","L2xpY2Vuc2Vz","UHJpY2U=","IGRybw==","IHRvd2FyZHM=","IHVucw=
=","IENM","CXN0YXRpYw==","IHJvd3M=","IGRlZmluZQ==","LnJlcGxhY2U=","IGZhdGhlcg==","IERlc2lnbg==","YXNzaWdu","bXV0","RGV2aWNl","RGlk","JykpCg==","b21ldHJ5","YXlsb2Fk","IGhpc3Rvcg==","IFBhcmFt","IEJvb2xlYW4=","IG5hdHVyZQ==","IGpz","IG5hdGlvbg==","aWg=","IGRpc2NvdmVy","c2Vt","SGFuZGxl","CXI=","IFRlY2hu","IHdhbGw=","eyQ=","QHByb3BlcnR5","ICIuLi8=","IGV4YW0=","LmRyYXc=","b3BwaW5n","IG5lYXJseQ==","IGNvb2w=","IGluZGVwZW5k","UkVT","IGhhbmRsZXI=","IE1vbmRheQ==","IHN1bg==","U3R5bGVz","b3VzbHk=","IAk=","dmVzdA==","RGlzcGxheQ==","KHk=","YXRpY2FsbHk=","IHByZWRpY3Q=","eWluZw==","IHNvbWV0aW1lcw==","Il0K","IGRyaW5r","IGJ1bA==","aWZpY2F0aW9ucw==","Lmluc2VydA==","LnJlZw==","IHRlc3Rz","QWxpZ25tZW50","IGFsbGVn","IGF0dHJpYnV0ZQ==","IE5vdGU=","IG15c2VsZg==","YXJ0cw==","Tm93","IGludGVyZXN0aW5n","bGllbnRz","IHBvcHVsYXRpb24=","IENhbGlmb3JuaWE=","Ikk=","5bk=","IGdyZWF0ZXI=","dWVzZGF5","IHRob3Vz","IGNvc3Rz","IGxhdW5jaA==","XEh0dHA=","a2Vy","YmFuZA==","IFBsYXk=","IGJhbmQ=","LnNoYXBl","ZXNvbWU=","YXJ0aWNsZQ==","LnJm","IHdlcg==","w6Fz","ZW1iZXJz","dXNy","QkE=","aWNhbg==","ZXR0","dmFsaWRhdGU=","dWx0aQ==","IGltbWVkaWF0ZWx5","emVy","IGZpZ3VyZQ==","b2Vz","ZWxsZXI=","aXJjbGU=","IFNpZ24=","LmRi","IHJhbms=","Qnl0ZXM=","IHByb2plY3Rz","X3JlYw==","VUxBUg==","QVBJ","IExpbmU=","UG9ydA==","IHBvbGw=","IGdpdmluZw==","aWRlbmNl","LS0K","IHBsb3Q=","aWNpYWw=","IHdhcnJhbnQ=","SVRJT04=","IERvdWJsZQ==","IGJpbGxpb24=","Z29yaXRobQ==","IGVxdWlwbWVudA==","REFURQ==","IEAi","RUU=","IHBsZQ==","aWF0aW9u","IGhlYWRlcnM=","IHByb2NlZA==","LkNvbXBvbmVudE1vZGVs","IE9iYW1h","IHBh","IEJlc3Q=","aW1hdGVseQ==","LmdldFN0cmluZw==","Llw=","bXBsb3k=","IHJhdw==","X2Jsb2Nr","dW5kcmVk","In0sCg==","MTEy","Lkdyb3VwTGF5b3V0","IGJyb3VnaHQ=","TlNTdHJpbmc=","dGhyb3c=","Y3JlYXRlZA==","Lk5ldw==","X3ZpZXc=","Q1A=","ZXBz","T3A=","IGdyYXRpcw==","ICci","IGludGVydmlldw==","IiIiCg==","IHBhcnRpYWw=","IGFyaWE=","YmluZw==","QXV0aG9y","Qm9vaw==","IFBhdA==","dW1hbg==","VXNlcnM=","cGx1cw==","MTkz","IERpcmVjdA==","dmVudWU=","YWxwaGE=","VUNDRVNT","IENhbGw=","ICk7DQ
o=","aW1hdGVk","IHJlbWFpbg==","IGFudGk=","IExvbmRvbg==","IHNhZmV0eQ==","UE9TRQ==","b2xlcw==","Y29udHJvbGxlcg==","Qnl0ZQ==","IENvdXJ0","IFBoaWw=","IEFzc29jaQ==","ZW5h","5ZA=","X1NUUg==","Y29pbg==","cmVzaG9sZA==","IGJhdGNo","X0NsaWNr","ZW50aWNhdGlvbg==","Pic7Cg==","ZW50eQ==","IGJlZ2lubmluZw==","IHplcm8=","IENvbnZlcnQ=","IHRlcnI=","IHBhaWQ=","IGluY3JlYXNlZA==","Y2F0Y2g=","LXNpemU=","MTE1","YWN0aXZpdHk=","ZXF1YWxz","IHF1ZXVl","ICIn","IEludGVybmF0aW9uYWw=","IGbDvHI=","dXJzZGF5","IHNjaWVudA==","YWxsb3c=","YXhpcw==","IGFwcHJvcHJp","ZWRnZQ==","IGlkeA==","U3VjY2Vzcw==","ZW50aWZpZXI=","Olw=","eGlz","IG1heGltdW0=","YXJrcw==","IGJpcnRo","KGluZGV4","IG1heWJl","LnB5","ZmlsZXM=","IGxpbWl0ZWQ=","X2NoZWNr","bG9vaw==","cGxpZXM=","IG1vdmVtZW50","J10u","IGJyb2Fk","IEJF","IFVuaXR5RW5naW5l","LmNwcA==","IEV2ZXJ5","QWRtaW4=","IGZhbnM=","cGFyZWQ=","CiAgICAK","IGZvcmVpZ24=","IHBhbg==","IHRvdXI=","IE9yZGVy","IG1vdmluZw==","IGF1Zg==","Q2FsbA==","Y2I=","xZ8=","dmVudG9yeQ==","IFNxbA==","IGZ1bGx5","Q2xpY2tMaXN0ZW5lcg==","V09SRA==","IGFubm91bmNlZA==","KQ0KDQo=","IGFncmVlZA==","cmll","IGVhcm4=","X2xpbms=","LmFycmF5","KHRleHQ=","IG1hdGVyaWFscw==","LHA=","ZmZmZg==","dmc=","IMKp","IHVubGVzcw==","YWpheA==","TE9H","IHNleHVhbA==","IFwi","LXRpbWU=","IGNvYWNo","IHN1cHBvcnRlZA==","IHBob3Rvcw==","aWZvcm0=","LkNyZWF0ZQ==","KV0=","cmllcg==","IGRpYWxvZw==","YXZlcg==","aWdl","KSs=","X2lkeA==","Ols=","X21pbg==","IENvbmc=","IHByZXNzdXJl","IHRlYW1z","U2lnbg==","YmVnaW4=","cmlhbg==","TkVTUw==","TFM=","IGltcHJvdmU=","IFN1bmRheQ==","IGRlZmluaXRpb24=","aWdlcg==","cm9sbGVycw==","IHRoaW5raW5n","VGVtcGxhdGU=","LUY=","IGVtZXJn","cGxhdGVz","IFVTQQ==","LnNldFN0YXRl","IEFsc28=","cmV2","IGVuYWJsZQ==","IENP","UEVDVA==","IGNvbmNlcHQ=","KS0=","IOKAog==","IHNldHM=","IG1lYW5pbmc=","ZW1vbg==","IENvbnM=","Y21w","ZWRlcg==","YW5uZWQ=","aWNlbnNlZA==","IFN1cGVy","IGRhaWx5","IG11bHRp","X3U=","IGNoYWxsZW5n","X21vZGU=","IFByb21pc2U=","IHN0cmljdA==","am8=","aW50b24=","KGxpc3Q=","T25seQ==","Pns=","IHZlaGljbGU=","7ZU=","IFBsYXllcg==","MTA2","I
ERlbA==","IHBvb2w=","LnVybA==","bmVzZGF5","KCk7DQoNCg==","OTAw","ICIpOwo=","TG9jYWw=","LiIpOwo=","IG9yZ2FuaXphdGlvbg==","cmVuZGVy","IEFwcGxpY2F0aW9u","IHN1bW1lcg==","ZXhwZWN0ZWQ=","TkE=","IHJhcA==","X29iag==","IHN1cmZhY2U=","IFBVUg==","IH0sCgo=","IHZhcmlhYmxlcw==","KG1lc3NhZ2U=","IG9waW4=","LmJhY2s=","0LDQvQ==","IHdvcmtlcnM=","dm0=","Q28=","dWdodGVy","IG1hc3Rlcg==","ICIiLA==","IHN0b3JpZXM=","LlVzZXI=","IGNlbGVicg==","aW5lc2U=","QlM=","IENvbW1hbmQ=","YXNoYm9hcmQ=","IG9n","a2c=","LmltYWdl","LnN0eWxl","IHN0ZXBz","IEJlbg==","KGFyZ3M=","NDA0","IFBlcnNvbg==","LHk=","IG9mZmljaWFscw==","fAo=","IHNraWxscw==","dmM=","IGJ1aWxkZXI=","IGdhcg==","QWNjb3VudA==","IEF1dGg=","55Q=","J10pCg==","IEFU","bm4=","LkludA==","U1NFUlQ=","IGVmZmVjdGl2ZQ==","TEVURQ==","IHRvb2xz","QVJE","IGRpZ2l0YWw=","MTkx","RG91Ymxl","IEZpbmQ=","UkM=","IGlubGluZQ==","L3I=","QVJBTQ==","QVNL","IGludGVudA==","YWlnaHQ=","X2FkZHI=","IHJlcXVlc3Rz","LmZpcnN0","IGRlYnVn","IHNwZW50","KCkpKTsK","xZs=","IHByaW5jaXA=","TG9nZ2Vy","Y2x1ZGVz","LnVzZQ==","IHN1cnY=","bWVkaWE=","IEZlYnJ1YXJ5","IE1hYw==","IG1pc3Npbmc=","IHdpZmU=","IHRhbGtpbmc=","IE1ha2U=","IGNhcnQ=","IGxvY2F0ZWQ=","RW5j","LWE=","Y2hyb24=","IGNhcmRz","IGd1eQ==","IHBlcnM=","IFllcw==","YXRldmVy","IEFuZw==","b2xhcg==","IEV2ZW4=","IGFjY3Vy","IFBvd2Vy","IEdvbGQ=","Y2xlYXI=","UHJvY2Vzcw==","IHJlY29yZHM=","IGtpbGxlZA==","LmNsZWFy","IFdBUlJBTlRJRVM=","IHB1cnBvc2U=","cGFuZWw=","SkVDVA==","w61h","IGV4ZXJj","V1M=","L0w=","LmV4cG9ydHM=","IF9fXw==","IHNpbg==","U2VydmxldA==","IGTDqQ==","LmRlbGV0ZQ==","cm9rZQ==","U2w=","dWdo","ZWFycw==","IHBvaW50ZXI=","IGhvcA==","YWxsZXJ5","IG9icw==","Y292ZXJ5","CWNoYXI=","CQkJCQkJCQkJCQ==","CWRlZg==","b2NpdHk=","aXRjaGVu","dWxhdGlvbnM=","IEZJVA==","ICku","c3RyYWludHM=","dmVudGlvbg==","IHJlcXVpcmVz","IE9wZXI=","TUU=","T1VOVA==","YWxsZXQ=","IG5vcm0=","SVJF","ZXhhcw==","IHByb2dyYW1z","IHdlYWs=","Jy4k","dWluZw==","CSAgICAgICA=","IG1pbA==","IGZpcm0=","aW5pdGVseQ==","X1ZBTFVF","YXBzZQ==","YXRpc2Y=","IGRlbWFuZA==","X21vZA==","IGRlc2NyaWJlZA==","IHBsY
WNlcw==","VklE","IGFsb25l","IGV4cG9ydA==","IHZlYw==","IE1heA==","IGFjdGl2aXRpZXM=","aWN0dXJlcw==","Z2VuZXI=","IG1h","gqw=","IGV4cHJlc3Npb24=","Q2FsbGJhY2s=","X2NvbnRlbnQ=","IE1vc3Q=","IHRlc3Rpbmc=","RUM=","Q0hBTlQ=","IGFkanVzdA==","LlRocmVhZGluZw==","KGN0eA==","IGFncmVl","aWdoZXN0","IHVp","IExhdw==","Llk=","Pjw/","IHBvZA==","LWxn","4oCdCgo=","IGRlc2NyaWJl","IEV1cm9wZWFu","LXNo","IFBVUlBPU0U=","T1JZ","IGNvbnZlcnM=","IElsbHVtaW5hdGU=","IEF2","KGNo","PyI=","Y2hlbg==","aW1h","RG9jdW1lbnQ=","IG9wZXJhdGlvbnM=","d2lu","CWZ1bmN0aW9u","LkltYWdl","IHNjZW4=","L2g=","IFND","IGV4cGxv","OiU=","LyoqDQo=","TkFNRQ==","5og=","KHZhcg==","IGRpcmVjdG9y","T05H","IHlpZWxk","IGZlZXQ=","IFNlYXJjaA==","IEls","IHJlc3RhdXI=","ZHVj","IGludGVnZXI=","MTA3","ICcnOwo=","IGhpZ2hseQ==","Y2hlY2tlZA==","IFBBUlRJQw==","RVJDSEFOVA==","77yJ","IG9wdGlt","UXVldWU=","IExJ","aXRhdGlvbg==","IHRyYW5zcG9ydA==","aXNzaW9u","ZmlsbA==","dXNpb24=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","CWJvb2w=","LXRo","dXB0","IGVzc2VudGlhbA==","YW50ZWQ=","IGJlbmVmaXRz","CVM=","JzsNCg==","aWtp","IGdpcmxz","aWNlZA==","YnVmZmVy","XSs=","IHNvY2tldA==","IHByaWNlcw==","IEZyZQ==","IHNhdA==","IHdvb2Q=","TWVudUl0ZW0=","QVJH","IEFkbWlu","T1dO","ZGs=","IHJlc2V0","IGZvcm1z","INC4","5pY=","IFR1ZXNkYXk=","MTA5","IEluaXRpYWxpemVk","X3RyYWlu","b3Jhcnk=","YXRlZ29y","IGR0","VG90YWw=","Y29uc3RydWN0","aWxpZXM=","IGd1eXM=","0LXRgA==","IGluc3RydWN0aW9u","MDEw","eWxlZA==","IGludGVybmV0","ZXRhZGF0YQ==","YWR5","ZmFjZXM=","amVjdGlvbg==","IEphY2s=","IHJlY3Q=","Wy0=","IExlZw==","IGRldmljZXM=","T0M=","ICoNCg==","b3JhdGlvbg==","ZXJ0YWlu","IGd1YXJk","b3N0cmVhbQ==","IGVudW0=","LmxheW91dA==","ICI7Cg==","dm9rZQ==","IE9r","SG9tZQ==","KHRy","RVRI","IGRlbGF5","IHB1cmNoYXNl","ZGM=","IGFyZW4=","X29uY2U=","CQkJCQo=","cm9y","ZHJhdw==","LnJ1bg==","KG1vZGVs","VGltZW91dA==","bGlr","IEFyZw==","LmVu","IGZpc2g=","Y3B5","X2Zl","RVJDSEFOVEFCSUxJVFk=","KFg=","X291dHB1dA==","Pz8=","IGpv","YW5kYXJk","IGRvbGw=","ZXJyb3Jz","X2Jhc2U=","IFBBUlRJQ1VMQVI=","IGxlYWRlcg==","IGNvbXBhcg==
","IGRvdWI=","IFZpcw==","U3RhY2tUcmFjZQ==","LUM=","IFN0dWQ=","c3RpdHV0ZQ==","TW9yZQ==","IERlc2NyaXB0aW9u","V0FSRQ==","YWRz","INC6","YmluZA==","PXNlbGY=","ZW1wbG95","W24=","LmFsbA==","LUI=","JiY=","YWxt","IGN1bHR1cmU=","aG91c2U=","IHN1ZmZlcg==","ICcl","IHN0cmFpZ2h0","IFN0YXI=","dWRv","IGRlZA==","IENPTQ==","IGNvbmZpcm0=","IEdvb2Q=","LnNj","X19fX19fX19fX19fX19fXw==","RFI=","Q29uZmlndXJhdGlvbg==","RGF0ZVRpbWU=","IGFkdmVydA==","IGNvdWxkbg==","YXN5bmM=","c3RhY2s=","JykNCg==","S2l0","IGhvdXM=","IG1lY2hhbg==","cmF0ZQ==","MjA0","IGF1ZGlv","CWNvdXQ=","Y29yZXM=","IHNwb3Q=","IGluY3JlYXNpbmc=","ICMj","KSkp","cG9pbnRz","IGNvbXBhcmVk","bGln","IGJlaGF2aW9y","IEJZ","IEF0dA==","Y3JhZnQ=","aGVhZGVycw==","ZXRl","ZW5kcmVnaW9u","IGRldGFpbA==","VUxF","IENvbW1vbg==","CXByb3RlY3RlZA==","c3Rvbg==","IEZJVE5FU1M=","IGZyZXNo","Ij4KCg==","LmV4YW1wbGU=","YmVyZw==","IG1vdmVk","CWU=","IFNhdHVyZGF5","IHBheWxvYWQ=","xIc=","KToKCg==","IGJleQ==","dXJlcg==","PHNjcmlwdA==","IHN5bWJvbA==","IGFzc3Vt","IHB1bA==","RWZmZWN0","IGh1bmRyZWQ=","VG9vbA==","YWtlZA==","Y29ubmVjdGlvbg==","IHZvaWNl","IHBk","IHRyYW5zYWN0aW9u","IGxpbmtz","RXJy","IEluZGlhbg==","VEM=","YXRhbG9n","bmk=","c2lnbg==","PDwi","amk=","eWE=","IGRlbW9uc3Ry","dWxhdGVk","LlN0","IGluc3RpdA==","IGJvb3N0","IGNlbGxz","b2xpYw==","LlBybw==","Ojwv","RXZlbnRMaXN0ZW5lcg==","aWZ5aW5n","IERp","b3Jyb3c=","LmV4ZWN1dGU=","IGNvbGxlZ2U=","WW91cg==","IGxhcmdlc3Q=","LmRpcw==","IHF1aQ==","IGluZGl2aWR1YWxz","X2J1ZmZlcg==","IG5n","U0E=","IENvbnRyb2w=","IHNpbmc=","IHN1aXQ=","ICAgIAk=","U0c=","IGp1bXA=","IHNtYXJ0","b21h","IEV4cA==","ICct","IGFzc2lzdA==","IHN1Y2Nlc3NmdWxseQ==","c3lz","IENyZQ==","X3JlZg==","IFRodXJzZGF5","IGJ1cg==","INC0","IGJleW9uZA==","IG5vZGVz","RGV0YWlscw==","aW5jdA==","IEphbWVz","IGFmZmVjdA==","ZXhjZXB0aW9u","IHR5cGVvZg==","KA0K","LXNl","IGZldGNo","YCw=","IGNydXNoZXI=","fS4=","IEJP","U2hvdw==","IHJhdGVz","IGJvbg==","LWljb24=","IE1lZGlh","UkVTUw==","IFZhbGlk","0L7Quw==","IGZ1Y2s=","YWNrcw==","IHN0dWRpZXM=","TWU=","IG93bmVycw==","fWVsc2U=","IGdyb3dpbmc="
,"VmFyaWFibGU=","IEJlbA==","LnJhbmRvbQ==","dmVtZW50","b255bQ==","KEY=","IEZBTFNF","IHRvcmNo","KHJvdw==","aWdv","c3RydWN0dXJl","MTIx","IGNlcnRhaW5seQ==","RGVw","IEdyZWVu","cXVlc3Rpb24=","IGFkZGluZw==","IERldmVsb3A=","X2RlZg==","IG1hY2g=","PSU=","CQkg","Y29uZHM=","UHJvamVjdA==","IHJlamVjdA==","IM4=","IHBvb3I=","IGF3YXJl","MTE0","IEJ1aWxk","IEJyaXRpc2g=","IE5F","IG51bWVy","cmVlcw==","Y2xhaW0=","IG1vY2s=","IG9t","IHNjcmU=","T0xE","LnBs","ZWxlcg==","IGNvcnJlc3BvbmQ=","X0hF","IGJpbmFyeQ==","MTE2","X29yZGVy","IFNRTA==","IGFkdmFudA==","IHByZXY=","Lls=","LmFzc2VydEVxdWFs","cGxpZXI=","YXJw","IGNsb3NlZA==","IGVuY291cg==","IFFTdHJpbmc=","YXVk","IGRldmVsb3BlZA==","IHBlcm1pc3Npb24=","LmRlYnVn","b3BlcmF0b3I=","ICcK","IHN5bQ==","YXRpdmVseQ==","w6ll","LWNvbG9y","IEdFVA==","a3k=","IGFsdGhvdWdo","X3JlcXVlc3Q=","X2VsZW1lbnQ=","Li4uLi4uLi4uLi4uLi4uLg==","X0RBVEE=","IGFtYXppbmc=","IHNi","IERlZmF1bHQ=","RXZlbnRz","IGZhaWx1cmU=","YWNsZQ==","UHJvcGVydGllcw==","IGRyZWFt","IGRpc3Ry","IGF1","IGdlbmVyYXRlZA==","5pU=","IFRlYW0=","VVNF","IGluY29tZQ==","IGV5ZQ==","X25vdA==","Il0s","X2Zvcm0=","U3VwcG9ydA==","b3JkZXJz","LlByaW50","dmlsbGU=","IFdlZG5lc2RheQ==","b2x2ZXI=","IG9wcG9z","aXNhdGlvbg==","b2xh","Q2xvc2U=","PHA=","X3dpZHRo","SW52YWxpZA==","eGI=","IHN0cnVnZw==","X2FjdGlvbg==","IHR4dA==","IFBhdGg=","YWxhcg==","IE1FUkNIQU5UQUJJTElUWQ==","c2VydmljZQ==","IE1pY2hhZWw=","YWJsZVZpZXc=","RGVidWc=","b2tlcw==","U2hl","IGd1ZXNz","IEphdmE=","X1BBVEg=","IHBhcnRpY3VsYXJseQ==","IElJ","IGRvbWFpbg==","5bm0","IHJlZHVjZQ==","LWxlZnQ=","cmVhbA==","IGFwcGVhcnM=","IGNvbW8=","IFVuaXQ=","IEdvdmVybg==","YWxp","YWxsZWw=","IEpldw==","X0k=","IGNvcw==","LmNvbG9y","IEdsb2JhbA==","IHRlbGU=","YmVu","X3RyYW5z","IHJlYXNvbnM=","IGVtYg==","ZW5zaXR5","bGluZXM=","b21pbg==","U2NyZWVu","0LDRgg==","cGVjdHM=","Y2xpcA==","Zm9v","cmVudA==","IGFm","IGRhbmdlcg==","aWxpbmc=","TmFtZXM=","T3Vy","IGRpc3RyaWJ1dGlvbg==","V2hpbGU=","U0w=","V3JpdGU=","IGdvdG8=","IGNvbG9ycw==","IHBvd2VyZnVs","a2lu","IGRlcHRo","ZXJjaWFs","IENvbmdyZXNz","IE1hcmtldA=
=","RGI=","dW5kZXI=","IExhc3Q=","w58=","Z3JlZw==","IHBvc3Rz","X1VSTA==","b3Rvcw==","RG9u","IG1pY3Jv","IGFycmVzdA==","0L8=","IChA","IEhvdA==","IEluZGV4","OyY=","IyE=","IE5vcg==","IENhcA==","LSg=","IGludGVyZXN0ZWQ=","cGVhcg==","IHJlbnQ=","IGFsYnVt","b2xpY3k=","Lmxhbmc=","LnRyYW5z","LmZvcm1hdA==","IHsNCg0K","cGhlcmU=","IGF4aXM=","IEJ1c2luZXNz","ZXJzaXN0ZW5jZQ==","dXJy","IG1pbmltdW0=","ZW5kb3I=","IFNE","MTEz","IEludGVybmV0","5aQ=","RXhw","aXZlcnNl","TU0=","IG9idmlvdXM=","IGJhc2lz","IHNjaWVuY2U=","IGJ1ZGdldA==","aXphdGlvbnM=","UEE=","IGZsYWdz","cHJldA==","TE9DSw==","IHZhcmlldHk=","IHRydXRo","ZHQ=","IGdvbmU=","IGJhdHRsZQ==","PHN0ZA==","IFNpbA==","cmY=","dWRh","IGVyb3Q=","IENhbQ==","IHN0YXRpb24=","ICc8Lw==","Y2hlbWU=","IFN1bg==","IGZpbmlzaGVk","IHNob3A=","IEtvcmU=","IGVpZ2h0","X1JFRw==","TkQ=","Piw=","Ij48Pw==","KG51bQ==","CWlubGluZQ==","VHJhbnNhY3Rpb24=","Lk9u","IG1haWw=","cmV5","cmVzdWx0cw==","IG5hdg==","SU1JVA==","X2lkcw==","TWFrZQ==","5Yo=","TW9kYWw=","IExPRw==","IFN1cg==","IGluc3RhbmNlb2Y=","IG92ZXJhbGw=","IEluZm9ybWF0aW9u","IGNvbnN0cnVjdGlvbg==","X0ZJTEU=","YnV0","IG1lZGlj","IGR1cmF0aW9u","aXRuZXNz","YWdlbnQ=","QVY=","IHNldmVu","b2xm","IH19Cg==","Il0sCg==","MTcw","MTIy","IGNhbGxpbmc=","IGFucw==","dGhyb3dz","b3Jpem9udGFs","IHVzZVN0YXRl","LmZs","IFN0YXR1cw==","IE9ubGluZQ==","UlI=","IFJpY2g=","IEhpbGw=","IGJyYWlu","IGZvbGxvd2Vk","MjQw","ZW1pYw==","IHNsaWdodA==","IGluc3VyYW5jZQ==","LkFycmF5","IGFic3RyYWN0","IFN1bQ==","cmVkaXJlY3Q=","b3duZXI=","KG1zZw==","IENsaW50b24=","Tm9u","CWV4","IHZvbHVtZQ==","IEV2ZW50QXJncw==","LUw=","IERpbQ==","IE1hcnQ=","IGN1cnNvcg==","IGltcGxlbWVudGF0aW9u","dXJyZWQ=","IGxhcmdlcg==","KTsKCgo=","Jys=","LnRyYW5zZm9ybQ==","IHVwbG9hZA==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","RHJhdw==","bmVs","CWZsb2F0","cXJ0","IE5ldHdvcms=","IHRpdA==","QXhpcw==","LmFuZHJvaWQ=","IGNvbXBsZXRlZA==","IG11cg==","IGNvbHVtbnM=","eGM=","IHN1cHBseQ==","aW1pbmFs","IHNwcg==","PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT
09PT09PT09PT09PT09PT09PQ==","IHVuaXRz","KHU=","bWk=","cmVwbGFjZQ==","W2tleQ==","4Lk=","YW50aWM=","IHBheW1lbnQ=","LEI=","IEFwcGxl","Z2lu","UmVxdWlyZWQ=","Iys=","bGFuZHM=","IHNxdQ==","IGZhY3Rvcg==","ZGVj","IHN0cmVuZ3Ro","IGJveQ==","IGJhbGFuY2U=","IHNvdXJjZXM=","c2NyZWVu","LXRvcA==","IEFtYXpvbg==","IGhpZGRlbg==","0LXRgg==","X2NsaWVudA==","IGVhdA==","LmRpc3BsYXk=","IMK7","IHRyaWdnZXI=","YW5hZ2Vy","IHRybw==","IGNsYWltcw==","Zm9yZA==","IENvbXBhbnk=","IGdpZnQ=","LDo=","X2FwcA==","aGFuZGxl","IHByb2R1Y2U=","L2xpYg==","NTEy","IC0q","CXNldA==","J107","YXJj","YW5kZXI=","IEVuZ2luZQ==","IGF0dHJpYnV0ZXM=","dGFzaw==","PD0=","KE4=","IHdhcm0=","d2hpY2g=","IEZvcmU=","YWdub3N0","bXlz","IHRhbA==","IFNhbA==","Z2k=","IFByaW50","IFRSVUU=","INC+","LlVJ","IGZsYXNo","cm9wZXJ0eQ==","LmxvY2F0aW9u","IE1pbGw=","Ymk=","Y29udHI=","LnJlcXVlc3Q=","IFNhbQ==","IG5lZ2F0aXZl","a2l0","IHNldHQ=","LnByaW50U3RhY2tUcmFjZQ==","YWJl","CWk=","IGJ1cm4=","IHNvY2lldHk=","Q2FjaGU=","IFNlY3VyaXR5","Lm1vZGVscw==","IFdBUlJBTlRZ","X3Vw","Y2VpdmU=","IGNsaWVudHM=","LlRy","IHByb3ZpZGluZw==","IHJvdXQ=","bWF0ZXJpYWw=","IHx8Cg==","IFNlcg==","IE9mZmljZQ==","RlRXQVJF","ICck","IGZvYw==","IGV4Y2VsbA==","IGNhdA==","bm9ybWFs","IGRldGVybWluZQ==","CXVpbnQ=","UGFuZQ==","IGVtcGxveWVlcw==","IFRleGFz","IHRyYWZm","IFJlcG9ydA==","YW50YQ==","IEJveA==","IGRqYW5nbw==","IHBhcnRuZXI=","RUI=","TElORQ==","IGZlZWxpbmc=","IGNpdmls","KGZsb2F0","U3Fs","IHdvdWxkbg==","LmluaXQ=","LmxlZnQ=","LXY=","X2xldmVs","J30=","QUY=","IGxvYWRpbmc=","IE9ubHk=","IGNvb2tpZXM=","IEds","Q08=","IHN0cmF0ZWd5","KCcuLw==","IHNoaXA=","cG9zZXM=","IHNpZ25hbA==","IGFscGhh","LnBvcA==","UmFkaXVz","IHJlcGxhY2U=","X0RJUg==","Y291bnRlcg==","YnNlcnZhYmxl","ZWxh","V2VpZ2h0","aGFzaA==","Ym9zZQ==","Zng=","IEVtYWls","IHJlZmVy","bG9jYWxob3N0","X1JP","aXF1ZXM=","U3RlcA==","IGFoZWFk","KFZpZXc=","IFNlcnZpY2Vz","IEpzb24=","ZXNzb3I=","IHB1bg==","IGFwcHJvcHJpYXRl","YWtlcnM=","b3Nlbg==","cG9zaW5n","IGFnZW50","ZmM=","IHRyYW5zZmVy","IGludmFsaWQ=","IFJlc2VhcmNo","VmVydGV4","IGdheQ==","IGpvdXJuYWw="
,"W3g=","ICIiLAo=","IFdlbGw=","LlRhc2tz","U3BlYw==","IG9s","IHNwZW5k","IEF1c3RyYWxpYQ==","TWF0Y2g=","Lmp1bml0","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","IE1BWA==","aXphYmxl","Y2x1c2l2ZQ==","X3ZhbGlk","IHF1YXJ0ZXI=","eWFu","MDA1","IEVkaXQ=","YXJkZW4=","PW5ldw==","IGZyYWc=","Qml0","emk=","YWluZQ==","dWRk","Lk9iamVjdA==","ZGVidWc=","IGNhc2g=","X0lN","IGVlbg==","IGNvbW1lcmNpYWw=","IFZpZGVv","bG9hZGVy","IGZpeGVk","IGFwcGxpY2F0aW9ucw==","IF8s","IFJ1c3NpYQ==","aXRlY3Q=","Xyg=","IEJsb2Nr","IHNhbg==","IFRvbQ==","IHBlcmhhcHM=","IHNpZw==","bGV2YW50","IGNvcnBvcg==","YXRhc2V0","cm9uaWM=","eGU=","IGV0aA==","U29tZQ==","cG9w","X09L","IHRlbmQ=","LlJlcw==","X2FuZA==","IHJldmlld3M=","IHdpbGQ=","MTE3","IGRlZ3JlZQ==","Lk8=","Lm9iamVjdHM=","X2FyZ3M=","bmls","IGRpc2FibGVk","UGFyZW50","IG5vdGVz","ICIiCg==","KHN0YXRl","aXN0cmljdA==","IGxvZ2dpbmc=","LklP","IE1hbA==","RE0=","IHhtbA==","IFJvYmVydA==","ZWxlbg==","bGF5b3V0","Zm9s","J10pKQ==","LGI=","IEplcg==","ZmlsZW5hbWU=","IGZhbg==","IEN1c3RvbQ==","PSIi","IERpZQ==","QnVuZGxl","LnV0aWxz","IHRyaXA=","TUI=","IHNvZnQ=","X01PREU=","IGFwcGxpY2FibGU=","IHVwcGVy","RVJWRVI=","X2Fs","X0xPRw==","SGVyZQ==","d3A=","IFNlcnZlcg==","IENsaWVudA==","IGNoZW0=","U2Nyb2xs","IGhpZ2hlc3Q=","IFNlbGVjdA==","ICJA","IFdoeQ==","U2Vj","aGVlbA==","T3BlcmF0aW9u","IGNvbm5lY3RlZA==","aXJtZWQ=","IGNpdGl6","IENoZQ==","IGZvcmNlcw==","IHd3dw==","Um9vdA==","QU5DRQ==","TWFueQ==","aWNpcA==","cmdhbg==","MjIw","IFRvcg==","IFByZXNz","IE1vcg==","LWxpbmU=","dWxlZA==","Plw=","IHRodXM=","IFJlZ2lzdGVy","aG9s","IENoaW5lc2U=","IHBvc3RlZA==","IG1hZ24=","YWJpbGl0aWVz","IGRpc2Vhc2U=","IHJlbWFpbnM=","IFByb2Y=","LWZvcm0=","IGNpbg==","b3JnYW4=","aWNhdGU=","IHN0cmVzcw==","XSo=","IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0=","X2NvbnRleHQ=","b3JyeQ==","IGRpZWQ=","bWF0","IHN0YXJ0cw==","Lk1lc3NhZ2U=","IHJ1bnM=","IGd1aWRl","IHdhcnJhbnR5","ZW50aWFscw==","ZGljdA==","IFNpemU=","dWxlcg==","IHJlc3BvbnNpYmxl","X1NFVA==","IGNvbnRhaW5pbmc=","IFByaWNl","fH
w=","MzUw","RlM=","IGVtcA==","X2J1dHRvbg==","KHVpbnQ=","IHN1ZmY=","cHRo","IGRlZmluaXRlbHk=","cHV0ZQ==","IG1hcmtldGluZw==","IFdI","IFNpZQ==","Kz0=","T0xPUg==","IGNvbnN1bHQ=","IHNpZ25lZA==","IHNlcXVlbmNl","bGVl","IHJlcXVpcmVtZW50cw==","aHk=","RXhwcmVzcw==","TVQ=","c2V5","IHVsdA==","5a4=","ZWxsaWdlbmNl","IGFuYWx5","IGRyZXNz","ZW5naW5l","IEdyZWF0","IEFuZHJvaWQ=","IEFsZXg=","bW9kZQ==","RGljdGlvbmFyeQ==","LkRhdGU=","5L0=","VklDRQ==","IGZhbWlsaWVz","IFJ1c3NpYW4=","IFRpbWVz","LmNhbGw=","JCg=","UHJvZmlsZQ==","IGZvbGRlcg==","Y2hlcw==","IGxlZ2lz","X3Jvdw==","dW5lcw==","2YQ=","IH0pLg==","QXNzZXJ0","YWdlbg==","IEhhbmQ=","SXRlcg==","IGJpZ2dlc3Q=","b3JlYWNo","IHBvbGlj","IHBlcm1pc3Npb25z","IHNob3dlZA==","IEVsZW1lbnQ=","IHRvcGlj","4oCU4oCU","cm9hZA==","IEJhbms=","cmVjb3Jk","IHBhcnRuZXJz","IFJlZg==","ZXNzaW9ucw==","IGFzc2Vzcw==","VVNU","IFBhcnR5","cHJvZHU=","TEM=","IHVs","LmZvcm0=","aGlkZQ==","Y29weQ==","VVRG","IFNPRlRXQVJF","DQoNCg0K","IExpbg==","dW5h","dWdhcg==","IGFkbWluaXN0cmF0aW9u","IG9wZW5pbmc=","IHNjYW4=","IGNvbnRpbnVlZA==","Y29tcG9uZW50","LnNw","IGhhcHBlbnM=","dW1teQ==","IFBS","LkZpbGU=","IERvd25sb2Fk","TG9hZGluZw==","ZGk=","IHdhaXRpbmc=","X0FERA==","VGFi","LnF1ZXJ5U2VsZWN0b3I=","IGVjb25vbXk=","IEZyZW5jaA==","dHh0","IGZhbnQ=","XzsK","SG9sZGVy","U0g=","MDA0","IG51bXB5","IHN0cmVldA==","IG1hbGU=","XE1vZGVs","YW5naW5n","MzMz","IEJpbGw=","IHByZXZpb3VzbHk=","Qkk=","IFNlY3JldA==","IG1pc3Q=","IEZpZWxk","dXBz","IFByb2Nlc3M=","IGtlcHQ=","IE9U","IHRyYWRpdGlvbmFs","Lmk=","YW1pbg==","IGhlbHBz","QW55","b3JpZ2lu","aWx0ZXJz","anU=","ZGVzYw==","IEFjY291bnQ=","ICkNCg==","a3RvcA==","b2xseQ==","IGZz","IOo=","IHV0","IGNlbnRyYWw=","KHRlc3Q=","LkFu","IHNhdGlzZg==","R1I=","IEZ1bGw=","IGhlYXQ=","aWJlcg==","IG9udG8=","bW9z","U2NoZW1h","IGZhY3Rvcnk=","Ii4k","YXdz","U3RhdGVtZW50","KHRhcmdldA==","CW5ldw==","LmJl","IGd1ZXN0","IG1hbA==","QVJZ","IHJlYWNoZWQ=","IG1vdXNl","IGNoYWxsZW5nZQ==","CWRvdWJsZQ==","IFRlbQ==","IHRlcnJvcg==","IGV4dHJhY3Q=","X1RP","IHNlcGFyYXRl","IG1pcg==","aGVscA==","IGNhcGFjaXR5","IFBy
b3BlcnR5","a2Fu","X2NyZWF0ZQ==","IExpZ2h0","LnBhcmVudA==","IHVuZGVyc3RhbmRpbmc=","IGVhc2llcg==","IHw9","IGVuaA==","IGZhdA==","IHByb3Rlc3Q=","YW1t","X0FU","LW9m","aWxz","IE9o","IHBzeWNo","ICQu","aW5kcw==","IHJlbGF0aXZl","c2hvcA==","c2hvcnQ=","IFNhbmQ=","MjEw","dWVzdGlvbg==","IGZlYXI=","LwoK","LmNvbnRleHQ=","IHNjaG9vbHM=","IHNlcnZl","em9uZQ==","X2Ri","IG1ham9yaXR5","ZXhhbXBsZQ==","IGxhbmc=","CSAg","UmVnaXN0ZXI=","ZW5kbw==","IHByb2Nlc3Npbmc=","X3RlbXBsYXRl","LXVzZXI=","IGVn","Q09N","IEJsdWU=","aXJv","IHJlbW90ZQ==","IElU","IyEv","IHJlZGlzdHJpYg==","MTI0","cmF6","IFNpbmNl","IFR1cg==","MTM1","QmFja2dyb3VuZA==","PT09","IHJlZmxlY3Q=","IHByb3M=","Y21k","IHdob20=","Q29tcGF0","IEFyZQ==","SWRlbnRpZmllcg==","IFRob20=","X3BvcnQ=","Z3U=","IG1vbml0b3I=","cm0=","IHBhdGllbnQ=","dmVydGVy","IGdhaW4=","LXVp","SW5zdA==","IGRpZXM=","MTE4","QXJlYQ==","X2ZpbHRlcg==","IGdyYXQ=","IHJlYWxpdHk=","b3JkaW5hdGU=","b2x2ZWQ=","Q29udGFjdA==","IGNvbXBsaWFuY2U=","X29y","IFZhcg==","ZGw=","IGFwcGVuZA==","R0VS","KG1heA==","LnJlbmRlcg==","IGR5bmFtaWM=","b3JkaW5hdGVz","X29wdGlvbnM=","X2NvbHVtbg==","IGJhdHRlcg==","c3BhY2U=","TGE=","IFNvdXJjZQ==","L2Jpbg==","IGRvcw==","IEJvYXJk","IFRocmVhZA==","IEFM","KGNvbmZpZw==","MTQ0","IE1lcg==","IG1pbGVz","X2hlYWRlcg==","RVRIT0Q=","aXp6","IGJlbmVmaXQ=","IGludGVncg==","KGN1cnJlbnQ=","dWxv","LmRlZmF1bHQ=","IERpdg==","IHRvbg==","b3Ro","ZXJ2YXRpb24=","ZWRvbQ==","IGJhYnk=","Y2VpdmVk","LnRvcA==","cmlvcml0eQ==","IExvY2Fs","cmlhZ2U=","IGF0dGFja3M=","IGhvc3BpdGFs","MTY4","IGZlbWFsZQ==","IExvZ2lu","IEZsb3I=","IGNoYWlu","YXNoaW9u","VGV4dHVyZQ==","U2F2ZQ==","IGZhcm0=","LmNvbnRhaW5z","LlRlc3Q=","IGtub3dz","IGdlbmVyYWxseQ==","aXBlbGluZQ==","IG1lYW50","ZW5jaWE=","IG5pY2h0","IGNvbnRlbnRz","UE0=","Y2hlZHVsZQ==","KGxpbmU=","Q0c=","am9i","IFJlYWw=","dWVy","ZmlybQ==","INg=","ZXRybw==","ImAK","IHNwZWVjaA==","IHRocg==","Zm9yZWFjaA==","IHdhcm4=","CWw=","IGhlYXZ5","PGxp","TmU=","IGludmVzdGlnYXRpb24=","TWF0aA==","LXRpdGxl","IGNodXJjaA==","IGRlc3BpdGU=","Y2hhaW4=","IHdoYXRldmVy","YXJpYW4=","Zm4="
,"IG1ldGE=","fSkKCg==","VUZG","IHJlZ2FyZGluZw==","X1NVQ0NFU1M=","bWVz","IEludGVudA==","IHJlc29sdmU=","cG9zcw==","aXJh","Zm9yY2U=","b2ljZQ==","w6I=","IHBt","IHVwZGF0ZXM=","QXJy","INE=","dGVzdGluZw==","IHRvd2FyZA==","bnRheA==","64s=","IGxpc3Rlbg==","IGdvYWxz","SW5zdGFuY2VTdGF0ZQ==","RHI=","IHJhcmU=","IHRyYWls","S2V5cw==","Q2Fs","Q2Fy","IFBlb3BsZQ==","CWxvY2Fs","Y2xhc3Nlcw==","UmVmZXJlbmNl","LmZvckVhY2g=","ZW1i","YWN0aXY=","IHByaW0=","cmVkaWN0","IHJhZA==","5pWw","LkJhY2s=","IHNwcmVhZA==","IGNsb2Nr","IHZpcg==","ZWRpdG9y","IGVmZm9ydHM=","IGJyYW5jaA==","IGluZHVzdA==","IG1vdG9y","IGFtYg==","IGRhdGV0aW1l","IHJlbmNvbnQ=","IENocmlzdGlhbg==","IEFtZXJpY2Fucw==","ZnVsbA==","IGZtdA==","Lm1haW4=","IGNhdXNlZA==","X3VwZGF0ZQ==","IENvbnRlbnQ=","QVRDSA==","IGJhdGg=","IEVhY2g=","IHJhZGlv","YWNobWVudA==","dXp6","U3VibWl0","IHJlc3RyaWN0","YWJpbg==","IExvYWQ=","IGV4dGVuc2lvbg==","IGVzc2F5","IGhhdA==","YXZpb3Vy","dG9CZQ==","Ijpb","IG9mZmVyZWQ=","IHZpbGw=","KGRvdWJsZQ==","MTE5","5pel","YmM=","X2ZyZWU=","IE1pc3M=","IEJlcg==","IOg=","IExpa2U=","IGhlbHBlZA==","LmdldE5hbWU=","X0FM","IHNwaXJpdA==","IEFwYWNoZQ==","d3M=","IHRoZXJlZm9yZQ==","KHBhcmFtcw==","X2ltZw==","IHBlYWNl","IGluY29y","IEVYUEVDVA==","IG1pbm9y","aXBlcw==","CWRhdGE=","c2VsZWN0b3I=","Y2l0eQ==","dHJpZQ==","LmJhc2U=","X2ZyYW1l","IG9wZW5lZA==","L2pzb24=","TFk=","bnU=","LkRl","dGY=","bWFyZ2lu","LlBhcnNl","IHBp","IGVx","YmQ=","RmllbGRz","IFRyZWU=","IGJhbg==","aXN0YW4=","CiAgICAgICAgCg==","CWds","IHByb2R1Y2Vk","c3lzdGVt","TWFyaw==","X2hhc2g=","IGJn","IGNvbnN0aXQ=","IExlYWd1ZQ==","IG1pc3Npb24=","X2Zvcm1hdA==","KFsK","Y2x1c2lvbg==","ISI=","0Lc=","YnJlYWs=","CXN3aXRjaA==","IHRoZXI=","VHJhbnNmb3Jt","IGZvb3RiYWxs","LWxpbms=","cm91dGU=","LmF1dGg=","IGJhZw==","b3ZlcnM=","IGVuYWJsZWQ=","IHJhYw==","KEk=","Q1I=","YW5jaW5n","IG1hbmFnZWQ=","X3E=","TkdUSA==","IG1hYw==","IEF1dG8=","YW1lbnRl","ICcnLA==","LkFwcGVuZA==","IHBpbg==","Lml0ZW0=","YWNraW5n","IG9jY2Fz","cGVyc29u","IHRp","LlJlZw==","IGhhdmVu","IGdsYXNz","ICI8Lw==","IFNpbXBsZQ==","UHJpbnQ=","IHN
1cnJvdW5k","Tk8=","44CCCg==","ICAgICAgICANCg==","IE1hbnk=","ICJf","IHdlZWtlbmQ=","IHNvbWV3","LnBhcmFtcw==","c21hbGw=","QVRFRA==","IHBsdWdpbg==","ZmllbGRz","IEluaXRpYWxpemU=","b29u","YXRpbGU=","eWU=","IHZvdXM=","TEFH","IG9sZGVy","IGdhbQ==","IGV4dHJlbWVseQ==","IGhldA==","ZW51bQ==","IFNFVA==","eGZm","IHRpbWVy","L2luZGV4","IGNyaXRpY2Fs","Um93cw==","X2FyZ3VtZW50","IGV4ZWN1dGU=","IHNob3dpbmc=","LnhtbA==","LWxpc3Q=","Um9sZQ==","dHlwZW5hbWU=","X21ldGhvZA==","dGhhdA==","Y2hlcg==","IOKG","WFQ=","IHRob3VzYW5kcw==","CW4=","IHJlc3A=","X3ByaWNl","b2x1dA==","QWc=","IFR3bw==","IGJlY29tZXM=","IGh1cw==","LlVzZQ==","dGhlbWU=","dXJi","IC8qCg==","ZXJpYWxpemU=","QVJO","IGxvc2U=","TG93ZXI=","IHZlbA==","IGRlZmVuc2U=","Y29uZGl0aW9u","IGJlcw==","IGRyeQ==","IHNjcm9sbA==","LlNob3c=","SUVM","0L7RgA==","IFJlc3Q=","V2hlcmU=","b29kcw==","IEplcw==","IHdpcmU=","X0lORk8=","IHN0cmluZ3M=","Z21lbnQ=","IG1hdGNoZXM=","IGVsZWN0cmlj","IGV4Y2VsbGVudA==","IENvdW5jaWw=","aWRhZGU=","IHd4","cHVzaA==","X2VudHJ5","IHRhc2tz","IHJpY2g=","c2E=","IFNtaXRo","VU5DVElPTg==","UG9pbnRlcg==","cGVjdGl2ZQ==","MTMx","IHdpZGdldA==","aXN0YQ==","IGFnZW5jeQ==","IHNpY2g=","b2xvZ2llcw==","IHRyaWFs","YWx5c2lz","LmNoZWNr","QVJL","IG9uQ2hhbmdl","YWJvdXQ=","Jywk","KHZhbA==","IHBsYWNlZA==","X05P","IGRhbg==","LmVxdWFs","CSAgICAg","IHdlYXRoZXI=","LmdhbWU=","IGRlc3RpbmF0aW9u","X1VTRVI=","aWVjZQ==","IHByb3ZpZGVy","Lmxhc3Q=","cGxleA==","Tm90ZQ==","L2pz","IHDDpQ==","IHBsYW5uaW5n","YXR0cmlidXRl","UFJP","YXRjaGVz","IDwt","IHNlZWluZw==","IGNhbmNlbA==","X2luZA==","LmtleXM=","IHZpc3VhbA==","IEN1cnJlbnQ=","IENvbGxlZ2U=","IFJvY2s=","IGFncmVlbWVudA==","IFN0b3Jl","b3Zpbmc=","IGNvcm5lcg==","YW1waW9ucw==","SVNF","Rmlu","IHByb3RlY3Rpb24=","IGZp","UGxheQ==","cGx1Z2lu","KX0=","LmZyYW1l","LXo=","IHRyYW5zaXRpb24=","aWdpbg==","IGNhbmRpZGF0ZQ==","IFVuaW9u","X3ZhbHVlcw==","KG1hcA==","Y2xl","IHRyZW5k","d2lkZQ==","YXJlbg==","TG9j","VVRI","IEJheQ==","IHNtYWxsZXI=","aXVz","MTQx","d2VsbA==","IGNyaW1pbmFs","IGNvbmZsaWM=","YmVydA==","X0lOVA==","IGludmVzdG1lbnQ=","Y3Vzd
G9t","IFNlc3Npb24=","X3dyaXRl","YW5pYQ==","IE1hc3M=","X0VR","X05PVA==","IHZpb2xlbmNl","QXJndW1lbnQ=","X2VtYWls","IGJlbG9uZw==","X2Z1bmN0aW9u","IGVuZW15","ZW1h","IEFkZHJlc3M=","LmVtcHR5","IGlubmVy","IENvbnRhY3Q=","TG9hZGVy","PGlucHV0","IENB","bG90","IHBpY3R1cmVz","IFN1cHBvcnQ=","X25hbWVz","MTg4","TGF5ZXI=","IENsaWNr","U3Vt","w6Y=","IExvb2s=","dW91cw==","TGli","RmxhZ3M=","dGVhbQ==","RVA=","MTg5","aGF0","b3ZlcnJpZGU=","YXBzZWQ=","IGxhYmVscw==","cXVpcw==","IFN0cmVhbQ==","X2RldmljZQ==","IENvbW1pdA==","KHJvb3Q=","In0=","LmlzRW1wdHk=","MTI2","CU0=","IGFuZ2xl","IEJlY2F1c2U=","JSUlJSUlJSU=","IGFpbQ==","IHN0aWNr","c3RtdA==","YWdyYXBo","YW5zd2Vy","IGNsaW4=","IElzbA==","LmV4dA==","IElOVA==","IHN0eWxlcw==","IGJvcm4=","IHNjcg==","IGV4cGFuZA==","IHJhaXNlZA==","VGV4dEJveA==","SUxM","LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t","SFRUUA==","MTMy","Pik=","X2NoYXI=","cmVzb3VyY2U=","IGVwaXNvZGU=","ICdf","IEVz","IEVhcnRo","wqDCoA==","VVBEQVRF","MTMz","IFNvdQ==","dWlz","dHlwZXM=","IG1hcw==","IGZhdg==","IGNvbnN0cnVjdA==","X3JhdGU=","ZXJhcw==","IHwK","cm9wZXJ0aWVz","IGV4dGVybmFs","IGFwcGxpZWQ=","IHByZWZpeA==","b3RlZA==","bGVycw==","IGNvbGQ=","IFNQ","IENodXJjaA==","IE91dHB1dA==","bG9zZWQ=","55o=","aWZpY2F0ZQ==","b3BlcmF0aW9u","aGVyaXQ=","eEZG","LmVudg==","X2Vycg==","b3No","RGlyZWN0aW9u","Q2FuY2Vs","IEZyYW5r","IGZpbmRpbmc=","LikKCg==","IHJvdXRlcg==","44O7","c2Vz","IGNyb3c=","PT0n","IHNhbmQ=","IHJpZA==","aXR1cmU=","IGVudHJl","IG9ic2Vydg==","IHZhYw==","8J8=","LVQ=","QXJ0","bmlnaHQ=","LnNlYXJjaA==","IGV4Y2hhbmdl","IGRpc3RyaWN0","Lm9z","IGRlcGFydG1lbnQ=","IGRvY3VtZW50cw==","IGNlbnR1cnk=","IE5leHQ=","SG9zdA==","IEtJTkQ=","IHN1c3A=","LVA=","cmVuZA==","LmVt","dWl0ZQ==","aXN0ZXJz","KGpzb24=","IEFubg==","d3Q=","YXRp","IEhUTUw=","d2hlbg==","RGlyZWN0b3J5","IHNodXQ=","PGE=","ZWR5","IGhlYWx0aHk=","IHRlbXBlcmF0dXJl","IEdlbg==","IG1ldGFs","IHN1Ym1pdA==","IERP","IGF0dHJhY3Q=","IHt9Owo=","IFdvcmQ=","IGxs","IHNlZW1lZA==","a28=","SUVE","IGxhYm9y","LkNvbnRleHQ=","IGFzc2V0","eW91","IGNhcnM=",
"IENvbHVtbg==","IHLDqQ==","IHNxdWFyZQ==","IE5TU3RyaW5n","4oCdLA==","YXBlcw==","Li4uCg==","IHRoYW5rcw==","KHByb3Bz","IHRpY2s=","IGV4cGVyaW1lbnQ=","IHByaXNvbg==","dHJlZQ==","LXRleHQ=","IElPRXhjZXB0aW9u","LXdpZHRo","X1NUQVRVUw==","ZmFzdA==","LWJvZHk=","LWhlYWRlcg==","IGd1YXI=","Y3JldGU=","IFRpbQ==","IGNsZWFybHk=","IFJlcHVibGljYW4=","IGp1c3RpZnk=","0LjRgg==","CSAgICA=","Y2FjaGU=","Oy8v","IHByZXNlbmNl","IGZhY3RvcnM=","IGVtcGxveWVl","XSkp","TWVtYmVy","IHNlbGVjdG9y","Ym9y","IE1leA==","55qE","dXRleA==","X3RhZw==","YWlsdXJl","IE5ldA==","IHJlbGk=","RUc=","IGZwcmludGY=","IHRlZW4=","bG9zcw==","IGxlYXZpbmc=","MTM0","RGVsZWdhdGU=","IGJlYXQ=","IG1pbnV0ZQ==","c3Vic2NyaWJl","IHJlZGlzdHJpYnV0ZQ==","Q29uc3RhbnRz","IGNhbmNlcg==","L3s=","Qkw=","IHNwYW4=","IENoaWxk","Q2VudGVy","IGVhcnRo","WVM=","IExldmVs","IHNlYQ==","LnN1cHBvcnQ=","LmlubmVy","Lkl0ZW0=","aWxsaW5n","ICAgIAogICAgCg==","IExhYmVs","MzIw","IEVzdA==","KGFyZw==","MTQ1","Ym9Cb3g=","CWZvcmVhY2g=","Y29z","RmFpbGVk","c3dlcnM=","RWRpdG9y","cm9udA==","IE1Q","ZXhwcg==","IExpZmU=","ID8/","w7Zy","IGF0dGVuZA==","IFF1ZQ==","IHNwZWNpZXM=","LUQ=","IGF1cw==","U3RydWN0","IGFkdmFudGFnZQ==","b3N0b24=","LWJsb2Nr","aW5pdGlhbA==","Q1JF","IHRydWx5","IGNvbXBhcmU=","b3JuZXk=","IHNwZWN0","RnVsbA==","YmVz","IHZpc2libGU=","IG1lc3M=","c3RhbmNlcw==","IGNsb3Vk","X3ZlcnNpb24=","IGZ1cm4=","aWNhZ28=","TE9X","IHRyYWZmaWM=","IGZvbA==","cnlwdG8=","IGRlY2xhcg==","IHNsb3Q=","IEV4dA==","IEVuZ2xhbmQ=","IFVuZGVy","IHRh","bGV0dGVy","MjAz","IG9mZmljZXI=","IERvbmFsZA==","WWVz","X2pzb24=","SVRhYmxlVmlldw==","IFVTRQ==","bXBsb3llZQ==","IG9waW5pb24=","IEF1dA==","Ym9yZGVy","IGFkdmljZQ==","IGF1dG9tYXRpY2FsbHk=","aXNjbw==","IG1t","LnZpcw==","YW1s","IGluaXRpYWxpemU=","ICh7","IDsKCg==","IGdlbmVyYXRpb24=","IGJpdHM=","Y2xpcHNl","IHVuZg==","dXRvcnM=","cGx0","IGRlbHRh","ZXN0cm95","aXNpcw==","PGJy","IGxpbWl0YXRpb25z","IGVuZGVk","IE1hZA==","aWxt","VGhlc2U=","MTg3","IE1pbmlzdGVy","IGNoYXJ0","RnJhZ21lbnQ=","IGluZGVwZW5kZW50","WWVhcg==","IGluc3Ry","IHRhZ3M=","QVZF","IEFyY2g=","c3RvcA==","
UHJvZ3Jlc3M=","IG1p","IGxlYXJuZWQ=","R2U=","IGhvdGVs","MTUx","U00=","VFlQRQ==","IGN5","RVJTSU9O","dW5hdGVseQ==","bGltaXQ=","c2Vs","IG1vdmllcw==","IHN0ZWVs","b3o=","Z2I=","IENhbXA=","c2l0ZQ==","IExvZ2dlcg==","UExF","0L7QtA==","LnJpZ2h0","IENvcmU=","IG1peGVk","c3RlcA==","IHB1dHM=","c3VwZXI=","Um91dGVy","MTg2","Lkh0dHA=","MjIy","bHlwaA==","IENvbG9ycw==","IGFuZHJvaWR4","LnN0cg==","IGlubm92","IGRlY2s=","Jz4K","YXBlcnM=","XSg=","Y29udGludWU=","c3BlYw==","IFJvYWQ=","QVNI","aWxpYXI=","IGNvbnRpbnVlcw==","IGFwcG9pbnQ=","ICMK","IFZpcg==","ID8+Ig==","IGJpbg==","fSIs","Z29pbmc=","ZWFjaA==","QkQ=","MTg1","IEFjY2Vzcw==","RG9j","IE1hbmFnZW1lbnQ=","QkVS","YXNrZXQ=","LmdldEluc3RhbmNl","MTI5","IGVzdGFibGlzaGVk","c29ja2V0","SU5T","CXZpcnR1YWw=","CXJlc3VsdA==","UkVBRA==","X2hlaWdodA==","MTUy","IEZvbnQ=","ICgpOwo=","X2h0bWw=","IG5laWdoYm9y","bG9y","IGdhdGhlcg==","IH0pCgo=","IGlkZW50aXR5","IGZhYg==","cGFkZGluZw==","IFJvdXRl","RW51bWVyYWJsZQ==","w7Q=","IGZvcmNlZA==","L2pxdWVyeQ==","LgoKCgoKCg==","cmVzZW50cw==","X2xlZnQ=","LlBhcmFt","CXRocm93","IEhhbQ==","IGV2ZW50dWFsbHk=","YWNlcg==","cHVi","IHRyYQ==","dW5pcXVl","ZGVs","IEZsb3JpZGE=","IENsZWFu","eGE=","IMK3","IHZhbGlkYXRl","VmlzdWFs","RXhwcmVzc2lvbg==","X2Z1bmM=","bWVtYmVy","CWg=","dHJs","MTM2","CUc=","bmFwc2hvdA==","IFByb3BUeXBlcw==","dmlu","MTUz","XSkKCg==","b3ds","aWZpZXM=","ICQoJy4=","IENvbnRleHQ=","IFRvYXN0","LktleQ==","IG9mZmljZXJz","L24=","c24=","dW5kZWZpbmVk","Lml0ZW1z","dXRvdw==","YW1hZ2U=","IGFjY291bnRz","b29raWU=","U2VjdGlvbg==","aWNpYW5z","IGFkdmlz","KGlz","Wzos","IEZyYW5jZQ==","RnVuYw==","aWNpb3Vz","IHRvaw==","Q2hhbm5lbA==","IEFE","X05VTQ==","IHRpbWVvdXQ=","bGVtbWE=","cmVtZQ==","dWo=","LkFs","dWNsZWFy","KG9z","KCI8","Wwo=","ZmV0Y2g=","IGJhbA==","IGd1aWQ=","LWFsaWdu","IFdyaXRl","IE9uY2U=","dXRvd2lyZWQ=","T0RVTEU=","IHBpdGNo","Q0Y=","Ynl0ZXM=","IENvbW1pc3Npb24=","IGluY3JlZA==","UEVS","X3Jlc3BvbnNl","IExvcw==","cGFyc2Vy","IGFzc3VtZQ==","LlJlcXVlc3Q=","IFRva2Vu","X3Bvc2l0aW9u","IG5vbQ==","LXRlcm0=","IHJlbWFpbmluZw==","aW9zdHJlYW0="
,"IHBpZWNlcw==","YXB5","IExlc3M=","cmFuZ2U=","dW1ibg==","cHJpc2U=","X29wdGlvbg==","MjMw","SW1wbA==","a3dhcmdz","IGJ1c2luZXNzZXM=","QWxlcnQ=","IHBhcnRpZXM=","IENvbnRhaW5lcg==","IFByaXZhdGU=","IFBsYW4=","IHJlZ2lzdGVyZWQ=","IGpvdXI=","YWNrZXI=","0LXQvdC4","Lz4=","Y2hhdA==","c2VjdA==","IGNyZWF0aW9u","b2x1dGVseQ==","IGluc3RhbnQ=","IGRlbGl2ZXJ5","aWNrZW4=","eWVz","MTYz","IEZyYW5j","Ymxpbmc=","ZW5kYQ==","Wyg=","X3Jhbmdl","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","IHNjaGVkdWxl","Q29ubg==","IHRoYW5r","eGQ=","IGhvb2s=","IGRvY3VtZW50YXRpb24=","UGFyYW1ldGVycw==","SGVsbG8=","dnQ=","IGFydGljbGVz","IHdlc3Q=","ZGVmaW5lZA==","LnNlbGVjdA==","b2tlbnM=","IFZBTA==","LmZpbGU=","cmVzZXQ=","IG15cw==","IE1B","XSks","IGNpdGllcw==","cmVsYXRlZA==","5Zs=","IGFwcGVhcmVk","IHdpZA==","LnBhbmVs","IElucw==","LmVudGl0eQ==","IGRlY3Jl","IExvdQ==","KHRpbWU=","IFRoYW5r","LmNyZWF0ZUVsZW1lbnQ=","IG1lbnRpb25lZA==","b3VuY2U=","IFRyeQ==","IFdhbGw=","L2ltYWdlcw==","IE1lbnU=","Jw0K","IEVy","IGNyaXRpYw==","IFllYXI=","KHBhcmFt","IGZsbw==","Tk4=","b290ZXI=","IF07Cg==","IEFmZg==","ImdpdGh1Yg==","cm9vbXM=","IGh5cA==","Z2xvYmFs","IGF2ZWM=","5pyI","IGNvbXBsZXRpb24=","IGNvbmQ=","b255bW91cw==","KHRlbXA=","IHN0YXJz","IHJlbGV2YW50","IGNvdmVyZWQ=","IGVsaW0=","X3R5cGVz","KGJvb2w=","IHR1","X2V4aXN0cw==","IHNlY3VyZQ==","IHN0b3JlZA==","XS8=","eEY=","IENvbnRyb2xsZXI=","IG1pZ3I=","TUk=","IERlbg==","IGFubnVhbA==","VUlM","LWFuZA==","IGNyaW1l","YmVs","IGtpdGNoZW4=","QGc=","X3Bo","b3VybmFtZW50","IFNvY2lhbA==","IFNwZWNpYWw=","bG9nZ2Vy","IHRhaWw=","IHVua25vd24=","ZGVk","IGFwcHJlYw==","KGRi","Y2Y=","MTU1","IGFzc2lnbg==","LW91dA==","IE1vbnQ=","ZHA=","d2lkZ2V0","IHN0b25l","LXByaW1hcnk=","LmdyaWQ=","UmVzdWx0cw==","YXp6","IGRhdWdodGVy","IGN1cnI=","MTc1","IGxpbg==","IHNvdXRo","Zm9ybXM=","IE9VVA==","bGV0dGU=","YWtz","aWd1cmU=","IEVV","dmFyaWFibGU=","IGJyaWVm","IFNjb3R0","IGNvbmZlcmVuY2U=","YW5kYQ==","X2xvY2s=","b3JhbA==","IGVpbmU=","T1JT","Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLw==","ZXN
zbw==","IHJpcw==","IGdlbmRlcg==","ZXN0aWM=","TGljZW5zZQ==","KG91dA==","IG1z","U2Vl","IHdpbGxpbmc=","YXpl","IHNwb3J0cw==","IHllcw==","bHU=","IHB1cnM=","L2phdmFzY3JpcHQ=","LXBybw==","bmF2YmFy","X3Byb2R1Y3Q=","L2Jvb3RzdHJhcA==","IGRyaXZpbmc=","IMQ=","IHByb3Bvcw==","dWx0aXA=","dXBsaWM=","LmVtYWls","IGFwcHJveA==","KGNs","IHdlYXI=","IHJlcGx5","YXNzZXQ=","IGljZQ==","IHR4","a3I=","IEdlcm1hbnk=","IEdlb3JnZQ==","IGNi","CWVycg==","TW92ZQ==","IHBvbHk=","dm9pY2U=","fSI=","IGFuaW1hbA==","QXY=","IExvY2F0aW9u","IG5hdGl2ZQ==","XVsi","PGRvdWJsZQ==","IG1haXM=","LGludA==","IHByZXBhcg==","IGludGVydmFs","cGxlbWVudGF0aW9u","X0VSUg==","IGJ1Zw==","PiI=","c3RhdA==","IH0sDQo=","PHNwYW4=","IGZhaXRo","IHJvbQ==","cHJldg==","IEVsZWN0","RmluZA==","IGdvZA==","b3Rvcg==","Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t","b3JpZ2luYWw=","Q3Bw","IFNlbmF0ZQ==","IHBvc2l0aW9ucw==","IHdlYXBvbnM=","IGNvZmY=","IHB1cnBvc2Vz","cG9s","IGltcHJlc3M=","IGFuaW1hbHM=","LkVudGl0eQ==","KG5w","IG11cmRlcg==","IGBg","ZmxhZw==","IHNvbHV0aW9ucw==","IEFjdGl2ZQ==","IGJyaWdodA==","LmRhdGU=","IHNpdHU=","77yI","LklE","IHNpZQ==","KSwNCg==","YWt0","U3BhY2U=","LmRhdA==","LmluZGV4T2Y=","aGFu","YXppbmU=","IFpl","IGNyYXNo","KC8=","Pj0=","0LE=","MTM5","aXZh","LkF1dG9TaXpl","IExhdA==","X2V4dA==","SW5pdGlhbGl6ZQ==","LnJlZ2lzdGVy","MTU2","T1BZ","IHJldmVyc2U=","X2Rpcw==","J11b","IHByb21wdA==","b250bw==","IEpvdXJuYWw=","cm91dGVy","IG15c3FsaQ==","I2Vsc2U=","KSI=","LXhz","bGV0cw==","cGhhbg==","LkxF","MTM3","V2lsbA==","IGFmZm9yZA==","IHNraWxs","LXRvZ2dsZQ==","TkM=","QmluZA==","VFM=","SnVzdA==","aXRlcmFs","WVA=","CXVuc2lnbmVk","IHdpbmQ=","MTQ5","KSk6Cg==","IHdhcm5pbmc=","IFdhdGVy","IGRyYWZ0","IGNt","IHNhbQ==","IGhvbGRpbmc=","emlw","IFNjaWVuY2U=","IHN1cHBvc2Vk","R2Vu","IGRpZXQ=","PGg=","IFBhc3M=","dmk=","IGh1c2JhbmQ=","77+977+9","bm90ZQ==","IEFib3V0","IEluc3RpdHV0ZQ==","IGNsaW1hdGU=","LkZvcm1hdA==","IG51dA==","ZXN0ZWQ=","IGFwcGFyZW50","IGhvbGRz","Zmk=","bmV3cw==","Q00=","dmlkZW8=","Jzon","RElUSU9O","cGluZ
w==","IHNlbmlvcg==","d2E=","LS0+Cg==","X2RlZmF1bHQ=","IERhdGFiYXNl","cmVw","RVNT","bmVyZ3k=","LkZpbmQ=","X21hc2s=","IHJpc2U=","IGtlcm5lbA==","Ojok","LlE=","IG9mZmVyaW5n","ZGVjbA==","IENT","IGxpc3RlZA==","IG1vc3RseQ==","ZW5nZXI=","IGJsb2Nrcw==","b2xv","IGdvdmVybmluZw==","XEY=","IGNvbmNlbnQ=","LmdldFRleHQ=","IG1i","IG9jY3VycmVk","IGNoYW5naW5n","U2NlbmU=","X0NPREU=","QmVo","IlRoZQ==","IHRpbGU=","IEFzc29jaWF0aW9u","CVA=","YWx0eQ==","X2Fk","b2RpZXM=","aWF0ZWQ=","IHByZXBhcmVk","cG9zc2libGU=","IG1vcnQ=","VEVTVA==","MTQy","IGlnbm9yZQ==","IGNhbGM=","IHJz","IGFzc2VydEVxdWFscw==","IHN6","IFRISVM=","LiIK","IGNhbnZhcw==","amF2YQ==","IGR1dA==","VkFMSUQ=","LnNxbA==","LmlucHV0","IGF1eA==","U3Vw","IGFydGlzdA==","VmVj","X1RJTUU=","LnN0cmluZ2lmeQ==","ZXR3ZWVu","IENhdGVnb3J5","IFst","IERldkV4cHJlc3M=","IEp1bA==","IHJpbmc=","LmVk","WVk=","TGV0","VGV4dEZpZWxk","IGZsYXQ=","X3ByaW50","IE9USEVS","YWRpYW4=","IGNoZWNrZWQ=","ZWxl","QWxpZ24=","c3RhbmRpbmc=","IFtdLA==","IGxhYg==","dWNreQ==","IENocmlzdG1hcw==","KGltYWdl","Lm1vZHVsZQ==","IGxvdHM=","IHNsaWdodGx5","KGZpbmFs","ZXJnZQ==","6L8=","MTQ3","IFBvbGljZQ==","MTQz","IFJpZ2h0","IGF3YXJk","IE9T","IHt9Cgo=","IHB0cg==","b3Zlcw==","aWNhdGVk","0LXQvA==","IG1hbmFnZQ==","b2xpZGF5","QW1vdW50","b29sU3RyaXA=","dGJvZHk=","TmF2","d3JhcA==","QkI=","IHdhdGNoaW5n","YXJpb3M=","IG9wdGlvbmFs","X0s=","IExpY2Vuc2Vk","Lk1hcA==","VGltZXI=","IEFQ","IFJldg==","KG8=","LGM=","dW1pbg==","ZXRhaWxlZA==","IEh5","IGJsYW5r","YWdnZXI=","IFNlbGY=","KClb","Lm1ha2U=","ZWFybg==","Y2hhbm5lbA==","PHByZQ==","YmxlbQ==","X3Bhc3N3b3Jk","X3Nw","aWNpbmc=","ZXo=","IHRoZW9yeQ==","IFRlcg==","MTg0","LG4=","bG9nbw==","IEhUVFA=","KCkpKQ==","LmhhbmRsZQ==","PjsK","V29ybGQ=","IHB5dGhvbg==","IGxpZg==","IHRyYXY=","IGNvbnZlbg==","Y29tcGFueQ==","IENsdWI=","MTM4","VmVy","QnRu","IHpvbmU=","cHJvZHVjdHM=","IEVkdWM=","IHZlcmlmeQ==","IE1pbA==","b25v","XSk7Cgo=","RU5DRQ==","IHBhY2tldA==","IGNlcg==","IGVudW1lcg==","IHBhcnM=","Zm9ybWVk","IG9jY3Vw","dHJl","IGV4ZXJjaXNl","RGF5","X3N1bQ==","IGFza2luZw==","YXB0aW9
u","IG9yZGVycw==","IHNwZW5kaW5n","IEVSUg==","LkRpcw==","IFV0aWw=","4oCcSQ==","XCc=","Pyk=","Lz4K","IGVtb3Q=","IGluZmx1ZW5jZQ==","IEFmcmljYQ==","YXR0ZXJz","2YU=","LnNlc3Npb24=","IGNoaWVm","CQkJCQkJCQkJCQk=","IHRvbQ==","Y2x1ZGVk","c2VyaWFs","X2hhbmRsZXI=","LlR5cGU=","YXBlZA==","IHBvbGljaWVz","LWV4","LXRy","Ymxhbms=","bWVyY2U=","IGNvdmVyYWdl","IHJj","X21hdHJpeA==","X2JveA==","IGNoYXJnZXM=","IEJvc3Rvbg==","UGU=","IGNpcmN1bQ==","IGZpbGxlZA==","MTQ4","IG5vcnRo","aWN0dXJlQm94","CXJlcw==","6K4=","IHRlcm1pbg==","IFvigKY=","SVJFQ1Q=","IGJlcg==","ICIuLi8uLi8=","cmV0Y2g=","LmNvZGU=","X2NvbA==","IEdvdmVybm1lbnQ=","IGFyZ3Y=","IExvcmQ=","YXNp","RXhlYw==","CWxldA==","dmVydGlz","IGRpc2N1c3Npb24=","ZW5hbmNl","b3V0dWJl","dHlwZW9m","IHNlcnZlZA==","IFB1dA==","CXg=","IHN3ZWV0","QmVmb3Jl","YXRlZ3k=","Lm9m","IE1hdGVyaWFs","U29ydA==","T05U","aWdpdGFs","V2h5","IHN1c3Q=","IOc=","YWJldA==","IHNlZ21lbnQ=","IFtdLAo=","IE11c2xpbQ==","IGZpbmRWaWV3QnlJZA==","Y3V0","X1RFWFQ=","IE1hcnk=","IGxvdmVk","IGxpZQ==","IEpP","IGlzc2V0","bW9udGg=","IHByaW1l","dGk=","IENhcm9s","VXNl","MTQ2","IFBvcA==","IFNhdmU=","SW50ZXJ2YWw=","ZXhlY3V0ZQ==","ZHk=","IElyYW4=","X2NvbnQ=","CVQ=","IHBoYXNl","Y2hlY2tib3g=","d2Vlaw==","IGhpZGU=","IHRpbA==","IGp1","Q3VzdG9t","YnVyZw==","L00=","VE9O","IHF1YW50","IHJ1Yg==","aXhlbHM=","IGluc3RhbGxlZA==","IGR1bXA=","IHByb3Blcmx5","KExpc3Q=","IGRlY2lkZQ==","YXBwbHk=","SGFz","IGtlZXBpbmc=","IGNpdGl6ZW5z","IGpvaW50","cG9vbA==","U29ja2V0","X29w","IHdlYXBvbg==","Z25vcmU=","IEV4ZWM=","b3R0ZW4=","IE1T","ICgt","IFJldmlldw==","IGV4YW1wbGVz","IHRpZ2h0","ISg=","RFA=","IE1lc3NhZ2VCb3g=","IHBob3RvZ3JhcGg=","MTY0","VVJJ","w6l0","bG93","IEdyYW5k","LnBlcnNpc3RlbmNl","IG1haW50YWlu","IG51bXM=","IHppcA==","aWFscw==","IEdldHM=","cGVn","IEJ1ZmZlcg==","fn5+fg==","cmFzdHJ1Y3R1cmU=","IFBM","dWVu","b2JieQ==","c2l6ZW9m","IHBpYw==","IHNlZWQ=","IGV4cGVyaWVuY2Vk","IG9kZA==","IGtpY2s=","IHByb2NlZHVyZQ==","YXZpZ2F0b3I=","LW9u","LGo=","IEFsdGhvdWdo","IHVzZXJJZA==","YWNjZXB0","Qmx1ZQ==","SUNvbG9y","bGF5ZXI=","YXZhaWxhYm
xl","IGVuZHM=","LnRhYmxl","IGRhdGFzZXQ=","YnVz","IGV4cGxhaW4=","KHBybw==","IENvbW1pdHRlZQ==","IG5vdGVk","XToK","RGlt","c3RkaW8=","MTU0","LiIsCg==","X3NvdXJjZQ==","MTgx","IFdlZWs=","IEVkZ2U=","IG9wZXJhdGluZw==","IGVzdGU=","aXBs","MzMw","YWdpbmF0aW9u","IHByb2NlZWQ=","IGFuaW1hdGlvbg==","Lk1vZGVscw==","IFdhdGNo","aWF0","IG9wcG9u","L0E=","UmVwb3J0","IHNvdW5kcw==","X2J1Zg==","SUVMRA==","IGJ1bmQ=","CWdldA==","LnBy","KHRtcA==","IGtpZA==","PgoKCg==","IHlhbmc=","Tm90Rm91bmQ=","0YY=","bWF0aA==","QGdtYWls","IExJTUlU","cmVkaWVudHM=","IHZlbnQ=","YXZpZ2F0ZQ==","TG9vaw==","IHJlbGlnaW91cw==","IHJhbmQ=","cmlv","KEdM","X2lw","dWFu","aWNpZW5jeQ==","IENoYW5nZQ==","Pg0KDQo=","IEVudGl0eQ==","IHJlbmNvbnRyZQ==","IFJldA==","cGxhbg==","w6lu","Qk9PTA==","dXJpZXM=","dHJhaW4=","RGVmaW5pdGlvbg==","PT09PT09PT09PT09","eno=","NDUw","QW5pbWF0aW9u","IE9L","X21lbnU=","LmJs","X3Njb3Jl","IGFjYWQ=","KFN5c3RlbQ==","IHJlZnJlc2g=","Jz0+JA==","LkdyYXBoaWNz","YW1lbnRv","cGlk","dGM=","IHRpcHM=","IGhvbWVz","IGZ1ZWw=","4pY=","X2hlbHBlcg==","ICANCg==","IFJvb20=","LkNsb3Nl","X2F0dHI=","IE1vdW50","IEV2","YXJzZXI=","X3RvcA==","ZWFo","IERlbGV0ZQ==","44CN","dWtl","IHVzYWdl","YXJpYQ==","X2Rldg==","IHRleHR1cmU=","IGNvbnZlcnNhdGlvbg==","ZXBlcg==","QmVhbg==","ZG9uZQ==","bm9uYXRvbWlj","IFNlY29uZA==","IHNob290aW5n","X3ByZQ==","Q29tcG9uZW50cw==","IF0KCg==","X18s","c3RpdHV0aW9u","LkNoYXI=","PigpOwoK","IHByZXNlbnRlZA==","IHdh","b2tlcg==","LQoK","aW5lcg==","IGJlY29taW5n","IGluY2lkZW50","QXR0","MTYy","IHJldmVhbGVk","Zm9yYw==","IGJvb3Q=","LnBhZ2U=","RW51bWVyYXRvcg==","MTY1","Xy0+","UGhvdG8=","IHNwcmluZw==","LiIs","IERpY3Rpb25hcnk=","QkpFQ1Q=","IGxvY2F0aW9ucw==","IHNhbXBsZXM=","SW5wdXRTdHJlYW0=","IEJyb3du","IHN0YXRz","cXVhbGl0eQ==","0YU=","LWRpcw==","IGhlbHBpbmc=","IHBlZA==","MjI0","KHNl","IFdobw==","YWxpYW4=","aW50ZXJuYWw=","IGZ0","PigpLg==","LT57","IG1pbmU=","IHNlY3Rvcg==","IGdybw==","IG9wcG9ydHVuaXRpZXM=","IMO8","IG1w","IGFsbGVnZWQ=","IGRvdWJ0","TW91c2U=","QWJvdXQ=","X3BhcnQ=","IGNoYWly","IHN0b3BwZWQ=","MTYx","bG9vcA==","ZW50aXR
pZXM=","IGFwcHM=","YW5zaW9u","IG1lbnRhbA==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","RlI=","IGRlZmVuZA==","Y2FyZQ==","IGlkZWFs","L2FwaQ==","dXJmYWNl","MDEx","IGVsZQ==","dWxhdG9y","IFJpZ2h0cw==","YW5ndWFnZXM=","IGZ1bmRz","IGFkYXB0","QXR0cmlidXRlcw==","IGRlcGxveQ==","b3B0cw==","IHZhbGlkYXRpb24=","IGNvbmNlcm5z","dWNl","Lm51bQ==","dWx0dXJl","aWxh","IGN1cA==","IHB1cmU=","LkZvcmU=","MTgz","IEhhc2hNYXA=","LnZhbHVlT2Y=","YXNt","TU8=","IGNz","IHN0b3Jlcw==","ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg==","IGNvbW11bmljYXRpb24=","bWVt","LkV2ZW50SGFuZGxlcg==","LlN0YXR1cw==","X3JpZ2h0","LnNldE9u","U2hlZXQ=","IGlkZW50aWZ5","ZW5lcmF0ZWQ=","b3JkZXJlZA==","ICJb","IHN3ZQ==","Q29uZGl0aW9u","IEFjY29yZGluZw==","IHByZXBhcmU=","IHJvYg==","UG9vbA==","IHNwb3J0","cnY=","IFJvdXRlcg==","IGFsdGVybmF0aXZl","KFtd","IENoaWNhZ28=","aXBoZXI=","aXNjaGU=","IERpcmVjdG9y","a2w=","IFdpbA==","a2V5cw==","IG15c3Fs","IHdlbGNvbWU=","a2luZw==","IE1hbmFnZXI=","IGNhdWdodA==","KX0K","U2NvcmU=","X1BS","IHN1cnZleQ==","aGFi","SGVhZGVycw==","QURFUg==","IGRlY29y","IHR1cm5z","IHJhZGl1cw==","ZXJydXB0","Q29y","IG1lbA==","IGludHI=","KHE=","IEFD","YW1vcw==","TUFY","IEdyaWQ=","IEplc3Vz","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","LkRF","IHRz","IGxpbmtlZA==","ZnJlZQ==","IFF0","IC8qKg0K","IGZhc3Rlcg==","Y3Ry","X0o=","RFQ=","LkNoZWNr","IGNvbWJpbmF0aW9u","IGludGVuZGVk","LXRoZQ==","LXR5cGU=","MTgy","ZWN0b3Jz","YW1p","dXRpbmc=","IHVtYQ==","WE1M","VUNU","QXA=","IFJhbmRvbQ==","IHJhbg==","LnNvcnQ=","IHNvcnRlZA==","LlVu","NDAx","X1BFUg==","aXRvcnk=","IHByaW9yaXR5","IEdhbA==","IE9sZA==","aG90","IERpc3BsYXk=","KHN1Yg==","X1RI","X1k=","IENhcmU=","bG9hZGluZw==","S2luZA==","X2hhbmRsZQ==","LCw=","cmFzZQ==","X3JlcGxhY2U=","LmFkZEV2ZW50TGlzdGVuZXI=","IFJU","MTcy","IGVudGVyZWQ=","Z2Vycw==","IGljaA==","KHN0YXJ0","MjA1","L2FwcA==","IGJyb3RoZXI=","TWVtb3J5","T3V0bGV0","IHV0Zg==","cHJlYw==","IG5hdmlnYXRpb24=","T1JL","IGRzdA==","RGV0YWls","IGF1ZGllbmNl","IGR1cg==","IG
NsdXN0ZXI=","dW5jaGVk","IF0s","IGNvbWZvcnRhYmxl","LnZhbHVlcw==","IFRvdGFs","IHNuYXA=","IHN0YW5kYXJkcw==","IHBlcmZvcm1lZA==","aGFuZA==","KCJA","5a0=","IHBoaWw=","aWJy","dHJpbQ==","IGZvcmdldA==","MTU3","IGRvY3Rvcg==","LlRleHRCb3g=","Mzc3","aWNvbnM=","LHM=","IE9w","U20=","U3RvcA==","CUxpc3Q=","CXU=","Q29tbWVudA==","X1ZFUlNJT04=","Llh0cmE=","UGVyc29u","cmI=","TE9C","ICAgICAgICAgICAgICAgICAgICAK","IENlbnRyYWw=","Mjcw","SUNL","cmFx","IHB1dHRpbmc=","IG1k","IExvdmU=","UHJvZ3JhbQ==","Qm9yZGVy","b29y","IGFsbG93aW5n","YWZ0ZXI=","IGVudHJpZXM=","IE1heWJl","XSku","IFNob3J0","KVw=","Lm5vdw==","ZnJpZW5k","IHByZWZlcg==","IEdQSU8=","b3Npcw==","IEdhbWVPYmplY3Q=","IHNraXA=","IGNvbXBldGl0aW9u","X21hdGNo","bGljYXRpb25z","X0NPTlQ=","Lmdyb3VwQm94","IGFscw==","NjY2","Ildl","X2Vx","bGFu","X3NlYXJjaA==","IE11c2lj","YXNpcw==","IGJpbmQ=","IElzbGFuZA==","cnVt","KEU=","IHNlYXQ=","VmlkZW8=","IGFjaw==","cmVlaw==","PXsoKQ==","IHJhdGluZw==","IHJlc3RhdXJhbnQ=","NDU2","REVY","KGJ1Zg==","cHBpbmc=","dWFsaXR5","IGxlYWd1ZQ==","MTc2","IGZvY3VzZWQ=","YXBvbg==","JGRhdGE=","Q0xVRA==","Q0xVRElORw==","IGFic29sdXRl","KHF1ZXJ5","IHRlbGxz","QW5n","IGNvbW11bml0aWVz","IGhvbmVzdA==","b2tpbmc=","IGFwYXJ0","YXJpdHk=","LyQ=","X21vZHVsZQ==","IEVuYw==","LmFu","LkNvbmZpZw==","Q3Jl","IHNob2Nr","IEFyYWI=","SUVOVA==","L3Jl","IHJldHJpZQ==","eWNsZXI=","aXNh","IE9yZ2Fu","LmdyYXBo","IO0=","IEJBUw==","RW51bQ==","IHBvc3NpYmx5","0YDQsNA=","IEphcGFuZXNl","IGNyYWZ0","IFBsYWNl","IHRhbGVudA==","IGZ1bmRpbmc=","IGNvbmZpcm1lZA==","IGN5Y2xl","L3g=","R0U=","IGhlYXJpbmc=","IHBsYW50cw==","IG1vdXRo","cGFnZXM=","b3JpYQ==","IFJlbW92ZQ==","X3RvdGFs","IG9k","b2xsYXBzZQ==","ZG9vcg==","IGJvdWdodA==","IGFkZHI=","QVJDSA==","X2RpbQ==","ZGRlbg==","IGRlY2FkZXM=","UkVRVUVTVA==","IHZlcnNpb25z","ZmlyZQ==","MDA2","IG1vdmVz","ZmI=","IGNvZmZlZQ==","LmNvbm5lY3Q=","IFJvdw==","IHNjaGVtYQ==","U2NvcGU=","LVR5cGU=","IGZpZ2h0aW5n","IHJldGFpbA==","IG1vZGlmaWVk","VEY=","RmlsZXM=","bmll","X2NvbW1hbmQ=","c3RvbmU=","INGC","X3RocmVhZA==","IGJvbmQ=","IERldmVsb3BtZW50","IHB0",
"Rk9STQ==","cGxldA==","IGlkZW50aWZpZWQ=","Y3Bw","MjA2","MjI1","IGNvZGluZw==","b2tlZA==","IE1hc3Rlcg==","SURUSA==","IHJlc2lkZW50cw==","cmVkaXQ=","IFBob3Rv","PS0=","dW50ZQ==","YXRldXI=","MTU5","X1NUQVRF","IFNpbmc=","IHNoZWV0","LnZhbA==","b3JzZQ==","IGhlcnM=","IGRldGVybWluZWQ=","Q29tbW9u","IHdlZA==","X3F1ZXVl","UEg=","IEF0bA==","Y3JlZA==","L0xJQ0VOU0U=","IG1lcw==","IGFkdmFuY2Vk","LmphdmE=","LlNo","R28=","a2lsbA==","ZnA=","X3NldHRpbmdz","IHBhbA==","IHRydWNr","IGNvbWJpbmVk","ICIkew==","IENvcnBvcg==","IGpvaW5lZA==","IEpvc2U=","IEN1cA==","dW5z","ZXN0aXZhbA==","bGV2aXNpb24=","IGJyb2tlbg==","IG1hcnJpYWdl","IFdlc3Rlcm4=","IHJlcHJlc2VudHM=","IFRpdGxl","IHNz","LkFzcw==","b25nb29zZQ==","aWVudG8=","PD4oKTsK","IGFic29sdXRlbHk=","IHNtb290aA==","VEVSTg==","IFVubGVzcw==","V29yZA==","IG1lcmdl","aWdhbg==","IFZvbA==","IG5u","LmdldElk","INC3","MTcx","IHNleHk=","IHNlZWtpbmc=","U2luZ2xl","LnRoaXM=","MTc5","IGtvbQ==","Ym91bmQ=","OyI=","IGZvbnRTaXpl","X2Rm","IGluanVyeQ==","KEg=","IGlzc3VlZA==","X0VORA==","OnNlbGY=","MDIw","IHBhdGNo","IGxlYXZlcw==","IGFkb3B0","RmlsZU5hbWU=","44CQ","IGV4ZWN1dGl2ZQ==","IEJ5dGU=","XSkpCg==","IG51","b3V0aW5n","Y2x1ZGluZw==","LVI=","Lm9wdGlvbnM=","IHN1YnN0YW50","YXZheA==","IEJVVA==","IHRlY2huaWNhbA==","IHR3aWNl","IG3DoXM=","IHVuaXZlcnM=","eXI=","IGRyYWc=","IERD","IHNlZA==","IGJvdA==","IFBhbA==","IEhhbGw=","Zm9yY2VtZW50","IGF1Y2g=","Lm1vZA==","bm90YXRpb24=","X2ZpbGVz","LmxpbmU=","X2ZsYWc=","W25hbWU=","IHJlc29sdXRpb24=","IGJvdHQ=","KCJb","ZW5kZQ==","KGFycg==","RnJlZQ==","KEAi","IERpc3RyaWN0","UEVD","Oi0=","UGlja2Vy","IEpv","ICAgICAK","IFJpdmVy","X3Jvd3M=","IGhlbHBmdWw=","IG1hc3NpdmU=","LS0tCg==","IG1lYXN1cmVz","MDA3","IFJ1bnRpbWU=","IHdvcnJ5","IFNwZWM=","CUQ=","44CR","ICl7Cg==","IHdvcnNl","KGZpbGVuYW1l","IGxheQ==","IG1hZ2lj","IFRoZWly","b3Vs","c3Ryb3k=","IFdoZXJl","Mjgw","IHN1ZGRlbg==","IGRlZmU=","IGJpbmRpbmc=","IGZsaWdodA==","IE9uSW5pdA==","IFdvbWVu","IFBvbGljeQ==","IGRydWdz","aXNoaW5n","KCcuLi8=","IE1lbA==","cGVhdA==","dG9y","IHByb3Bvc2Vk","IHN0YXRlZA==","X1JFUw==
","IGVhc3Q=","MjEy","IENPTkRJVElPTg==","X2Rlc2M=","IHdpbm5pbmc=","Zm9saW8=","TWFwcGVy","IFBhbg==","IEFuZ2U=","LnNlcnZsZXQ=","IGNvcGllcw==","TE0=","IHZt","5Y0=","IGRpY3Rpb25hcnk=","U2Vn","MTc3","ZWxpbmVz","IFNlbmQ=","IGlyb24=","IEZvcnQ=","MTY2","LmRvbWFpbg==","IGRlYmF0ZQ==","Tm90TnVsbA==","ZXE=","YWNoZXI=","bGY=","CWZtdA==","IGxhd3k=","MTc4","xJ8=","IE1lbg==","IHRyaW0=","KE5VTEw=","ICEh","IHBhZA==","IGZvbGxvd3M=","Il1bIg==","cmVxdQ==","IEVw","LmdpdGh1Yg==","KGltZw==","ZXRv","KCdc","U2VydmljZXM=","dW1ibmFpbA==","X21haW4=","cGxldGVk","Zm9ydHVuYXRlbHk=","IHdpbmRvd3M=","IHBsYW5l","IENvbm5lY3Rpb24=","LmxvY2Fs","dWFyZA==","fVw=","PT0i","YW5kb24=","IFJveQ==","d2VzdA==","MTU4","aWdpbmFs","ZW1pZXM=","aXR6","Jyk6Cg==","IFBldGVy","IHRvdWdo","IHJlZHVjZWQ=","IGNhbGN1bGF0ZQ==","IHJhcGlk","Y3VzdG9tZXI=","IGVmZmljaWVudA==","IG1lZGl1bQ==","IGZlbGw=","LnJlZg==","IENhcw==","IGZlZWRiYWNr","U3BlZWQ=","KG91dHB1dA==","YWpl","IGNhdGVnb3JpZXM=","IGZlZQ==","fTs=","IGRlbGV0ZWQ=","cmVo","IHByb29m","RGVzYw==","QnVpbGQ=","IHNpZGVz","LkFycmF5TGlzdA==","LSU=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","2LE=","Lm1hdGNo","0LvQuA==","IGZlZWxz","IGFjaGlldmU=","IGNsaW0=","X09O","IENE","IHRlYWNoZXI=","X2N1cnJlbnQ=","Ym4=","X1BM","aXN0aW5n","RW5hYmxl","R0VO","IHR2","IHNvY2s=","IHBsYXlz","IGRpc2NvdW50","IEtF","IERlYnVn","Rm9yZQ==","IElyYXE=","IGFwcGVhcmFuY2U=","TW9u","IHN0eWxlZA==","IEh1bWFu","aW90","IEhpc3Rvcnk=","IHNhYw==","IENvbGxlY3Rpb24=","IHJlY29tbWVuZGVk","LlNlbGVjdGVk","IG9yZ2FuaXphdGlvbnM=","IGRpc2NvdmVyZWQ=","Y29ob2w=","YWRhcw==","IFRob21hcw==","TWF5","IGNvbnNlcnY=","IGRvbWlu","IEZvbGxvdw==","IFNlY3Rpb24=","IFRoYW5rcw==","VXNlcm5hbWU=","IHJlY2lwZQ==","IHdvbmRlcmZ1bA==","LnNsZWVw","X2lm","CQoJCg==","b3Jubw==","IHJ1","X3RhcmdldA==","LiIi","4KY=","RXZlbnRBcmdz","IGlucHV0cw==","IGZpZg==","IHZpc2lvbg==","Y3k=","IFNlcmllcw==","KSgoKA==","IHRyYWRpbmc=","IG1hcmtlcg==","QmVnaW4=","IHR5cGljYWxseQ==","IGNhdXNlcw==","ZHJvcGRvd24=","X0RFQlVH","MjYw","IGRldGVjdA==","Y291bnRyeQ==","ISIpOwo=","CVI=",
"YXBweQ==","IGNyZWY=","KCc8","Ij0+","IExF","cmVhZGVy","IGFkbWluaXN0cg==","w7U=","dWNrZXQ=","IGZhc2hpb24=","LmNoYXI=","aXphcg==","IGRpc2FibGU=","IHN1Yw==","IExpdmU=","aXNzdWU=","IG1ldGFkYXRh","ZmxhZ3M=","IPCf","IGNvbW1pdHRlZA==","IHZh","IHJvdWdo","ICcnJwo=","IGhpZ2hsaWdodA==","X3ZhcnM=","Vk8=","IGVuY29kaW5n","LVo=","X3NpZ24=","JCgiIw==","IHJhaW4=","cmVhdGVzdA==","IEVORA==","U2VsZWN0aW9u","IGNhbmRpZGF0ZXM=","IHNhdg==","LkVtcHR5","IGRlY2lzaW9ucw==","IGNvbGxhYm9y","cmlkZ2U=","ZmVlZA==","cmVzc2lvbg==","IHBlcnNvbnM=","Vk0=","MDA4","ZWdh","X0JJVA==","QWNjb3JkaW5n","YWNrZWQ=","IGRvbGxhcnM=","X2xvc3M=","IENvc3Q=","fSIK","Tm90aWZpY2F0aW9u","IHByb3N0aXQ=","IGF1dGhvcml0eQ==","LnJlYw==","IHNwb2tlcw==","IFRvZGF5","aXN0YW50","IEhlYWQ=","4oCdLg==","ZXJ0YWlubWVudA==","Y2Vhbg==","Y3VsYXRl","IHZlbg==","SG93ZXZlcg==","X2Fycg==","IHRva2Vucw==","R3JhcGg=","IEp1ZA==","IFZpcmdpbg==","IFNlcmlhbA==","dW5uaW5n","TXV0YWJsZQ==","YWdlcnM=","LmNzdg==","IGRldmVsb3Bpbmc=","IGluc3RydWN0aW9ucw==","IHByb21pc2U=","IHJlcXVlc3RlZA==","X2VuY29kZQ==","LyI=","IEljb24=","dWlsdA==","LWRheQ==","IGludGVsbGlnZW5jZQ==","LklT","IE9ic2VydmFibGU=","IEhhcmQ=","Qm9vbA==","MjEx","aWRlbnRpYWw=","LkFuY2hvcg==","IHNlbGxpbmc=","Q0k=","QUdFUw==","dGxl","YnVy","VUZGRVI=","Ulk=","IGJpZ2dlcg==","IHJhdA==","IGZhbW91cw==","IHR5cGVuYW1l","IGV4cGxhaW5lZA==","fX0K","IG51Y2xlYXI=","LU4=","IGNyaXNpcw==","IEVudGVy","IGFuc3dlcnM=","LyR7","L3Bs","IHNlcXU=","X25leHQ=","bWFzaw==","IHN0YW5kaW5n","IHBsZW50eQ==","IENyb3Nz","CXJldA==","ZHJv","IENhc3Q=","MTY3","PXRydWU=","IENocmlz","aWNpbw==","IE1pa2U=","RGVjaW1hbA==","YWRkQ29tcG9uZW50","TGVu","IGNvY2s=","ICN7","VVJO","PHRy","IGF1dGhvcml0aWVz","UmVzb3VyY2Vz","LUg=","Qm90dG9t","MDEy","X3F1","cHV0ZXI=","ZXN0ZXJkYXk=","RGlzcGF0Y2g=","c2luY2U=","IGZhbWlsaWFy","LGk=","VkM=","IG1lbnQ=","LEM=","IGZyZWVkb20=","IHJvdXRlcw==","IEJ1eQ==","IGNvbW1hbmRz","IG1lc2g=","L0M=","IFNldHRpbmdz","LXN0eWxl","IHdpdG5lc3M=","IGNsZQ==","IHVuaW9u","ZWZhdWx0","YXJldA==","IHRob3VnaHRz","IC0tLS0=","X3Byb2Nlc3M=","X3Vz","aW
5nbHk=","VUVT","VG91Y2g=","INC8","X29wZW4=","IFZlYw==","IHJld2FyZA==","LkNsaWNr","Lzo=","IG5pZQ==","Q2hhbmdlcw==","TW9udGg=","77yf","IGV4ZWN1dGlvbg==","IGJlYWNo","KEludGVnZXI=","CWE=","Lyc=","LkZvbnRTdHlsZQ==","IGFib3J0","IFNpbmdsZQ==","KGlzc2V0","IGRw","IH19PC8=","IE1h","MjE0","LlJvd3M=","IFBldA==","JSk=","cmFuZA==","6YA=","UnVsZQ==","IGhlbA==","MDIx","UklURQ==","IHF1aWV0","IHJhdGlv","IENPTkRJVElPTlM=","b3NvcGg=","IElM","IGFkdmVudA==","Y2Fw","Ozwv","IFVTQg==","RHJpdmVy","IG91cnM=","IEpvaG5zb24=","Lks=","X2RlbGV0ZQ==","LnE=","CXN0cg==","L2NvbW1vbg==","CXN0cmluZw==","IFBERg==","YWN0cw==","LkFjdGlvbg==","IFF1ZXJ5","LnJlc3BvbnNl","IEdpcmw=","IHByb2Nlc3Nlcw==","PEludGVnZXI=","aW1v","IGFkZHM=","IGVudGlyZWx5","IHdhc2g=","LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg==","IGFuaW1hdGVk","IHByb2ZpdA==","ZW5jaW5n","L1M=","IFN5bQ==","IG1hbnVhbA==","RG93bmxvYWQ=","ICghJA==","IG1vdGlvbg==","d2VicGFjaw==","LWJvdHRvbQ==","IGdyYXR1aXQ=","UEc=","KDos","IGVyYQ==","IGhv","IEppbQ==","cXVpcg==","IEJBU0lT","w6Fu","REVS","IGV4cGVuc2l2ZQ==","X2Nv","Qm91bmRz","V2VsbA==","IERlbW9jcmF0aWM=","IOKGkg==","LlJlbQ==","X1NZ","bmFtZXM=","IFZp","IGlzaW5zdGFuY2U=","XCI+","ICo9","IFBT","IGRhbmdlcm91cw==","W3A=","T01F","T3RoZXI=","IFN0cmluZ0J1aWxkZXI=","UG9pbnRz","aGVhZGluZw==","IGN1cnJlbmN5","IHBlcmNlbnRhZ2U=","X0FQSQ==","IGNsYXNzaWM=","dGhlYWQ=","IE1P","RkU=","SWR4","YXdhaXQ=","IMOo","IGFjY2lkZW50","IHZhcmlhbnQ=","IG15c3Q=","IExhbmQ=","IEJyZQ==","IGhhcm0=","IEFjYw==","IGNoYXJnZWQ=","aW9uZXM=","VmlzaWJpbGl0eQ==","YXJyeQ==","IExhbmd1YWdl","IHdhbGtpbmc=","Ii4KCg==","aWZlcg==","IGxlYWRlcnNoaXA=","LkZyb20=","eW5hbQ==","IHRpbWVzdGFtcA==","aXB0","IEhhcw==","UkVGRVI=","IEl0cw==","IGxpc3RlbmVy","VVRF","MjEz","X2Rlc2NyaXB0aW9u","IGV4cGVyaWVuY2Vz","IGNyZWF0ZXM=","UlM=","Y2FydA==","YmxhY2s=","IGNob2ljZXM=","d2Fy","NzUw","ICcnJw==","IG9yZGVyZWQ=","IGV2ZW5pbmc=","IHBpbA==","IHR1bg==","IEJhZA==","KGFwcA==","cmFuZG9t","IGV4cGxpY2l0","IGFycml2ZWQ=","IGZseQ==","IGVjb2
5vbQ==","LW1haWw=","IGxpc3Rz","IGFyY2hpdGVjdA==","MjM0","IFBheQ==","IGRz","IFNvbA==","IHZlaGljbGVz","SHo=","LWNvbQ==","IGtpbmc=","X2VxdWFs","IEhlbHA=","IGFidXNl","NDgw","MTY5","LS07Cg==","IGV4dHI=","IGNoZW1pY2Fs","5L8=","IG9yaWVudA==","IGJyZWF0aA==","IFNwYWNl","KGVsZW1lbnQ=","d2FpdA==","REVE","aWdtYQ==","IGVudHI=","IHNvYg==","LW5hbWU=","IGFmZmVjdGVk","aWth","IGNvYWw=","X3dvcms=","IGh1bmRyZWRz","IHBvbGl0aWNz","c3ViamVjdA==","IGNvbnN1bWVy","QU5HRQ==","IHJlcGVhdGVk","U2VuZA==","ICNb","IHByb3RvY29s","IGxlYWRz","dXNldW0=","RXZlcnk=","ODA4","MTc0","SW1wb3J0","KGNvdW50","IGNoYWxsZW5nZXM=","IG5vdmVs","IGRlcGFydA==","Yml0cw==","LkN1cnJlbnQ=","IGAkew==","b3Rpbmc=","KFw=","IGNyZWF0aXZl","IGJ1ZmY=","IGludHJvZHVjZWQ=","dXNpYw==","bW9kdWxlcw==","QXJl","LWRvYw==","bGFuZ3VhZ2U=","X2NhY2hl","IHRvZA==","Pz48Lw==","b21ldGhpbmc=","IGh1bg==","5bo=","YXRlcnM=","SW50ZW50","IGltcGxlbWVudGVk","IENhc2U=","Q2hpbGRyZW4=","IG5vdGlmaWNhdGlvbg==","UmVuZGVyZXI=","V3JhcHBlcg==","T2JqZWN0cw==","dGw=","LkNvbnRhaW5z","UGx1Z2lu","LnJvdw==","IGZvcmc=","IHBlcm1pdA==","IHRhcmdldHM=","IElG","IHRpcA==","c2V4","IHN1cHBvcnRz","IGZvbGQ=","cGhvdG8=","fSwNCg==","IGdvb2dsZQ==","JCgnIw==","IHNoYXJpbmc=","IGdvb2Rz","dnM=","IERhbg==","UmF0ZQ==","IE1hcnRpbg==","IG1hbm5lcg==","bGll","LlRoZQ==","SW50ZXJuYWw=","IENPTlRS","TW9jaw==","UklHSFQ=","ICd7","IGNvbnRyb2xz","TWF0","IG1hbmQ=","IGV4dGVuZGVk","T2s=","IGVtYmVk","IHBsYW5ldA==","IE5vbg==","LWNo","KSIs","ZXBhcg==","IGJlbGlldmVk","IEVudmlyb25tZW50","IEZyaWVuZA==","LXJlcw==","IGhhbmRsaW5n","bmlj","LWxldmVs","c2NyaQ==","WG1s","QkU=","dW5nZW4=","IGFsdGVy","W2lkeA==","UG9w","Y2Ft","ICgoKA==","IHNoaXBwaW5n","IGJhdHRlcnk=","aWRkbGV3YXJl","TUM=","IGltcGw=","b3RhdGlvbg==","IExhYg==","PGZvcm0=","CW5hbWU=","IEdhbWVz","cmF5","RXh0cmE=","VHdv","KHBsYXllcg==","IExlcw==","wrA=","IGNoYXJzZXQ=","IGpvdXJuZXk=","ZXRpbmc=","5pg=","4pQ=","55So","IGRpbg==","IHBlcm1hbg==","IHNvbHZl","IGxhdW5jaGVk","IG5pbmU=","IHNlbmRpbmc=","IHRlbGxpbmc=","LnBhc3N3b3Jk","IE1hdHJpeA==","ZXJpYw==","IGdyYWI=","Ln
U=","IExpYnJhcnk=","IGRlYnQ=","SU5L","LmZpbmRWaWV3QnlJZA==","IGZyZXF1ZW5jeQ==","LmFk","X1RFU1Q=","IG5lZ290","IEFmcmljYW4=","c2VuZGVy","xaE=","R2xvYmFs","MTcz","IGV4cGVydHM=","KyspDQo=","IGRlcGVuZGluZw==","Z3JheQ==","IGp1ZGdl","IHNlbnRlbmNl","bG9zdXJl","QWM=","IHRyYWNl","RWRnZQ==","IGZyaWVuZGx5","IGNvbmNlcm5lZA==","YmxvZw==","IGNsYWltZWQ=","fSc=","aW50ZWdlcg==","X3RyZWU=","CWNvbnRpbnVl","eGk=","IGFjY2VwdGVk","X29uZQ==","IEVkdWNhdGlvbg==","dWJsaXNoZWQ=","Z29u","YXBwb2ludA==","b3V0cw==","IG1pbmluZw==","IHNvbmdz","IGhlcnNlbGY=","IGdyYW50ZWQ=","IHBhc3Npb24=","IExha2U=","IGxvYW4=","dWVudA==","Y2hhbnQ=","IGRldGFpbGVk","ZXhjZXB0","X2NtZA==","IEhF","UmVsYXRlZA==","enQ=","J30sCg==","IHNwZWNpZmljYWxseQ==","U3RhdGlj","IGNhcnJpZWQ=","QU5T","XCI6","Q3JlYXRlZA==","IGN1bA==","XS0=","X2FwaQ==","RlA=","IHNpdHRpbmc=","ICIiKQ==","CWdvdG8=","IEVxdQ==","IGFzc2F1bHQ=","a2lucw==","YW5jZXI=","b2dlbg==","IHZvdGVycw==","IFByb3Q=","RGVzY3JpcHRvcg==","44O8","LkFzc2VydA==","YnNpdGVz","b3N0ZXI=","LW1lbnU=","IGFybXM=","LkNsaWVudA==","LmJhY2tncm91bmQ=","YXZpdHk=","IHZ1bA==","X01BU0s=","IGhvdXNpbmc=","IGJlYXI=","X2l0ZXI=","cGlyZWQ=","IG1hcmtldHM=","IFN0dWRlbnQ=","IHRpY2tldA==","IG1pbGxpb25z","ZmxhdGVy","KT0=","IHJlY292ZXI=","IEZvcmNl","IEJvdGg=","IHZpY3RpbQ==","IERpc2M=","cmVwb3J0","IGZvdXJ0aA==","IEFzc2VtYmx5","L3VzZXI=","TnVsbE9y","dGV4dGFyZWE=","IGF0aA==","IChb","IGNoYW5uZWxz","IEp1c3RpY2U=","Y2hvaWNl","TE9CQUw=","ZXhlYw==","ZW1hbGU=","IGVsZW0=","X2xl","IHJlc3BvbnNpYmlsaXR5","IFR3","SUNBVElPTg==","IGVsc2VpZg==","IGZv","YXN0cw==","IHRyZWF0ZWQ=","c2Vu","IFZpY3Q=","c3VtZXI=","X0JBU0U=","IGFzdA==","Pnt7","IFJlc291cmNl","IFN0YW5kYXJk","IFByZW0=","dXBkYXRlZA==","aXZhbGVudA==","IGFzc2V0cw==","X3RlbXA=","IGludGVyZXN0cw==","IGhhcmR3YXJl","IFJvbQ==","IFNoYXJl","ICcnCg==","ICos","IFRha2U=","IEltYWdlcw==","X0NIRUNL","KHR5cGVvZg==","IEp1bg==","XDxe","IGxpcXU=","IHdvcnN0","eW1ib2xz","CQkJICAg","IGRyaXZlcnM=","IERvY3VtZW50","ZW5v","IFRlY2hub2xvZ3k=","IGFwcHJvdmVk","dW1wcw==","IHNub3c=","Zm9ybWFuY2U=","X0FTU0VSV
A==","dWl0cw==","MjA3","2YY=","IGRpZmZlcmVuY2Vz","LlZpc2libGU=","CQkJDQo=","IFBz","X2ZldGNo","IHRvZG8=","LicsCg==","IHNlbA==","dXJlcnM=","aW52YWxpZA==","IHR3ZWV0","VkVM","IHJlc2VhcmNoZXJz","IHNwcmludGY=","IFJP","IHBlbA==","LlRyYW5z","IGlsbGVnYWw=","ZGlhbG9n","c21hcnR5","bGc=","X01JTg==","IGhlcm8=","ZmluYWw=","IHBw","Lkxl","IGNp","CVJU","IHN1Z2dlc3RlZA==","cGRm","YWNoaW5n","IFJv","IFByb3BlcnRpZXM=","IFNp","IGJ1eWluZw==","IG11","IGxhbmRz","aWZpZXJz","IEZJTEU=","Uk9VUA==","IGhvbGRlcg==","IFNvbg==","IHN5bXB0","LnJvdXRl","KT8=","IGFyZ2M=","IGZvcnQ=","IGNhc2lubw==","X2NhdGVnb3J5","IGZvcnVt","MjE1","cHJlZml4","YXB0dXJl","VHViZQ==","ZW1z","aW1pemU=","IG51ZQ==","YXVz","Y291cnNl","QVRPUg==","KCkpLA==","QWR2ZXJ0aXM=","SU5HUw==","IGFja25vdw==","IEtvcmVh","cGxpbmc=","IHdvcmtlcg==","UExJRUQ=","aGFs","IFJpY2hhcmQ=","RWxlbWVudHM=","CQkJIA==","c3Rhcg==","IHJlbGF0aW9uc2hpcHM=","IGNoZWFw","QUNI","IFhNTA==","LCY=","IExvdWlz","IHJpZGU=","X0ZBSUw=","IGNodW5r","W3M=","X09VVA==","IGNob3Nlbg==","X1s=","Lyg=","IEplZmY=","X3Ns","cHJpdg==","IENhbmFkaWFu","IHVuYWJsZQ==","X0ZMQUc=","IG5vcw==","aGlnaA==","IGxpZnQ=","ZnVu","KCl7","ZWxseQ==","eWNsZXJWaWV3","X2Fz","X0xJU1Q=","IHJhZGk=","LmdldFZhbHVl","MzA0","IEFuZ2VsZXM=","IFNwYW4=","X2luc3RhbmNl","aXRvcnM=","MjA4","IG1pZ3JhdGlvbg==","QUs=","T2g=","wq4=","LnNlbGVjdGVk","IEdU","IGFkdmFuY2U=","IFN0eWxl","LkRhdGFHcmlkVmlldw==","ZWN0aW9u","0Y4=","cGlv","cm9n","IHNob3BwaW5n","IFJlY3Q=","SWxsdW1pbmF0ZQ==","T1U=","CWFycmF5","IHN1YnN0YW50aWFs","IHByZWdu","IHByb21vdGU=","SUVX","LkxheW91dA==","IHNpZ25z","Ly4=","IGxldHRlcnM=","Qm9hcmQ=","Y3RybA==","Ilw=","IEpvbmVz","IHZlcnRleA==","IGph","IGFmZmlsaQ==","IHdlYWx0aA==","CWRlZmF1bHQ=","IHNpZ25pZmljYW50bHk=","IGVj","IHhz","YWN0dWFs","LnBlcg==","X3N0ZXA=","YW52YXM=","bWFj","IHRyYW5zbA==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","SXRlcmF0b3I=","IG9jaA==","YWdub3N0aWM=","IER1cmluZw==","IERFRkFVTFQ=","IHRpbGw=","IHNpZ25hdHVyZQ==","IGJpcmQ=","IE9s","MzEw","IEly","SFM=","YXZhdGFy
","RVNTQUdF","IGVsZXY=","IG10","IE5hdg==","IHJlbGF4","IHBsYXRl","SVRFTQ==","KGRhdGU=","Lm5vdA==","IGdyYWRl","IH0pLAo=","PyIKCg==","aWVuY2Vz","SGlnaA==","IERJUw==","MjMx","ZGlzYWJsZWQ=","UVVJ","IG5vaXNl","YXV4","IFVQ","ODg4","b3Nh","IHZvYw==","ICkp","b2NvbQ==","X09GRg==","IERi","TG9jaw==","LmVjbGlwc2U=","LGQ=","IERyYXc=","ICIo","IHZpc2l0ZWQ=","IOKI","IHN1Y2NlZWQ=","IGltcG9zc2libGU=","YWlyZQ==","IFR1cm4=","IGRpc2g=","Rkc=","IHNlbnNvcg==","QU5O","YWJh","IHN1cmc=","XSk7DQo=","IGZw","X2Fu","LUo=","LUc=","IEpvYg==","Q29udmVydA==","IEtFWQ==","IGF1dGhvcnM=","X3NlcnZlcg==","XHI=","IC0qLQ==","ZmxleA==","IHNvYw==","UmV0","IHNhbHQ=","IOKApgoK","IENsZWFy","KHBhZ2U=","LWRhbmdlcg==","IHJvb21z","Y29udg==","I3s=","Lm9w","IEFyZWE=","X1ND","aGVu","IGJlZ2lucw==","LXk=","IGV4Y2l0ZWQ=","IGlnbm9yZWQ=","IGJvbnVz","c3R1ZGVudA==","IE1lbWJlcg==","IHJlbGF0aXZlbHk=","IExvdw==","IFByb2R1","YXRld2F5","cG9zdXJl","IHRoaWNr","YW5pZWw=","KHZpZXc=","IENydXNo","RXh0ZW5zaW9u","SWw=","ZWVk","TE9D","Lmlt","Lkl0ZW1z","IGNvbmZsaWN0","LnByZXZlbnQ=","MjUy","IG9uQ3JlYXRl","dXY=","aXNlcg==","IHdhdmU=","TWFy","IENvbW11bml0eQ==","aWNoZQ==","IE5vdGhpbmc=","W20=","IExlZQ==","cmllbmRz","MjMy","w6hyZQ==","ISEh","YW56","LnJlc3VsdA==","IFNL","X1BBUkFN","IGRlbW9jcg==","QmFja0NvbG9y","LmV4aXN0cw==","Ikl0","KG9wdGlvbnM=","cmF6eQ==","YXNlcg==","XERhdGFiYXNl","YWxlbmRhcg==","X2Fzcw==","O30K","dmVydGV4","aW5lY3JhZnQ=","V2FybmluZw==","YXJnbw==","IGFjdG9y","IEluc3RlYWQ=","IFVzaW5n","U2VsZg==","QGludGVyZmFjZQ==","IHNwZWFraW5n","IFBhcmlz","IExJQ0VOU0U=","Lm5vZGU=","IEZvb2Q=","RUlG","IEJp","LlN0YXJ0","IElC","IHVuaXZlcnNpdHk=","MjU0","IEhlYWRlcg==","LnByb2R1Y3Q=","NDA5","Q29weQ==","ZXRj","cmljYWw=","ID4+Pg==","Ym9va3M=","IGFsZ29yaXRobQ==","ICdfXw==","KGphdmF4","IG51bWVyb3Vz","U2hhcmU=","SGF2ZQ==","IHJlY3J1","IHByb3Zl","LnN1YnN0cmluZw==","aGVhbHRo","0LXQuw==","IGRlY2ltYWw=","IGNvbW1pc3Npb24=","c2NyaXB0aW9u","eEM=","IHN1bW1hcnk=","YXR0ZWQ=","IGNsb3Nlcg==","ZmluaXNoZWQ=","KCkpewo=","IFdvb2Q=","MzAx","X2ZpZWxkcw==","a3U=","X2l0ZW1z","
RmxhZw==","IGNvbmZpZGVuY2U=","IEZlZGVyYWw=","ZHV4","IGNvbXBhdA==","IHZlcnRpY2Fs","0Lk=","w6hz","OyI+Cg==","X21hbmFnZXI=","KCkpKQo=","SURF","OiIs","MjM1","X18K","IFdheQ==","MjIx","0Yg=","VGVtcA==","IFNUUg==","cml0dGVu","U3luYw==","IEFW","IENFTw==","IEd1aWQ=","IGVudmlyb25tZW50YWw=","IGNvcnJlc3BvbmRpbmc=","CWNvbnNvbGU=","IGp1c3RpY2U=","IEpT","IGxpdmVk","Z2Fy","IEdyYXBo","IFN0YXQ=","IGlQaG9uZQ==","LmFs","IEhE","IG9jY3Vy","IHRocmVzaG9sZA==","NTA5","IG9uY2xpY2s=","UkVH","LkdyYXBoaWNzVW5pdA==","TWV0YQ==","xb4=","IGN1bQ==","LmdudQ==","w6s=","IG9idGFpbmVk","IGNvbXBsYWludA==","IGVhdGluZw==","IHRhcg==","X3Rhc2s=","IG9wdHM=","MjE2","KHRv","UGFzcw==","IHBsYXN0aWM=","dGlsaXR5","IFdpbg==","LnByZXZlbnREZWZhdWx0","cGlsZQ==","IEdhcg==","IHF1YW50aXR5","X2xhc3Q=","IGdyZWF0ZXN0","RGFv","X0RJUw==","IFVzZWQ=","IEhQ","cml0aW5n","U0lPTg==","Ymx1ZQ==","ZG9tYWlu","IHNjb3Jlcw==","Tm9ybWFs","X2FkbWlu","IEFTU0VSVA==","VGhlbg==","Kioq","ZGlzdA==","bG9u","IGhhdGU=","c2hhbA==","SW1hZ2VWaWV3","ZGF0YWJhc2U=","IHBhbmQ=","IGxvZ2lj","PWZhbHNl","Ymc=","IENvbmZpZ3VyYXRpb24=","IG51cg==","T0c=","IG1hcnJpZWQ=","Ois=","IGRyb3BwZWQ=","MDQw","IHJlZ2lzdHJhdGlvbg==","0L7QvA==","dWx0aXBsZQ==","aXplcnM=","c2hhcGU=","LmNvcHk=","IHdlYXJpbmc=","IENhdGg=","IGRlZGljYXRlZA==","IC4uLgo=","IGFkdm9j","IEZhbWlseQ==","IHN0YXRlbWVudHM=","ZW1hdGlj","YW1waW9uc2hpcA==","IG1vdGl2","IEhhdmU=","IGJsb3c=","Sm9i","Y2VydA==","X3ZlY3Rvcg==","aW5zdGFsbA==","IENPUFk=","ZW1iZWQ=","RElS","IFNwcmluZw==","IGV4aGli","MjIz","Y2Ru","IENvbW1lbnQ=","IE9wdGlvbmFs","LnBsYXllcg==","IERhcms=","KHBvcw==","IFNob3VsZA==","IGNlbnRyZQ==","IEd1YXJk","w7N3","IHRyb3VibGU=","RU5FUg==","KHVuc2lnbmVk","X3NlcnZpY2U=","IG5z","dWxpbmc=","IE1leGljbw==","IE5Z","bXlzcWw=","IGxpYw==","5Zw=","TXI=","LWZs","IEN1c3RvbWVy","aWRp","ID8+Cgo=","cmlibGU=","INC/0YA=","IHNpemVz","X1NUUklORw==","dmFsaWRhdGlvbg==","IEpvbg==","KEh0dHA=","YWRkQ2xhc3M=","Tm9kZXM=","IGZyYWdtZW50","IHNwb2tl","IHdhc3Rl","Sm9pbg==","IGlsbHVzdHI=","ZWxp","Y2llbnQ=","IGFpZA==","IHByb3NlYw==","Jyl7Cg==","IH
Bhc3Npbmc=","IGZhY2Vz","U2hhcGU=","X1o=","aXRp","IGFsbGU=","IHJvYm90","ICAgICAgIAo=","IFNwZQ==","IHJlY2VpdmluZw==","IERldGFpbHM=","ICIp","bWc=","X1JFRg==","IGNvbXBhcmlzb24=","Kiw=","IEZvdW5k","X3Nlc3Npb24=","KFU=","L0Y=","IHh4eA==","TmV0d29yaw==","ZGVycw==","IGNhcHR1cmU=","IGNvcnJl","IEx0ZA==","IEFkdg==","W0A=","IGNsaXA=","TWlsbA==","IFByb2ZpbGU=","IGVuZGlm","IG9ibGln","ZGVzY3JpYmU=","LmVsZW1lbnQ=","cml0ZXJpb24=","TEQ=","ZXJlZA==","IGZhdm91cg==","c2NvcmU=","IEZpbHRlcg==","YXR0cmlidXRlcw==","IGNoZWNrcw==","SW5mbGF0ZXI=","IFBsdXM=","IHNjaWVudGlmaWM=","IHByaXZhY3k=","SGVhZA==","IGZlYXQ=","IGRlZ3JlZXM=","IFBhbGU=","OyI+","IGZpbG1z","IEF1ZGlv","IFRhZw==","IEVuZXJneQ==","aXRhcg==","cGFyYXRvcg==","IGZlbGxvdw==","IGV2dA==","IFRyaQ==","IERBTQ==","Y2xvdWQ=","IFBhc3N3b3Jk","IERlbW9jcmF0cw==","IEFjYWQ=","JGxhbmc=","IHJlYg==","KCkpCgo=","0L3Riw==","IEJ1cg==","cmVhZGNy","IGhleA==","MjA5","Q29uc29sZQ==","Y3Rs","b3VzZWw=","IFdpbGxpYW0=","IGF6","X1BPUlQ=","IHByYWN0aWNlcw==","IGFueXdoZXJl","IFBvc2l0aW9u","IC0+Cg==","aWFtcw==","LnVzZXJuYW1l","cGxhY2Vob2xkZXI=","IG9kZXI=","IFNlY3JldGFyeQ==","IGlU","bW9uZA==","ZXZlbnRz","P+KAnQ==","LlN1Yg==","IGF0dGFjaGVk","IG7Do28=","IGVzdGF0ZQ==","MzY1","LmFjdGlvbg==","IGZpZ3VyZXM=","IH0pOw0K","IHN1YnNjcmk=","LnRhZw==","bmFt","LnBsb3Q=","bm9vbg==","bGlhbWVudA==","Q2hhcmFjdGVy","LnRhYg==","IHdpbnRlcg==","IFZhcmlhYmxl","IHRyZWVz","IHByb3Vk","KFY=","X2xvYWQ=","IGhpZXI=","IEVjb24=","IGZk","IHZpY3RpbXM=","UmVzdA==","aWFuYQ==","IGZha2U=","LlByaW50bG4=","IHN0cmxlbg==","IHNhZA==","IGJsZQ==","UHJvdA==","IGJ1dHRvbnM=","IHRlbGV2aXNpb24=","IGxvZ28=","ZXh0ZW5zaW9u","CWo=","c3RlaW4=","YWNpb25lcw==","ICIiIgoK","IHNpbXA=","IHJlY29yZGVk","IGJyaW5ncw==","IHByaW5jaXBhbA==","IGZlZXM=","KHNvdXJjZQ==","a2Rpcg==","IHV0aWxz","IGNvcnJlY3RseQ==","Zmls","IHdlbA==","UGFpcg==","LWJ1dHRvbg==","c2NhbGU=","dmVyaWZ5","W2M=","IC0tLQ==","IGVzY2FwZQ==","aWtlcw==","TG93ZXJDYXNl","aWNpYW4=","IGNoYXB0ZXI=","IFRZUEU=","IHNoYWRvdw==","IGF3ZXNvbWU=","V0U=","ZWxpZg==","IGxhbWJkYQ==","IGRpc3Rpb
mN0","IGJhcmU=","LW9mZg==","IGNvbG91cg==","LmFwcGVuZENoaWxk","b2xlYw==","YWdh","LmZpbGw=","CXN1cGVy","IGFkag==","KHBvc2l0aW9u","LmdldEl0ZW0=","MjQy","U2hvcnQ=","IHRvdGFsbHk=","VkQ=","IFRyZQ==","X2Vw","dmVtZW50cw==","IFNvbHV0aW9u","IGZ1bmRhbWVudA==","Rm9sbG93","IGZhY2lsaXR5","IGhhcHBlbmluZw==","T0Y=","LnRleHRCb3g=","U3Bhbg==","IMKr","aWRlbg==","IGV4Y2VlZA==","KHBhcmVudA==","IGNw","57s=","IGhhc24=","IHByaQ==","IGNvbnNlcXU=","bmVu","IElOVE8=","SWdub3Jl","IEZ1dHVyZQ==","IGNhcmJvbg==","IFN0ZWVs","Zm10","b2tpZQ==","IHNwbA==","KHRpdGxl","LWluZm8=","IGRlYWxz","IGZpeHR1cmU=","ZWE=","RGl2","IHRlc3RlZA==","X3JldHVybg==","KQoKCgo=","dXBwb3J0ZWQ=","IENvb2s=","IHBheWluZw==","IElsbA==","IGFycmVzdGVk","IFByaW1l","X2NhbGxiYWNr","PiwK","ZHJpdmVy","T25jZQ==","YWJi","X2J5dGVz","IFNldHM=","KE9iamVjdA==","IGNj","IHNoZWxs","YWxv","KTsvLw==","KGxvZw==","MjY0","Y3RvcnM=","KTwv","IG5laWdoYm9yaG9vZA==","NDIw","YWlsYWJpbGl0eQ==","dm9s","IHlvdXRo","IHRlY2huaXF1ZXM=","IFNjaGVtYQ==","dWg=","bWVudGU=","IHJlcG9zaXRvcnk=","aW1t","IGNvb2tpZQ==","SlM=","b3ZpZXM=","Ons=","Q29tcGxldGU=","U2luY2U=","IGxhdWdo","X0JP","ZW5hYmxl","IERvZXM=","IFdhbGs=","d2hhdA==","a2Vz","IG11bHRpcA==","aW1lbnRz","ZXVy","IHZpY3Rvcnk=","R2VuZXJhdG9y","IE1vcw==","cm92ZXJz","IGNvbXB1dGU=","IHByb3ZpZGVycw==","IE1lZGlj","TFA=","X0NPTkZJRw==","IHZldGVy","c3RlcnM=","X3dpbmRvdw==","dW1lcmlj","CQkJCQkK","LlJlc3BvbnNl","IHJlcGxhY2Vk","LnJvb3Q=","LWZyZWU=","LWNvbnRhaW5lcg==","IG1hdGNoaW5n","IEVkaXRvcg==","PSR7","IFNhZg==","IHNpbmQ=","KGJ1ZmZlcg==","5Yc=","LmVkdQ==","KV07Cg==","IE5GTA==","YXlh","IGRvZ3M=","IGRlc2lyZQ==","IE1pZGRsZQ==","Q2FydA==","MzA2","VGhlbWU=","IG1vYg==","IGRpc3BsYXllZA==","aWdpdA==","IGFkdWx0cw==","IiIi","IGRlbGl2ZXJlZA==","dmlzaWJsZQ==","Ijp7Cg==","PDw8","IEdP","c2Nyb2xs","eEU=","IGFzc2lnbmVk","IEJvb2w=","IHdw","IGNvbWJhdA==","IEhhdw==","Li0=","IHN1cHBvcnRpbmc=","LkNvbnRlbnQ=","MzQ1","aXJjcmFmdA==","IHNwaW4=","IENS","Lm15","4KU=","dHBs","IHNwYWNlcw==","Pyw=","Mzg0","IFN5cmlh","IHBhdHRlcm5z","LWJveA==","IGZyYW1ld29ya
w==","LyU=","KGxvbmc=","IHRlYWNoaW5n","QVJOSU5H","X2tleXM=","IHRhYmxlcw==","VU5D","aW5hdGlvbnM=","LXdlaWdodA==","cmFkaW8=","IFBhYw==","LnNlcnZlcg==","LkNoYXJGaWVsZA==","cmluZw==","IHF1b3Rl","YW5uYQ==","IHdlcmRlbg==","IGNyZWFt","IG1hY2hpbmVz","LWs=","Mzc1","IHN0aW0=","IFN0b2Nr","cmljaw==","IGltcG9ydGFuY2U=","cng=","w7Vlcw==","2Yg=","IHN0cm9rZQ==","YWdyYQ==","IHRhc3Rl","IERFQlVH","VGhhbmtz","IFJlcXVpcmVk","b3Zh","TWVkaWE=","IHNpxJk=","KGJhc2U=","cG9zdHM=","IGZpbGVOYW1l","Q2hlY2tlZA==","IGludGVycnVwdA==","ICgpCg==","cHl0aG9u","cGFpcg==","IGNpcmNsZQ==","IGluaXRp","X3N0cmVhbQ==","IGNvbXByZWg=","bGVhcm4=","UHVibGlj","IGh1bWFucw==","IGJyaW5naW5n","b2dyYXBoaWM=","X2xheWVy","LWxpa2U=","dXBwb3J0SW5pdGlhbGl6ZQ==","aWRlYmFy","IHZvdGVz","IGRlc2lyZWQ=","TWFzaw==","IHJlbGF0aW9u","Lkluc3RhbmNl","SGVscA==","IGluc3Bpcg==","IE1vbm8=","Vmlld01vZGVs","b21ldGltZXM=","IGJhY2tncm91bmRDb2xvcg==","IHJvdGF0aW9u","IG1hcmk=","L3Rlc3Q=","SU5TRVJU","U3Rhcg==","cGh5","SWRz","X0dFVA==","IGluY3JlYXNlcw==","X2Nsb3Nl","MjMz","X0ZPUk0=","IFvigKZdCgo=","YXph","VEVYVA==","IMOk","IFZhbg==","IGxpZ2h0cw==","IEd1aWRl","IGRhdGVz","LkNvbW1hbmQ=","YW1hbg==","IHBhdGhz","LmVkaXQ=","CWFkZA==","ZHg=","IHJlYWN0aW9u","IEJlYWNo","LmdldE1lc3NhZ2U=","RW52aXJvbm1lbnQ=","aW50ZXJlc3Q=","IG1pbmlzdGVy","IHJlYWRlcnM=","CUY=","IGRvbWVzdGlj","IGZpbGVk","Q2l0eQ==","IG1hcHBpbmc=","IERFUw==","IHJlcGFpcg==","dGljcw==","aXh0dXJl","IG5vbWJyZQ==","LklTdXBwb3J0SW5pdGlhbGl6ZQ==","em8=","LklzTnVsbE9y","IENhcm9saW5h","IERlcg==","IEVWRU5U","IGdlc3Q=","IGhpc3Q=","cmVzb3VyY2Vz","IG9ycGhhbg==","LkFyZQ==","IEludmVzdA==","UkVGRVJSRUQ=","LkxvZ2dlcg==","IFJvbWFu","IGN1bHR1cmFs","ZmVhdHVyZQ==","cHRz","YnQ=","IGRvdA==","IGRpYW0=","dXNwZW5k","X2FjY2Vzcw==","KCl7DQo=","IHN1cnByaXNl","YWJpbA==","IHZpcnQ=","IGJvbWI=","YXJvbg==","X0lT","IHZhc3Q=","UmVhbA==","ZXBlbmQ=","aWN0ZWQ=","IHBpY2tlZA==","IEZM","IFJlcHVibGljYW5z","Lnplcm9z","UHJlc3NlZA==","c3Vw","LkNvcmU=","TWljcm9zb2Z0","c2VydmljZXM=","YWdpYw==","aXZlbmVzcw==","IHBkZg==","IHJvbGVz","NDAz","cmFz",
"IGluZHVzdHJpYWw=","IGZhY2lsaXRpZXM=","MjQ1","6KE=","IG5p","IGJh","IGNscw==","CUI=","Q3VzdG9tZXI=","IGltYWdpbmU=","IGV4cG9ydHM=","T3V0cHV0U3RyZWFt","IG1hZA==","KGRl","KXsKCg==","IGZybw==","aHVz","IGNvbW1pdHRlZQ==","7J20","LHg=","IGRpdmlzaW9u","KGNsaWVudA==","KGphdmE=","b3B0aW9uYWw=","LkVxdWFs","IFBoeXM=","aW5ndQ==","MDMz","NzIw","IHN5bmM=","IE5h","fX08Lw==","T0xVTQ==","aXTDqQ==","IGlkZW50aWZpZXI=","b3dlZA==","IGV4dGVudA==","IGh1cg==","VkE=","Y2xhcg==","IGVkZ2Vz","Q3JpdGVyaWE=","IGluZGVlZA==","aW5oZXJpdA==","IE5pZ2h0","MzAy","IHJlcG9ydGluZw==","IGVuY291bnRlcg==","IGtpbmRz","X3ByZWQ=","IGNvbnNpZGVyaW5n","Lig=","IHByb3RlaW4=","VHlw","Z3JpY3VsdA==","IEJhbGw=","QENvbXBvbmVudA==","IEVzcw==","IFJ1Yg==","ODAy","dWxw","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","aXR1ZA==","LmF0dHI=","aWVudGU=","IHNwZWxs","IEpvZQ==","RU5URVI=","X2hvc3Q=","aXRhbg==","IG1hdHRlcnM=","IGVtZXJnZW5jeQ==","dWF0ZWQ=","IENoYXQ=","PXsn","Y29udHJp","YXJrZXI=","5oiQ","aXBlcg==","IHNjaGVtZQ==","KHN0ZGVycg==","ICoo","Y2VpdmVy","LmNvbHVtbg==","IG1hcmtlZA==","X0FUVFI=","IGJvZGllcw==","IElNUExJRUQ=","R2Fw","IFBPU1Q=","IGNvcnBvcmF0ZQ==","IGRpbWVuc2lvbg==","IGNvbnRyYXN0","ZXJ2aWV3","IEVSUk9S","IGNhcGFibGU=","IGFkdmVydGlzaW5n","dXJjaGFzZQ==","IFBB","IEZyYW5jaXNjbw==","IGZhY2luZw==","44CM","Z2l0","IGJlZXI=","IHNreQ==","ZG93bmxvYWQ=","IEN1cg==","bWM=","YW5ueQ==","LmZsb29y","IGNyaXRlcmlh","IHBhcnNlSW50","YCwK","IGFzcGVjdA==","IGJ1bmRsZQ==","Q291bGQ=","IHRhbms=","LWlk","IGh1cnQ=","IGJyb2FkY2FzdA==","T0tFTg==","b3dudA==","bnVsbGFibGU=","Q2Fw","IGFsY29ob2w=","IENvbGw=","IEhlbHBlcg==","IEFm","Lm1ldGhvZA==","IHBsYW5uZWQ=","cGxlcg==","IFNpdGU=","IHJlc2M=","b21lbnQ=","IEphdmFTY3JpcHQ=","U0VSVkVS","IHJocw==","ZXJlcw==","KCIs","aWZp","LmZpZWxkcw==","IHBhcmtpbmc=","IGlzbGFuZA==","IHNpc3Rlcg==","Xwo=","Q29uc3RyYWludHM=","IEF1c3Q=","ZGlt","X3BvaW50cw==","IGdhcA==","X2FjdGl2ZQ==","IHZvb3I=","IFBP","QmFn","LXNjYWxl","bGFtYmRh","LkRpc3Bvc2U=","cnVsZQ==","IG93bmVk","IE1lZGljYWw=","MzAz","ZW50cmllcw==","IHNvbGFy","IH
Jlc3VsdGluZw==","IGVzdGltYXRlZA==","IGltcHJvdmVk","RHVyYXRpb24=","ZW1wbG95ZWU=","JC4=","QWN0aW9ucw==","TGlrZQ==","LCg=","KFJlcXVlc3Q=","JXM=","Lk9wZW4=","KSIK","IHBpeGVs","IGFkYXB0ZXI=","IHJldmVudWU=","b2dyYW0=","IExB","IE1hY2hpbmU=","INin","IGZsZQ==","IGJpa2U=","SW5zZXRz","IGRpc3A=","IGNvbnNpc3RlbnQ=","YcOnw6Nv","Z2VuZGVy","IFRob3Nl","cGVyaWVuY2U=","LkJhY2tDb2xvcg==","LnBsYXk=","IHJ1c2g=","IGF4aW9z","IG5lY2s=","X21lbQ==","LlBSRUZFUlJFRA==","X2ZpcnN0","Q0I=","IFdpZGdldA==","IHNlcQ==","aGFy","IGhpdHM=","IOKCrA==","IGNvbnRhaW5lZA==","cmllbnQ=","d2F0ZXI=","TE9BRA==","IFZpcmdpbmlh","IEFybQ==","IC4v","wrs=","X3Jvb3Q=","IGFzc2lzdGFuY2U=","W10s","c3luYw==","IHZlZ2V0","ZXNjYXBl","aWNlcg==","Ym9vc3Q=","IEZsb2F0","LVc=","Ki8NCg==","Kj4=","MjE4","ICQoIi4=","LnBvcw==","IGJveXM=","IHdlZGRpbmc=","IGFnZW50cw==","PSJf","IEFybXk=","IGhpbnQ=","dmlzaW9u","IHRlY2g=","IENvbm5lY3Q=","IGxlZ2VuZA==","IEJldA==","LkJhc2U=","U3ViamVjdA==","IGxpdA==","UmVtb3Zl","ICI6","IEZpbmFs","cGVhcmFuY2U=","IGlUdW5lcw==","IHBhcnRpY2lwYW50cw==","IFB5dGhvbg==","IGJ1c3k=","aWVs","dmVydGljZXM=","IHRlbXBsYXRlVXJs","IENsb3Nl","SW1n","IENvcnBvcmF0aW9u","dGltZXN0YW1w","IGV4dGVuZA==","IHdlYnNpdGVz","IHBvc3NpYmlsaXR5","0L7Rgg==","IGvDtg==","IG1lYXQ=","IHJlcHJlc2VudGF0aW9u","MjQx","IAkJ","X1NUQVJU","LmFwcGx5","IFZhbGxleQ==","IFN1Y2Nlc3M=","SGk=","IG5vYg==","IElFbnVtZXJhYmxl","X3NlbGVjdA==","Z2Vv","LiIpCg==","IHR1cm5pbmc=","IGZhYnJpYw==","KCIiKTsK","IHBlcnNwZWN0aXZl","6Zc=","IFNu","VGhhbms=","O2o=","LlBhcmFtZXRlcnM=","CSAgICAgICAgICAg","IGZhY3Rz","MzA1","IHVudA==","Lmluc3RhbmNl","IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIw==","LWVuZA==","IEpPSU4=","IEhlbg==","IHVyaQ==","5ZCN","INC90LA=","IEluZm8=","IGNvbmR1Y3RlZA==","IMOl","T1VSQ0U=","IHdpbmU=","Sm9obg==","LkVycm9yZg==","IEFnZQ==","b3VuZGVk","IHJlYWxpemU=","MzEy","IF07","IHN1YnNlcXU=","LG0=","KFVzZXI=","aWFubw==","IGFjY29tcGw=","aXNw","LnN0ZA==","6Yc=","IEJlZA==","LnNldEF0dHJpYnV0ZQ==","QlI=","a2VlcA==","IEFMTA==","IGlzb2w=",
"YW1tYQ==","UGFja2FnZQ==","IG9jY2FzaW9u","LXN1Y2Nlc3M=","0LXQtA==","IExJTUlURUQ=","c3RyaXA=","KCkKCgo=","aXN0cmlidXRpb24=","Q29sb3Jz","ICs6Kw==","RGlkTG9hZA==","YWxlcg==","IHRpZA==","IExFRA==","IExpbmtlZA==","IENhcnQ=","KCkpDQo=","X1JFQUQ=","IGtpbGxpbmc=","IFBIUA==","ZmVjdGlvbg==","IGluc3RhbmNlcw==","Y3Y=","Ii8+","IHNm","IHRheGVz","X2xvY2F0aW9u","IEJpdGNvaW4=","dWFibGU=","cmFuaw==","aWdub3Jl","dHJhY2s=","0LrQsA==","IHNob3VsZG4=","IE9Q","PT57Cg==","IGtt","IGhlbHBlcg==","X2hlYWQ=","IFdoZXRoZXI=","b2Nv","X2Js","IHN0YXRpc3RpY3M=","IGJlYXV0eQ==","IHRvZw==","dGlw","64uk","IGNzdg==","KHNxbA==","c3RkbGli","d2Vhaw==","IGxpa2Vz","xI0=","IHJlcGVhdA==","IGFwYXJ0bWVudA==","IGVtcGg=","X2VkaXQ=","IHZpdA==","CXR5cGU=","MjE3","RXZlbg==","dXRlbg==","IGNpcmN1bXN0YW5jZXM=","Ymlhbg==","IHN1Z2Fy","V2luZG93cw==","7J4=","IG9ic2VydmVk","L2RhdGE=","IGNhbGVuZGFy","IHN0cmlrZQ==","IFJFUw==","X3Nj","Zm9ueQ==","b3JlbQ==","KHo=","cG93ZXI=","ZXRlY3Q=","IFNhdA==","LmRlc2NyaXB0aW9u","IGdhbmc=","IFNwb3J0cw==","b25ncw==","IEJ1bmRsZQ==","LnN1bQ==","b25jZQ==","IGFjY3VzZWQ=","IGV4cGxvcmU=","IGFwcHJveGltYXRlbHk=","IGxvc2luZw==","dGhlc2lz","IEZ1bmQ=","IGRpYWdu","QXV0b3dpcmVk","cHJvcGVydGllcw==","IF8u","IGNudA==","Y2VkdXJl","IHl5","IGdyYW50","c29jaw==","LmlubmVySFRNTA==","IF0pOwo=","IENPTkZJRw==","PSck","NTUw","XV07Cg==","VU5E","IGdsb2I=","IGRpcmU=","dWZmbGU=","X01FTQ==","IGF1dGhlbnRpYw==","Pigi","IGRlY2FkZQ==","IEltcG9ydA==","IG9yaWdpbmFsbHk=","IGpRdWVyeQ==","IGluZGljYXRl","IG91cnNlbHZlcw==","U3c=","LmxibA==","ZW5lcmF0ZQ==","IGJhc2ljYWxseQ==","IEhvbQ==","ICsjKw==","IEJyaXRhaW4=","IEthcg==","dG9FcXVhbA==","LnN0b3A=","IG1vZGFs","aXNp","IHN1Z2dlc3Rz","IGR0eXBl","IHR1cg==","YmY=","IGNvbm5lY3Rpb25z","IEJlZm9yZQ==","aXN0ZWQ=","bW91c2U=","IHB1bGxlZA==","LmJ1aWxk","IGxlZ2lzbGF0aW9u","IGZvcnRo","cGFk","ZWdv","Lk5vdw==","IGV4Y2l0aW5n","fQoKCgo=","IGNvbXBy","IHNoYXJlcw==","IHJpZw==","Z3JlZW4=","X3ZlYw==","IGVudW1lcmF0ZQ==","QXV0bw==","aWNhdG9y","IFJheQ==","YXNzZQ==","IGhvbGlkYXk=","IG51bGxhYmxl","Z3Vu","X2RldGFpbHM=",
"IHdyYXBwZXI=","c2Vx","IFlvdW5n","anVhbmE=","ICJfXw==","bGljZW5zZQ==","c2VydmU=","Xig=","aWRlcnM=","LlJlbW92ZQ==","cm9wZG93bg==","J1M=","cGlu","KHRva2Vu","LkRlZmF1bHQ=","IHJlYXNvbmFibGU=","YW1waW9u","IFNvY2lldHk=","IGJlaQ==","ZXJ2ZXM=","cmFk","IEZveA==","X2ltYWdlcw==","IHdoZWVs","Jylb","IGNmZw==","KEJ5","Q29uc3RydWN0b3I=","IHZhcnk=","LnN3aWZ0","IHByb3h5","CUg=","IEFub3RoZXI=","IFBlbg==","IGNoZWNraW5n","IGplc3Q=","bWFuYWdlcg==","T3JpZ2lu","dWdz","b2ly","PjwhLS0=","IGV4cHJlc3NlZA==","IG1vZGVy","IGFnZW5jaWVz","IGlo","LWhpZGRlbg==","aW91c2x5","IFJvZA==","IHNvbGU=","TWVk","LkFueQ==","IHBj","YmFs","RXhhbXBsZQ==","IFNhbGU=","IHN0cmlw","IENvbXA=","IHByZXNpZGVudGlhbA==","TW9zdA==","cHV0YXRpb24=","KHJlZg==","IEZvdXI=","X2ZpbGVuYW1l","IGVuZm9yY2VtZW50","2K8=","IEdlb3Jn","d2VpZ2h0cw==","L2w=","IGFnZ3Jlc3M=","IGRyYXdpbmc=","YW5keQ==","PEk=","LWo=","YWth","aHJlZg==","IHRlYWNoZXJz","X1E=","KGl0","IE1C","IHRlbXBvcmFyeQ==","aXJlYmFzZQ==","c3RyYQ==","5pe2","6LQ=","KGxhYmVs","b3Vw","IHRvcGljcw==","IHBvcnRpb24=","aWRvcw==","IEpld2lzaA==","IHJlY292ZXJ5","NjUw","IHN0YW5kcw==","I1s=","IGFmdGVybm9vbg==","IEFydGljbGU=","X2F0dA==","IGV4cGxhbg==","IFBhaw==","LnNldE9uQ2xpY2tMaXN0ZW5lcg==","LmNoaWxkcmVu","IGlr","Kyg=","bGFn","IGRpc2s=","IGNvbnRyb3ZlcnM=","Ij4m","YXNw","IHdpZQ==","IEF1c3RyYWxpYW4=","IFlvdVR1YmU=","QXR0cg==","Y29udGFpbnM=","ZHVjZQ==","IE1hdHQ=","MzQw","YXRlcm4=","IHZvbHVudGU=","IG5ld3Nw","VlA=","b2x0aXA=","IGRlbGVnYXRl","X21ldGE=","IGFjY3VyYXRl","IEV4YW1wbGU=","JSw=","IERhaWx5","IGNhYmlu","IFNX","IGxpbWl0cw==","a2lw","IGFybXk=","IGVuZGluZw==","IGJvc3M=","IERpYWxvZw==","QWxzbw==","PSIjIg==","b3JkYW4=","cm93c2U=","LW1pbg==","ICIm","X2xvYw==","VVg=","IGRldmVsb3BlcnM=","IGFjY3VyYWN5","IG1haW50ZW5hbmNl","IGhlYXY=","IGZpbHRlcnM=","LlRvb2xTdHJpcA==","IG5hcnI=","IEVtcA==","T1JERVI=","IE1vYmlsZQ==","LlNlcmlhbA==","Lm91dHB1dA==","MjQ0","LmNvbA==","TWF0ZXJpYWw=","dW1h","IGNvbnN1bWVycw==","c2hpZnQ=","IHB1ZWQ=","IG1pbmk=","Y29sbGVjdGlvbg==","IGthbg==","LmNlbnRlcg==","SGlzdG9yeQ==","IGJlbmNo",
"KCkpOw==","aXRvcmllcw==","IGNyb3dk","X2NhbGw=","IHBvd2Vycw==","LUU=","IGRpc21pc3M=","IHRhbGtz","IENoYW5uZWw=","Zm9yd2FyZA==","X2NvbnRyb2w=","L3NyYw==","aWVzdA==","KioqKioqKioqKioqKioqKioqKioqKioq","IGJldGE=","KGNvbG9y","X09CSkVDVA==","IEFwaQ==","IGVmZmVjdGl2ZWx5","Q2FtZXJh","c2Q=","dXNzeQ==","Mjkw","RGljdA==","IEVmZmVjdA==","aWJpbGl0aWVz","IHJldHVybmluZw==","IEZhcg==","ICcnKQ==","IG1vZHVsZXM=","MjE5","aWxhdGlvbg==","ICgl","VFJHTA==","IHN0b3Jt","b25uYQ==","IEVYUA==","IHNwb25z","IGRpc3Bs","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","ZmFsbA==","5Yw=","aWduS2V5","X1VT","ZXRyaWNz","IGhhbmRsZXM=","VEw=","X2Ftb3VudA==","b3dh","YnJhbmQ=","IFRvb2w=","IHVzdWFs","Llo=","Y3JlbWVudA==","YWRpdW0=","c3RvY2s=","IHNlcnZpbmc=","IEJvbg==","IGxpbmVhcg==","IFRhcmdldA==","IFJhZGlv","SEw=","U2hhZGVy","b21hdGlj","YWd1ZXM=","aW5pdHk=","ZGlmZg==","X2l0ZXJhdG9y","cXVvdA==","ICwK","Y2FsbGJhY2s=","IHN5bXB0b21z","W18=","IEJ1bA==","IEZlYg==","dW5kbw==","X2FjY291bnQ=","IHR5cGVkZWY=","0LjRgQ==","dHJhcw==","VXNlcklk","IFBlbm4=","IFN1cHJlbWU=","fT4=","dXNlcklk","MzI3","IEtpbQ==","IGdh","IGFydGlzdHM=","5bg=","IEFic3RyYWN0","b2tlbW9u","IGhhbQ==","b3ZhbA==","IGNoYQ==","YXRlbg==","5YY=","Rml4ZWQ=","IHZ1bG5lcg==","IFBhcmFtZXRlcnM=","cXVhbnRpdHk=","LkNsZWFy","U2VydmxldFJlcXVlc3Q=","IHlh","IHNvdWw=","MDgw","dHJhbnNhY3Rpb24=","IHNvbG8=","IHBhaXJz","5pQ=","IEdyZQ==","X3dvcmQ=","IEND","IGdp","emll","IHNjaGVkdWxlZA==","cm90YXRpb24=","Z3lwdA==","dWxvdXM=","Ojpf","IEVsbA==","PCE=","CQkgIA==","bHA=","YWhh","Q29weXJpZ2h0","MDA5","IGRyYW0=","MjUx","IGRpYWdyYW0=","IE1lbQ==","IGdhcmRlbg==","Q29tcA==","IGF0dGVtcHRz","dWZmaXg=","Pigp","IHBoaWxvc29waA==","X3JlbA==","5bw=","IHN2","LnNlY29uZA==","YW50bw==","Lkpzb24=","IFRlbGU=","X2xvY2Fs","X3NlbmQ=","IGFzcGVjdHM=","7Jc=","SUJMRQ==","IHJhaWw=","IHdpZGVseQ==","YXNoZWQ=","aWFy","aW5m","dXBwZXI=","ZGphbmdv","X3Jlc3VsdHM=","aXNzaW5n","IGVxdWl2YWxlbnQ=","T1VORA==","IHR5","IHBvdGVudGlhbGx5","QWR2ZXJ0aXNlbWVudA==","MjM4","IFJlY29yZA==","Mzgw","cmVzZW50YXRpb24="
,"X3dpZGdldA==","b3VuZGluZw==","IHJlbGlnaW9u","IGNvbnNj","IExpbQ==","LmFt","SHRtbA==","ICc6","UEFUSA==","X3NwZWM=","b3J0ZWQ=","aWRhZGVz","X3NoYXBl","IGtlZXBz","LlNhdmU=","IExvYw==","b3Jp","IFRFU1Q=","dW5pY2lw","IHJlZ2lvbnM=","IGJlbGlldmVz","L2Vu","cG9zaXRl","eyc=","cHJlcGFyZQ==","X2NvbnN0","c2FtcGxl","IFdpbGxpYW1z","IHN0cnQ=","X0dldA==","IEFuZHJldw==","LmFjdGl2ZQ==","IGxheWVycw==","VmlzdWFsU3R5bGU=","YXp5","IEtu","IGFjaWQ=","IEFzaWE=","IGV4Y2Vzcw==","CW15","IGtleWJvYXJk","ZW5zdXM=","IGNyZXc=","IG1pc3NlZA==","bWFzdGVy","IFdpbGQ=","IG5ld2x5","IHdpbm5lcg==","IHN0dWI=","aWNvZGU=","Lm1vdmU=","RG9tYWlu","IFNhcg==","IGZvcmVzdA==","TEVE","Y2xhaW1lcg==","LmV4aXQ=","IFdpbmRvdw==","IHJlc2lzdGFuY2U=","IENIRUNL","KCIt","IFJ5YW4=","IHBpcGU=","IGNvYXN0","REVG","Ly8h","X29mZg==","ZXhpdA==","IHVsdGltYXRlbHk=","aW1pdGl2ZQ==","IEtlZXA=","IGhpc3RvcmljYWw=","IGFueXdheQ==","IEphY2tzb24=","b2NrZXI=","RVJO","IFVJTlQ=","eW50YXg=","RVJZ","aXNtcw==","IGNu","IG9jY3Vycw==","IDs7","VGV4dFZpZXc=","QUU=","L2ltZw==","IHllc3RlcmRheQ==","LWRlZmF1bHQ=","IHRpbnk=","IHByb2M=","IGFsaXZl","IFJFRw==","LnRo","ZWFyaW5n","LmdldExvZ2dlcg==","PGxpbms=","X2xvZ2lu","Rm9sZGVy","YWJj","bHlwaGljb24=","0L3Qvg==","IG5vdGljZWQ=","b2RpZ28=","IGVkaXRpb24=","aW1hdG9y","LkVuYWJsZWQ=","LnBhcnNlSW50","IHlhcmRz","CQkJCQkJCQkJCQkJ","IHZlcmJvc2U=","0LvRjw==","X0JZ","LmxvZ2lu","Lio7Cg==","IE1pZA==","w6llcw==","IGdsbw==","IGJ1aWxkaW5ncw==","IHpl","IEl0ZXI=","IHR1YmU=","IFBvdA==","XE0=","MjUz","PHRo","YnJpZGdl","IFNjcmlwdA==","IE1vZHVsZQ==","IHZhY2M=","IGluc3RhbGxhdGlvbg==","dnk=","VmlzdWFsU3R5bGVCYWNrQ29sb3I=","IFNN","LnRvdGFs","NjQw","YmF0","IGZpbmRz","IGF0bW9z","U3Vidmlldw==","aXphcmQ=","IHJlcGxhY2VtZW50","bGljYXRlZA==","YXBpcw==","IGxvZ2dlZA==","IExlZnQ=","R3Vp","X1R5cGU=","dG0=","UGFk","IGhvdXNlaG9sZA==","IHJlbGU=","IHByb3Bvc2Fs","X0NMQVNT","MjQz","Ojo6Og==","IGluZnJhc3RydWN0dXJl","SW5qZWN0","L2h0bWw=","MjI2","IGFkcw==","aXp6YQ==","IG1n","Y3RyaW5l","JQo=","PGh0bWw=","LWltYWdl","IGF0dG9ybmV5","PG0=","KCcs","IGNhbm4=","IHByaW50
bG4=","b29zZQ==","IHllbGxvdw==","LmV4cA==","cGF5bWVudA==","IHRhYmxlVmlldw==","YXdheQ==","IG9wcG9zaXRpb24=","IEFnYWlu","IEhhbmRsZQ==","IGV4Y2x1c2l2ZQ==","aW5hcg==","w6ly","0L7QsQ==","IENPREU=","ZW1wb3Jhcnk=","IHJlYWN0","cGlwZQ==","MjM2","Y3o=","LmFjdGl2aXR5","IGxhcmdlbHk=","IGRpc3M=","YXh5","ZXNpcw==","IFJlbg==","IGNvcm4=","LlVzZVZpc3VhbFN0eWxlQmFja0NvbG9y","ZGF5cw==","IGZydWl0","SW5zZXJ0","X2VuYw==","RXN0","X2RlYw==","IEx1Yw==","IMO8YmVy","cGFyYW1ldGVycw==","UEVSVA==","ZXhwcmVzcw==","X3Byb2ZpbGU=","VW5rbm93bg==","IHJldm9sdXRpb24=","LmFkZHJlc3M=","X3JlcXVpcmU=","IHVuaWZvcm0=","IFBhY2s=","bGFy","IFVJVGFibGVWaWV3","IGRlcGVuZHM=","VmFsaWRhdGlvbg==","Y29uZmlybQ==","T3duZXI=","IHRyaWI=","aGV0","IElkZQ==","YW5zYXM=","MjQ3","TGFuZ3VhZ2U=","dWV0","IFBv","IFN0ZXZl","IGNvbnRlc3Q=","X0RFRkFVTFQ=","IGFwcGFyZW50bHk=","UkVFTg==","IGZyZXF1ZW50bHk=","IHRyYWRpdGlvbg==","b2NvbGF0ZQ==","U0k=","IEFyZ3VtZW50","Rm9jdXM=","ZXJ0ZQ==","IExheW91dA==","IGR4","IGdlbmVyYXRvcg==","IFdhaXQ=","UG9saWN5","bGlnaHRz","LkV4ZWN1dGU=","NTU1","UHk=","IGJlZHJvb20=","ZWRh","cmFpZA==","CXNpemU=","IGFuY2llbnQ=","IHB1bXA=","IGR3","ICghKA==","IHNwZWNpZnk=","KHN0YXR1cw==","IEZCSQ==","LmV4Y2VwdGlvbg==","IHJlbWFyaw==","bHltcA==","YW50ZWU=","VXBsb2Fk","ZXJuZXQ=","6aE=","aW5lbnQ=","IFJlbmRlcg==","ZG0=","IE1lbW9yeQ==","cmljaA==","IFRvb2xz","IGtuZQ==","IHBlcm0=","YmFk","IGRpbm5lcg==","LnJlc2V0","IGpMYWJlbA==","RmVhdHVyZQ==","LlNlcnZpY2U=","ICh7Cg==","IHJlZmVycmVk","LmNsYXNzTGlzdA==","MjQ4","IGluaXRXaXRo","IFRleHRWaWV3","IG5laXRoZXI=","IGNvdW50eQ==","ICJ7","56c=","IHRhY2s=","Y2xhc3NOYW1l","IFVTRVI=","IHJlbmV3","YGA=","Z2V0TmFtZQ==","IGJyb3du","RXJyb3Jz","ZXJ0bw==","IHN1c3RhaW4=","U08=","bGV0ZXM=","IEludmFsaWQ=","MjQ2","MjI3","IGVuZW1pZXM=","dW5nZQ==","IGV4aXN0ZW5jZQ==","ZXJyYQ==","CiAgCg==","dXRvcmlhbA==","I2E=","cGF5","Y2hhcmdl","IElyZQ==","YXRlc3Q=","IGV4cGxvcw==","IGZpcmVk","TkVS","IFR5","aWNpb24=","VXJp","IG9idmlvdXNseQ==","IENvbHVt","ICcr","IERldmljZQ==","LXJlbGF0ZWQ=","X0FSRw==","IHZvcg==","IExlc3Nlcg==","X09Q","
U2VyaWFsaXplcg==","IHVwZ3JhZGU=","TGlnaHQ=","IGNvZGVz","Kys7DQo=","IHdyaXRlcw==","Zm9vZA==","IMOpdA==","QHNlY3Rpb24=","IHRyYWNrcw==","IHNlcmlvdXNseQ==","Y2h0","NDMw","KHNpemVvZg==","IGltbWVkaWF0ZQ==","IHNjaWVudGlzdHM=","IHsk","X25l","LkFuY2hvclN0eWxlcw==","IGFjY29tbW9k","IEhhcnJ5","IHNpZ2h0","IFBhbGVzdA==","ZXJzaXN0ZW50","INGD","LWlucHV0","IGNvb3JkaW5hdGVz","wrc=","MjI4","V2VsY29tZQ==","LmNvbmY=","IGdyZXc=","IGJvbGQ=","IENQVQ==","KG15","IHBlcmZlY3RseQ==","IG1vbWVudHM=","IE1vdmll","LWRhdGE=","eXN0YWw=","X1dJRFRI","MjYy","IFNjcmVlbg==","5p0=","IGRpc2Fw","IHJlZHVjdGlvbg==","LkdldENvbXBvbmVudA==","X01PRFVMRQ==","IGdlbmVyaWM=","IGR5","YWxsZXI=","IGN1cmw=","IEJvZHk=","IGJhbmtz","LHQ=","YXZn","IGV2aWw=","IG1hbnVmYWN0dXJlcg==","IHJlY2VpdmVy","Q29sdW1ucw==","IGluZ3JlZGllbnRz","CW91dA==","cXVlcw==","LkxvYWQ=","IHNsb3dseQ==","IFRvd24=","IENlbGw=","X25vcm1hbA==","X3ByZWZpeA==","IEFsZXJ0","KCJ7","w6Ry","4oCcVGhl","IE1E","IGNvdXJzZXM=","YXRoYW4=","6Zk=","b2Nj","IFNFUg==","ZXNpZ24=","QWRkcg==","PVsn","KCIuLw==","XX0=","LmZvbnQ=","IEluc3RhZ3JhbQ==","IEJvcmRlcg==","b2Rh","IGhhbGw=","IHJ1bQ==","X2JpdA==","IHNhdmluZw==","X2Rvd24=","UmFuZG9t","X3JlZ2lzdGVy","KENvbnRleHQ=","IG9wcG9zaXRl","Um9vbQ==","WUVT","0LDQvdC4","IGVuam95ZWQ=","X3J1bg==","Q2xlYXI=","4oCY","IEZvcmQ=","b25pYw==","b3N0ZW4=","Il0p","X2F1dGg=","Ly8NCg==","IHN1ZmZpY2llbnQ=","TEVT","IHBoZW4=","IG9o","X2Nzdg==","IHJvdXRpbmU=","LkFyZUVxdWFs","YXlsb3I=","IGJhc2tldA==","X0NPTU0=","cnlwdGVk","U2lt","IFNob3A=","IHN0dWRpbw==","YXRvcw==","KFc=","W3N0cmluZw==","w6R0","b2dh","IHNocg==","IHNpY2s=","QW5vdGhlcg==","IGRvb3Jz","X05F","IFRIUkVF","Lm9yZGVy","cmF6aWw=","IG1hcHM=","X1RSVUU=","dHJhbnNsYXRl","IG5lYXJieQ==","MjY1","IG5hY2g=","TE9BVA==","YmF0Y2g=","MjI5","IGx1eA==","YXNoZXM=","YW5nZXJz","4oCm4oCm","X0VWRU5U","X1VQ","IGFjdHM=","aW52","X01FVEhPRA==","Y2Npb24=","IHJldGFpbg==","dXRjaA==","INCx","IGtub3dpbmc=","IHJlcHJlc2VudGluZw==","Tk9U","cG5n","Q29udHJhY3Q=","IHRyaWNr","IEVkaXRpb24=","dXBsaWNhdGU=","IGNvbnRyb2xsZWQ=","Y2Zn","amF2Y
XNjcmlwdA==","IG1pbGs=","V2hpdGU=","U2VxdWVuY2U=","YXdh","IGRpc2N1c3NlZA==","NTAx","IEJ1c2g=","IFlFUw==","LmZhY3Rvcnk=","dGFncw==","IHRhY3Q=","IHNpZA==","JCQ=","IEVudW0=","Mjc1","IGZyYW1lcw==","fSk7","IHJlZ3Vs","J107DQo=","UmVnaW9u","MzIx","ZmZm","IGNybw==","KGNvbQ==","PSIr","U3R1ZGVudA==","IGRpc2FwcG9pbnQ=","UkVTVUxU","Q291bnRlcg==","IGJ1dHRlcg==","IEhh","IERpZ2l0YWw=","IGJpZA==","Ij57ew==","aW5nZXJz","IENvdW50cnk=","X3RwbA==","Il0pCg==","L2s=","ZGF0aW5n","OiM=","IERBVEE=","eW5jaHJvbg==","X2JvZHk=","b2xseXdvb2Q=","IHZhbG9y","aXBpZW50","b2Z0","VUJM","ZG9jcw==","IHN5bmNocm9u","IGZvcm1lZA==","cnVwdGlvbg==","IGxpc3Rh","UmVxdWVzdE1hcHBpbmc=","IHZpbGxhZ2U=","IGtub2Nr","b2Nz","Ins=","X2ZsYWdz","IHRyYW5zYWN0aW9ucw==","IGhhYml0","IEpl","ZWRlbg==","IGFpcmNyYWZ0","aXJr","IEFC","IGZhaXJseQ==","LmludGVy","LkFjdA==","IGluc3RydW1lbnQ=","cmVtb3ZlQ2xhc3M=","LmNvbW1hbmQ=","0Yk=","CW1lbQ==","KG1pbg==","IG90","IGNvbGxl","PXM=","dGltZW91dA==","IGlkcw==","IE1hdGNo","aWpu","emVybw==","NDEw","IG5ldHdvcmtz","Lmdvdg==","IGludGVs","IHNlY3Rpb25z","b3V0aW5l","KGNtZA==","KGRpcg==","IExJQUJJTElUWQ==","IEJsb2c=","IGJyaWRnZQ==","MzA4","IENW","Y29udmVydA==","ICIpCg==","IEJlcm4=","X1BP","ZXZhbA==","KHNldA==","dG9vbA==","IHBheW1lbnRz","QmVoYXZpb3Vy","IGNvbmNyZXRl","IGVsaWc=","IGFjY2VsZXI=","IGhvbGU=","X28=","VEVHRVI=","IGdyYXBoaWNz","T3du","Rm9ybWF0dGVy","b25kZXI=","IHBhY2thZ2Vz","L2E=","IEtub3c=","T3JEZWZhdWx0","IGR1dHk=","V2FpdA==","0L3QsA==","X3JlY29yZA==","W3Q=","TWVzaA==","IG9uZ29pbmc=","LmJlYW5z","IHRhbg==","IGludGVycHJldA==","YXN0ZXJz","UVVBTA==","IGxlZ3M=","XFJlcXVlc3Q=","LWZpbGU=","X211dGV4","IFNhaW50","Ly8j","IHByb2hpYg==","KGluZm8=","Oj0=","bGludXg=","IGJsbw==","b3RpYw==","CWZpbmFs","X2V4cA==","IFN0b3A=","YXBpbmc=","KHNhdmVk","X3B1c2g=","IGVhc2U=","X0ZS","cG9uc2l2ZQ==","c3RyY21w","OgoKCgo=","5Lu2","b2xp","IGV4dHJlbWU=","IHByb2Zlc3Nvcg==","SW1hZ2Vz","LklPRXhjZXB0aW9u","IGFkZHJlc3Nlcw==","cGxlbWVudGVk","IGluY29ycG9y","IHVzZUVmZmVjdA==","X09G","IERh","bm9tYnJl","SVJTVA==","IGRpc2NyaW0=","IGNv
bXBlbnM=","Z3JlZ2F0ZQ==","YW5jZWxs","YWNoZXM=","IENyaXRlcmlh","JHJlc3VsdA==","RGVzdHJveQ==","IHNlY29uZGFyeQ==","V2F0Y2g=","IFNlbQ==","IE1jQw==","IGFjYWRlbWlj","VXBwZXI=","Ojp+","dXRyYWw=","IERvZw==","YWRlZA==","MjM3","VmFsaWRhdG9y","IGRlcml2ZWQ=","IHNldFRpbWVvdXQ=","IEtlbg==","IHR5cGljYWw=","IEJvYg==","IGJvdW5kcw==","IFNlYXNvbg==","IGNyYXp5","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","LXJvdXRlcg==","aXR0ZXN0","IE1pcg==","IGVtb3Rpb25hbA==","LHY=","Y24=","L3N0","5b0=","b25vbQ==","IGRlY2xhcmVk","Pi4=","YWlsaW5n","IC8qPDw8","IG5vcm1hbGx5","KE1l","ZXZpbg==","bGlrZWx5","IHBvaW50ZWQ=","IFN0YWNr","IHdhbGxz","LlZlY3Rvcg==","bWVhbg==","XV0K","IGxpc3RlbmluZw==","YWR2","IHN3YXA=","SUZU","2Ko=","LmFyZ3Y=","dWxz","PG9wdGlvbg==","bm90YXRpb25z","IGVtYWlscw==","IFVrcg==","YXN0YQ==","IFRodXM=","IFN0b25l","IGFwcGVhbA==","LuKAmQ==","IHJlZ3VsYXRpb25z","UHJlZmVyZW5jZXM=","IFBob25l","dWxm","IERS","IHRlY2hub2xvZ2llcw==","IHBhcmFncmFwaA==","IG5lY2Vzc2FyaWx5","Mzcw","MDMw","LmVhY2g=","PGZsb2F0","cmVzYQ==","IHVuZGVyc3Q=","IGZpbmdlcg==","cHJlc3NlZA==","LWJ5","aWZmZXI=","d2F0Y2g=","IEJh","QUlN","IHdlaWdodHM=","IFJvbg==","Jyl9fQ==","W3NlbGY=","LS0tLS0tLS0tLQo=","cGVyaW1lbnQ=","IHRvU3RyaW5n","eGlj","IENhbWVyYQ==","IQoKCgo=","YXVyYW50","UHJlZml4","IGluc3RpdHV0aW9ucw==","OmludA==","IGV4cG9zdXJl","cGF0dGVybg==","IExpbnV4","Lm51bWJlcg==","cmVkaWVudA==","QXJndW1lbnRFeGNlcHRpb24=","IENoaWVm","In0s","IGVsZWN0cm9uaWM=","cm9uZw==","ZXJk","c3BOZXQ=","cmFpdA==","Lycs","IE9oaW8=","Q29udHJvbGxlcnM=","IGNvbnRpbnVpbmc=","IFRlbXBsYXRl","IEV0aA==","c3o=","L2Vudg==","RW52","JS4=","YXJ0ZXJz","KSgo","IFRBQkxF","IMOu","cGVyYXR1cmU=","cHJvZ3Jlc3M=","UHJlcw==","6rA=","aW1wbGVtZW50YXRpb24=","IGJpZW4=","IHN0cmVldHM=","X01TRw==","TmV3cw==","IyMj","Oi8=","IGN1dHRpbmc=","eEI=","cmVzc2Vk","X0VOQUJMRQ==","bGFi","IGNhdXNpbmc=","XSkpOwo=","YnJh","eEZGRkY=","aWxseQ==","cGxldGlvbg==","d2lsbA==","X2Jhcg==","IHN0cnVjdHVyZXM=","IEltcA==","24w=","IDw+","IC0tLS0tLS0tLS0tLS0tLS0=","X0JVRkZFUg==","LmRpcg==","IHBsYWlu","I
HBlZXI=","MjQ5","Z2c=","b2ludHM=","IHNvbWV3aGF0","IHdldA==","IGVtcGxveW1lbnQ=","IHRpY2tldHM=","aXJtcw==","IHR1cGxl","c2lz","JHNxbA==","cmln","IGNvbnZlcnNpb24=","IGdlcw==","IGNvbmZpZ3VyZQ==","ZWdy","IENh","IF9fKCc=","b3VzdG9u","LnRva2Vu","QmxhY2s=","IG1hZ2F6aW5l","QVc=","LklO","b3Npbmc=","IGJyb2tl","IENydQ==","REVMRVRF","IGRlc3Ryb3llZA==","KE1hdGg=","IGFwcHJvdmFs","LWRvbQ==","IElJSQ==","dGFibGVWaWV3","IGRlc2lnbnM=","IGNydXNoaW5n","IGNvbnNlbnQ=","ZGlybmFtZQ==","b21w","IGNyeXB0","Pyg=","b3JvdWdo","MzA3","Lm8=","CWxpc3Q=","YW1zdW5n","LiIiIgo=","ZXJyaW5n","R29vZ2xl","X3BhaXI=","X0lOSVQ=","cmVtYXJrcw==","IGdlYXI=","RmlsbA==","bGlmZQ==","fSIpCg==","IHN1aXRhYmxl","IHN1cnByaXNlZA==","X1JFUVVFU1Q=","IG1hbmlmZXN0","YXR0ZW4=","IGZydXN0cg==","b3ZlbWVudA==","LmNsaWNr","IGlp","IGV4cGFuc2lvbg==","aWdz","UGFyc2U=","LlJlZ3VsYXI=","Um9i","X2xheW91dA==","7KA=","IHRyYW5zbGF0aW9u","IEJlYXV0","QmVzdA==","X0NPTE9S","PGxhYmVs","IGxpcXVpZA==","SVRT","IHByb2Q=","MjM5","IG9wZXJhdGU=","VUlLaXQ=","IG5hdHVy","YXJndW1lbnQ=","X2RldGFpbA==","IENlbnRyZQ==","ICItLQ==","IH19Ig==","bG9jYWxl","LnR2","X3NlcQ==","IHVwY29taW5n","Q2hhcnQ=","IERpdmlzaW9u","IGNsaW5pY2Fs","Q29tcGFueQ==","U2VwYXI=","bGFz","IEh1bg==","OnM=","IGhlYWRpbmc=","0L7Qsw==","ICIiKTsK","W2lk","Ymlh","IHN0cmV0Y2g=","aWNpZGU=","IHJlcHJvZHU=","LnByb2plY3Q=","bGVnZW5k","ZW5kZXJz","IHJlc3BvbnNlcw==","IG9udA==","cml0aWNhbA==","IHJlZnVnZQ==","IExp","IDoKCg==","IFRocmVl","LmNvbnRyb2xsZXI=","X0lOREVY","X0ZPUg==","XE1vZGVscw==","amF4","CWV4aXQ=","IOKW","IGNvdmVycw==","CXk=","LS4=","SU5ET1c=","IGZhaWxz","aW5jbHVkZXM=","IGZhdWx0","NDQw","IGx5","NDQ0","w7Fv","LnNsaWNl","SUxFRA==","IFB1cg==","IEFzaWFu","X2JhdGNo","Lk1heA==","dmw=","IENPUFlSSUdIVA==","IGdpYW50","IE1hbnVhbA==","IENvcHk=","Q2xhc3NOYW1l","SGVhbHRo","Q3Vyc29y","SUJPdXRsZXQ=","IHR3ZQ==","5rM=","X2xhYmVscw==","IGNvbGxlY3RlZA==","IGZ1cm5pdHVyZQ==","IGRlYWxpbmc=","Q29udHJvbHM=","IEhvdGVs","Y2tz","IGNob3Nl","4pSA","b2Rk","U1I=","2Yo=","7IQ=","IGFjY29yZA==","IE1vdmU=","IE1vZGU=","IE1vY2s=","IHRoc
mVhZHM=","KysrKw==","IE9wdGlvbnM=","UmVmcmVzaA==","IERpZA==","J10tPg==","dWNj","X2NoYW5uZWw=","LmFicw==","IHt9LAo=","IFdhbA==","ZXJpb3I=","IG1haW5seQ==","IERyaXZlcg==","Tm90Rm91bmRFeGNlcHRpb24=","IGNvdW50cw==","ZWFt","ICY9","UXVlc3Rpb24=","IEFsaQ==","IGFueW1vcmU=","ZGV0YWls","dGFpbA==","IG1pbGU=","IEZhaXI=","IHNvcnJ5","IHN1cnJvdW5kaW5n","IGFkbQ==","RGV2","IG1hcmlqdWFuYQ==","IFNvdW5k","IEFzaA==","RkQ=","VGVhbQ==","LnBvcnQ=","IFtdCgo=","dWJibGU=","IGFzYw==","IGludGVudGlvbg==","QWNj","Y2hp","dXN0ZXJz","IGluc3BpcmVk","c2Vn","Q0xV","IG1hbmlw","TWV0YWRhdGE=","Q29ubmVjdA==","IEJlaA==","IGZpbmRpbmdz","IGFzc2VtYmx5","d29ybGQ=","IHJlbWFpbmVk","IHVpZA==","KC4=","IG14","TG9vcA==","CgoKCgo=","IGZhbnRhc3RpYw==","d2hv","YWtp","IEJhc2lj","IFlldA==","IFVzZXJz","aWtpcA==","IGhlYWRz","IE1pY2hpZ2Fu","X2l0","IFRvcm9udG8=","IHJlY29yZGluZw==","IHN1Ym1pdHRlZA==","X3ZhcmlhYmxl","bWVkaWF0ZQ==","LmdyYXBoaWNz","IHN0b29k","IHJlYXI=","dmVsb2NpdHk=","X01FU1NBR0U=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","cm9sZXM=","IFRvdXI=","X3llYXI=","ZW5kbWVudA==","YW1wcw==","IElyZWxhbmQ=","bWFs","IHlvdW5nZXI=","IHN0cnVnZ2xl","IGNhYmxl","IFNETA==","KCct","YW5lcw==","IE5lZWQ=","LlJvdw==","UG9s","IFBI","X3NjcmlwdA==","YWdlbQ==","IEJhcw==","X3NwYWNl","LmxvYw==","Omk=","YWRy","IGVuZ2luZWVyaW5n","aXRlbg==","KSY=","IHVr","IExpdHRsZQ==","X0NPVU5U","eEE=","QXJyYXlMaXN0","5o0=","ICIiKQo=","QW5jaG9y","IGhhbmc=","dHdpdHRlcg==","IGNvbXBldGl0aXZl","LnNyYw==","44GX","IHRyYW5zbGF0ZQ==","IENyZWF0ZXM=","b29rcw==","IFJvbGw=","JycnCg==","L3No","c29tZQ==","RW5jb2Rpbmc=","LnJlc29sdmU=","IGRlc2lnbmVy","IFN0b3JhZ2U=","IHph","IE5ldmVy","IHNvbWV3aGVyZQ==","IGJveGVz","LnNvdXJjZQ==","IHB5Z2FtZQ==","IGdyb3du","LnR3","KCkpLAo=","JyxbJw==","IG9wcG9uZW50","KHNyYw==","LmxheWVy","QVBQ","IEFjdGl2","IGd1ZXN0cw==","IFZBTFVFUw==","fTsKCgo=","Lm5hdGl2ZQ==","IGFtb3VudHM=","LlJF","IGNsb25l","IHdlcmVu","ICI8PA==","X2Fj","IGJyZWFraW5n","IHJlbGlhYmxl","LlBPU1Q=","IFNreQ==","ICcm","IHNhdmVkS
W5zdGFuY2VTdGF0ZQ==","YXN0aW5n","aWxsaW9u","Y29tbWVudHM=","dWx0eQ==","Lm1lbnU=","L2NvbmZpZw==","IAoKCg==","VE9ETw==","IHB1cmNoYXNlZA==","X2Nvcg==","CWF1dG8=","Q29tcGF0QWN0aXZpdHk=","Y29tcGxldGU=","X2dyYXBo","aXNvZGVz","IHNpdHVhdGlvbnM=","IEhvcg==","UmVjZWl2ZQ==","4oCcV2U=","IGVudGl0aWVz","LmFzc2VydEVxdWFscw==","0L7Qug==","IFNhbnM=","dmluY2U=","cm9tcHQ=","PQo=","IC8u","LlNlbGVjdA==","eWx2","IGJhdHQ=","QXVkaW8=","IGluY3JlYXNpbmdseQ==","LkJ1bmRsZQ==","IGV4cGxhaW5z","MDYw","dGhlYXN0","Lm9mZnNldA==","IGhhbA==","IHRlY2huaXF1ZQ==","X2xpbWl0","IGRyYXdu","QVlFUg==","IGZlYXR1cmVk","eXl5eQ==","YXRpbg==","cGhlbg==","YWNoZWw=","IVw=","bG93ZXI=","IEdS","IHBhZw==","IFBhcnNl","IHRvdQ==","5LiA","RGlzdGFuY2U=","SW5kZXhQYXRo","IGhlbGw=","c2lt","VVRUT04=","VXNhZ2U=","ZWxlbml1bQ==","IEZhbGw=","ICIuJA==","IE11","IGNydWM=","IHNvbnQ=","UkVGSVg=","MzEx","IGludGVyaW9y","IE9seW1w","LkF1dG9TY2FsZQ==","cGFyYQ==","QXhpc0FsaWdubWVudA==","IHJpdmVy","RHRv","IHdpdGhkcmF3","UmVhY3Q=","LWNsYXNz","YmVmb3Jl","X2FsbG9j","Q29udGVudHM=","IFdhcw==","SUNU","IGZvcm11bGE=","IGluZGljYXRlcw==","ICAgIAoK","X3N0b3Jl","aXR0aW5n","IEl0YWxpYW4=","X1NldA==","X3JlcG9ydA==","IHBpZA==","X1ZFUg==","IHdpbnM=","IENsb3Vk","Iil7Cg==","Y2hlc3Rlcg==","IGRlbmllZA==","IHdpcmQ=","IFN0ZXA=","IGludmVzdG9ycw==","Ym9sZA==","X2Rpc3BsYXk=","b3V2ZXI=","b3Jlcg==","UmVzZXQ=","IHN1cmdlcnk=","IHN0cmF0ZWdpZXM=","L21hdGVyaWFs","X3VuaXQ=","IGNvdW5jaWw=","LlBlcg==","IOKAng==","IHJlZm9ybQ==","RnJhbWV3b3Jr","IGxpc3Rpbmc=","X2J0bg==","IGJpcw==","JWQ=","ZWdhcw==","IHN1ZGRlbmx5","X1NFUg==","MzE1","IGFv","X2RpcmVjdG9yeQ==","ZmFz","IHByZW1pdW0=","IHRyYWNraW5n","IEJM","IG1hdHVyZQ==","IGJhdGhyb29t","ICcvJw==","IMSR","UGVyZm9ybWVk","IHNvbGRpZXJz","YXJuaW5ncw==","IHdhbGtlZA==","LWNvbg==","Ym90dG9t","IHN1cnByaXNpbmc=","IGdlbmU=","VXN1YXJpbw==","LkRFRkFVTFQ=","IE1JVA==","Q09ERQ==","IEVneXB0","cGlja2Vy","eXNxbA==","QVRVUkU=","ZGV0YWlscw==","IENvbmZlcmVuY2U=","SW5mb3JtYXRpb24=","IE1haWw=","LWRvd24=","cmFyaWVz","YnJv","IHN1YmplY3Rz","ICcq","6K+3","b3JpZW50","Ok
A=","dmVyYm9zZQ==","RUY=","IHRvbGVy","MzEz","ZW5nZXJz","IGVuZHBvaW50","IHN0cmFuZ2U=","IGNvbG9u","IHByZWZlcnJlZA==","ZGVw","IEVW","QVJSQVk=","IHdoZQ==","IHB1cA==","X25vZGVz","IHRhbGtlZA==","IGluc3RpdHV0aW9u","ZGJj","IGV4cG9zZWQ=","dGVlbg==","IEZyb250","VFQ=","X05PTkU=","XC9cLw==","cHJvZ3JhbQ==","IGVuY291cmFnZQ==","LmA=","c2hpcmU=","IElzbGFt","MzI1","ZWVu","Tkk=","JyI=","LldpZHRo","IGxpa2Vk","IHsuLi4=","IFN5c3RlbXM=","IHZvdHJl","IG1hbnVmYWN0dXJpbmc=","Q29udmVydGVy","IEluZg==","7Jo=","RFRP","IGluY2hlcw==","IOCk","w7k=","IENoYXJsZXM=","QlU=","IikpOwoK","IExhYm9y","dW5u","IGVzdGlt","bW9iaWxl","IExlYXJu","Mjgx","X0NBTEw=","4oQ=","IGluZGljZXM=","IHR1Yg==","Mjg4","aWtpcGVkaWE=","Q29zdA==","cm93YWJsZQ==","66E=","Z2FnZQ==","IGZ1bmN0aW9uYWxpdHk=","dXp6bGU=","ZW1vcw==","LmxpYg==","IGRhc3M=","0LXQug==","ZW5uYQ==","IHNob3Rz","IHJlc3RvcmU=","L0Q=","Rm9yS2V5","XSxb","YWxpYXM=","bGludA==","LnN0cmVhbQ==","5qA=","X0ZPUk1BVA==","IHNpbHZlcg==","LnJlcG9zaXRvcnk=","IGxlZ2lzbA==","LkJvcmRlcg==","X2ZlYXR1cmVz","UGVybWlzc2lvbg==","IGhvdXNlcw==","IFdhcnM=","X0NPTVA=","IGluanVyaWVz","IGNvbnN0YW50bHk=","Zmx1dHRlcg==","RU5V","IENvbmY=","IHJlY29nbml6ZWQ=","IHByYWN0aWNhbA==","IGRlY2VudA==","Qko=","XSk7","YXN0eQ==","IEFjdGl2aXR5","LW1vZGU=","IHNsaWRl","LklzTnVsbE9yRW1wdHk=","IFlPVQ==","UG93ZXI=","aW5kaWNlcw==","IHF1YWxpZmllZA==","IHRocm93bg==","aGVsbG8=","MzE2","IE5pY2s=","bGFo","YXNzZW1ibHk=","IFNtYWxs","b2xkaW5n","U2hvdWxk","IFNpbHZlcg==","KHNhdmVkSW5zdGFuY2VTdGF0ZQ==","IHRvZ2dsZQ==","Lk5vdA==","Q3RybA==","Om5pbA==","IENvbnRpbnVl","IEJvb3Q=","5ok=","IE11cg==","ZG9u","IEZB","U25hcHNob3Q=","IGFzc29jaWF0aW9u","Zm94","LGE=","YXppb25l","XSkNCg==","Q1RZUEU=","IGZhZGU=","IERhcg==","Lm5hdmlnYXRpb24=","IGx1Y2s=","U0NSSQ==","IERlYWQ=","IHRlcm1pbmFs","X0xFTkdUSA==","IGVmZmljaWVuY3k=","IHVudw==","IG5hcnJvdw==","aW1lbnRv","KENvbG9y","IFNlYQ==","X2FyZWE=","LEE=","X29wdA==","IEhpbGxhcnk=","LnRhc2s=","IEphYw==","YXN0ZWQ=","IEFkYW0=","IElsbGVnYWw=","IHNlYXJjaGluZw==","SW5zdGFuY2VPZg==","SmF2YQ==","IEZvcm1hdA==","
IHJlYWxpemVk","IENoaWxkcmVu","IGtpbA==","KGZyYW1l","4oCdLgoK","IHNjZW5hcmlv","Il0pOwo=","IGluY3JlZGlibGU=","bGl4","SU9FeGNlcHRpb24=","IFF1ZXN0","aWx0eQ==","IHVubG9jaw==","4oKs","IHJlZmVyZW5jZXM=","IFZlcnQ=","QmluZGluZw==","ZWdhdGl2ZQ==","IHdyYXA=","LmRhdGFiYXNl","KGNvbnRlbnQ=","QnVm","IFRyYWQ=","IEF1ZA==","dHJhY2U=","Lm1vY2s=","IHRoZXJhcHk=","CUw=","LlRvSW50","IEtpbmdkb20=","QnVz","aGF1c3Q=","IiIiCgo=","KGVuZA==","LmRyYXdhYmxl","W107Cg==","IEhvc3BpdGFs","IHBoYXJt","LS0tLS0=","IEFH","w6lk","PiIpOwo=","IHdhbGxldA==","YXRhYmxl","KSQ=","IG1vbnRobHk=","IGRpYWdub3N0aWM=","U3ltYm9s","IGl0ZXJhdG9y","dW5maW5pc2hlZA==","IGltbWlncmF0aW9u","c3I=","Uk9X","KGdhbWU=","IGNsb3RoZXM=","IFVudA==","IGFjdGl2YXRpb24=","X0Nvbg==","Mjcz","Lmhhc2g=","IGluaXRpYWxseQ==","Lkhhc2g=","IGN1dHM=","Zm91bmQ=","IFN0b3J5","0YbQuA==","YWNhbw==","X1RZUA==","cHJvdG8=","ZXN0cg==","LXBhZ2U=","YWhy","IGluY29ycmVjdA==","IEpvc2VwaA==","VGV4dEJveENvbHVtbg==","X3N0eWxl","IERhbmllbA==","c2hlZXQ=","IGxpdg==","bGluZWQ=","IHJh","UnVudGltZQ==","X2VtcHR5","c2x1Zw==","X3N0cnVjdA==","64o=","bXU=","IHBlcm1pdHRlZA==","IHJlZ2lvbmFs","IHNvYnJl","IFN1Y2g=","IFtf","IHJvb2Y=","LkFsaWdubWVudA==","dGltZXM=","Lm1zZw==","IGNoZXN0","IFRhYg==","IGVzdGE=","w6Ru","IHN1YnNjcmlwdGlvbg==","KGNvbW1hbmQ=","c3BlY2lhbA==","IG1lYWw=","Iik6Cg==","X2N0eA==","IGNsb3NlbHk=","MzA5","ZXRyeQ==","LWJl","YWRlbA==","IFJhbQ==","aWdlc3Q=","IFNwYW5pc2g=","IGNvbW1pdG1lbnQ=","IHdha2U=","Kj4o","UEhQ","X3s=","Y2tlcg==","PExpc3Q=","X251bGw=","Mzkw","IFJlc2VydmVk","IGluaGVy","LkNvbHVtbnM=","LkFzcE5ldA==","X0lOVkFMSUQ=","IFBhcmFtZXRlcg==","IGV4cHI=","fXs=","Q2VsbFN0eWxl","IHZhbHVhYmxl","IGZ1bm55","SW52","IHN0YWJsZQ==","KnQ=","IHBpbGw=","Mjk5","cGxpZXJz","IENTUw==","IENvbmRpdGlvbg==","IFNwZWVk","dWJsaXNoZXI=","MjU5","IG9mZmVuc2l2ZQ==","Y2VzdA==","aWNhcw==","IHNwYXJr","IFByb3Rl","c2V0dXA=","SUZZ","IFRheA==","V2hv","RmFtaWx5","LWZvcg==","LnVr","IGZhc2M=","c3Zn","IikpLg==","IGJpcnRoZGF5","4paI","dmVo","ZWxsZWQ=","IGltcG9ydHM=","IElzbGFtaWM=","VEE=","IFN0YW4=","d2Vh
dGhlcg==","IHN1c3BlY3Q=","ZWF0dXJl","ZW5uZXM=","V00=","Lm1pbmVjcmFmdA==","YXZpZA==","6L0=","LnNlY3VyaXR5","aW5vcw==","R29vZA==","IG1hcmNo","NjU1","MjU3","IHBvc3Nlc3M=","dXN1YXJpbw==","Q29ucw==","YW1iZXI=","Y2hlZHVsZXI=","IGhvcnNl","570=","KGJvZHk=","IFRyYW5zZm9ybQ==","X2RlY29kZQ==","LnN2Zw==","IGZvbw==","IGRlbGxh","ZXh0ZW5kcw==","YW1lcg==","IHByb2Nlc3NlZA==","IEhhcnI=","IEFJ","IGtv","Q0hBUg==","KCU=","IHRhcA==","KHsn","Y3JvbGw=","RE9N","IHRlYQ==","IHJlaW4=","MjYx","IHdvcmxkd2lkZQ==","X2Zu","c2hh","IGJpcg==","w6fDtWVz","PSIjIj4=","IHJlcHJlc2VudGVk","aWxsZXI=","KGV4cGVjdGVk","IGRhbmNl","IHZpc2l0b3Jz","LmNvbmNhdA==","LWJpdA==","VVJSRQ==","IFJvZw==","dnA=","aXBo","IExMQw==","aXRsZWQ=","aWFtaQ==","Q29sbA==","X3JlYWw=","X3Nob3c=","X2ZvbGRlcg==","IGRhcg==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","IGxhdHRlcg==","YXJjaHk=","IGJvdw==","IG91dGNvbWU=","NTEw","IFBvc3RlZA==","IHJpc2tz","IFRoZXJlZm9yZQ==","IG93bmVyc2hpcA==","IHBhcmFsbGVs","IHBlbmRpbmc=","Z2VvbWV0cnk=","IHJlY29nbml6ZQ==","U1RFTQ==","IENQ","IGltbWlncg==","SVRMRQ==","ICAgIAkJ","Y29ubmVjdGVk","IHNtaWxl","KGRvY3VtZW50","XENvbXBvbmVudA==","dmVydGljYWw=","IGNvbnN1bXB0aW9u","IHNob2Vz","LmltcGw=","dW5rcw==","LiI7Cg==","IGZvb2Rz","Xyk7Cg==","LmFzc2VydFRydWU=","IHBpcGVsaW5l","IGNvbGxlY3Rpb25z","IGVhcm5lZA==","IENlcnQ=","IHBhcnRuZXJzaGlw","KGFjdGlvbg==","MjYz","IGNk","IFZlcnk=","T3B0aW9uYWw=","IHNjcmVlbnM=","IHRpdGxlcw==","ZW5lcmF0b3I=","IGFiYW5kb24=","a2luZA==","SUxURVI=","IGNsb3Npbmc=","bGljYQ==","X2ludGVy","IGNhbXB1cw==","c2V0dGluZw==","U3ByaXRl","44Gv","X3JlcGx5","VG9MaXN0","OlwvXC8=","ZWRl","IGZvbGtz","IGJvYXQ=","KGFyZ3Y=","IHBlcm1hbmVudA==","IGNhcnJ5aW5n","IGNvbnNlcnZhdGl2ZQ==","aW1wb3J0YW50","LmltZw==","IEltbQ==","IGRpbWVuc2lvbnM=","YWxhbmQ=","c2luZ2xl","RXhpdA==","LS0tLS0tLS0tLQ==","YXJpYW50","dGVybmFs","U2Vjb25kcw==","IEl0YWx5","b3RsaW4=","LlJlc3VtZQ==","PSci","KT09","Y2VwdG9y","IHNjYQ==","L21haW4=","U2VjdXJpdHk=","X2RhdA==","IGxldHM=","IGFxdQ==","IHdoZW5ldmVy","YmVycnk=
","IGFjdGluZw==","YW50aQ==","cGQ=","Jmd0","5q0=","Wm9uZQ==","VG9kYXk=","IS4=","MzIz","VG9Qcm9wcw==","YWJpcw==","aXRhYmxl","IGdhbA==","XXs=","aXpvbmE=","IGluY29udHJp","TkVU","Ly8vCg==","W2lu","X3NhdmU=","IGV4ZW0=","IEtlbm4=","IGV2b2x1dGlvbg==","Mjcy","dmFycw==","X3N0YXRz","LW9ubHk=","IENvbG9yYWRv","IHdhdGNoZWQ=","Ym91cg==","IHNldmVyZQ==","IHByb2Zlc3Npb25hbHM=","cG9ydGlvbg==","IGd1YXJhbnRl","0LM=","IHB1c2hlZA==","IEdp","770=","IHR1bQ==","IEF6","IEVkZ2VJbnNldHM=","IikpOw0K","aXNzZQ==","LmFj","U2V0dGluZw==","IGFwcHJlY2lhdGU=","IFZhbHVlRXJyb3I=","IHN1cnZl","IFJvbGU=","LkludGVy","cGxvdGxpYg==","amV0","ZGFt","IHBsYXRmb3Jtcw==","dGVsZQ==","VVRP","IEludGVybmFs","Kzo=","fTsNCg==","R2VuZXJhbA==","XEVudGl0eQ==","IGxhd3llcg==","cXVpdg==","IFBvc3Rz","aXNv","IGFjY3Vt","b2Jl","IG1hcmtz","IF07Cgo=","CXRleHQ=","LnN1Y2Nlc3M=","Y3Vycg==","YXNh","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","IHRoaW4=","X292ZXI=","MDE2","YXJlc3Q=","IE9z","KGFkZHJlc3M=","IHZlbG9jaXR5","IFtdOwoK","PSIuLi8uLi8=","IFByaXY=","Ym93","IGd1YXJhbnRlZQ==","JQoK","MzIy","IGV2YWx1YXRl","LkxFTkdUSA==","IGludmVudG9yeQ==","cWE=","X2RlYnVn","Lk9uQ2xpY2tMaXN0ZW5lcg==","IGxpZXM=","IGFzc2Vzc21lbnQ=","ZGF0ZXRpbWU=","LmJhY2tncm91bmRDb2xvcg==","ICovDQoNCg==","cmFm","dW53cmFw","IEZvb3Q=","IG5vdGlmeQ==","IGxvd2VzdA==","RE9DVFlQRQ==","IGxhbmd1YWdlcw==","ZXh0cmE=","LWJhY2s=","IGVpbmVu","dGVtcGxhdGVz","Mjcx","X3Bhc3M=","NTIw","Nzc3","IE11c3Q=","IGVzdMOh","X2NvcmU=","IFNjb3Q=","QUk=","IGJpYXM=","YXRpb25zaGlw","Q29uc3RhbnQ=","IHByb2dyYW1taW5n","SW5z","dXNwZW5kTGF5b3V0","IFBST1ZJRA==","YW50ZXM=","IHNoaXJ0","aW5hdGVk","Lk9L","W2E=","IHRoaW5rcw==","PwoKCgo=","IHJlZ2FyZGxlc3M=","IE1hZ2lj","dWxhdGluZw==","CWNsYXNz","YWRkR3JvdXA=","UkVBVEU=","IFNV","IHNpbXBs","Y29weXJpZ2h0","IGJ1bmNo","IHVuaXZlcnNl","OTUw","IEVycg==","IHByZXNlbnRhdGlvbg==","Y2F0ZWdvcmllcw==","IGF0dGFjaA==","LnNpZ24=","X0FD","IGRpc2NpcGw=","IHJlZ3VsYXJseQ==","IHByaW1hcmlseQ==","aW5rcw==","W1s=","LnJhbmQ=","LnNob3VsZA==","b3dudG93bg==","PSIn","IHNhbnM=","IH
N1cHBvcnRlcnM=","c2VxdWVuY2U=","R08=","Li4KCg==","IFNwcg==","IGNhcmVmdWxseQ==","VUlDb2xvcg==","ZGVzdHJveQ==","IHRvZG9z","IE9SREVS","b3R0ZWQ=","IGRvbnQ=","YXVkaQ==","X3BsYXllcg==","Z3Jl","NjI1","IE9pbA==","PGJvZHk=","X3N0YWNr","LlBhZGRpbmc=","IFByb2R1Y3Rz","IHByaXZpbGU=","MDE0","IGluanVyZWQ=","IEZ1cnRoZXI=","IGFsaWFz","LlJlc3VtZUxheW91dA==","X0xFTg==","IHNlcw==","J107Cgo=","Y3JlZW5z","IGRpcmVjdGVk","LlN1c3BlbmRMYXlvdXQ=","b2RnZQ==","LkF0","bWFya3M=","IFVuaXZlcnM=","ZXJ0cw==","IEVzYw==","IG5hdmJhcg==","IHV0aWxpdHk=","YWdub3N0aWNz","IGluamVjdA==","IEROQQ==","ICIsIg==","YW1hcg==","IGV1","IHJlc3RhdXJhbnRz","X3B1dA==","dXRlcnM=","VG9vbFN0cmlw","dHc=","aXN0cm8=","IHpvb20=","IGxlZ2l0","cGVjaWZpYw==","Mjg1","IENvbWU=","IGxvY2FsU3RvcmFnZQ==","IGFic29y","LlBhbmVs","IERlc2lnbmVy","IG93","SUNBTA==","X3VyaQ==","KGZpZWxk","IHN1cGVydg==","RXhpc3Rz","IHJlc3BlY3RpdmVseQ==","IFN0YW5k","Q29uZg==","dXNzaWFu","MzY0","IGFyYw==","IG5k","dWNrcw==","IHJlc3Ry","IHNlYXNvbnM=","IENoYXB0ZXI=","IFN3aXRjaA==","cGlj","IGhp","bG9hZGVk","IGZsdWlk","LWJ0bg==","IHJ1bnRpbWU=","Lml0","MjU4","Qk4=","T3BhY2l0eQ==","YXNhbnQ=","cnlwdGlvbg==","LW5hdGl2ZQ==","IHRhdWdodA==","5a8=","YWdtZW50","IG11bA==","UmVnaXN0cnk=","X2dyaWQ=","IEJyb29r","OlNldA==","IG1vbmdvb3Nl","QU1FUw==","aW5uZXJIVE1M","IHNvY2k=","IEludGVs","Z2V0SWQ=","Q21k","IGFjY2Vzc2libGU=","cmFtZXM=","bGV0b24=","IF9fKA==","CWRlbGV0ZQ==","IFNxdWFyZQ==","IgoKCg==","IGJ1Y2tldA==","YXZvcml0ZQ==","IEJyZWFr","Kytd","IGJydXNo","MjY2","IHRlbnNvcg==","L2h0dHA=","VGlsZQ==","IGZ1bmN0aW9uYWw=","ICIq","d2hlbA==","IHRlbnQ=","IENoYXJhY3Rlcg==","IHNlZXM=","LlNU","Qmln","IGV4dGVybg==","VXJscw==","KSkpKSw=","IEpy","LkJ1aWxkZXI=","Ljs=","bmw=","X0luaXQ=","IEhFUg==","xbxl","bXlzcWxp","X2ljb24=","dmFu","IGZlZWxpbmdz","IGxlYW4=","IGhvcGluZw==","VFY=","PSI8Pz0=","IGN1cnZl","X3N0ZA==","X0xJTkU=","ZHN0","IG1vcmFs","ZW1lcw==","b2d5","IHVyYmFu","MDE1","IGFzaWRl","IGVkaXRpbmc=","QURE","U2Vjb25k","VHJhY2s=","IHZvdGluZw==","IGhvbm9y","Lics","ZWxsZW4=","Q2hhdA==","IGltcHJvdmVtZW50","
J10KCg==","oIE=","IHBhcnNlZA==","ICAgICAgICAgCg==","IGxhenk=","IGZhbGxpbmc=","U2VyaWFsaXpl","IFBh","X2dy","IGZvcmV2ZXI=","LndoaXRl","LlF1ZXJ5","QmVk","IER1","IHJlc3VtZQ==","IHBhcGVycw==","IEluaXQ=","IHN1ZmZlcmluZw==","4oCL","IGRlY2xhcmF0aW9ucw==","KCkt","IGV4ZWN1dGVk","IEhvbA==","LmJsb2Nr","44Oz","U0s=","IHN0dWNr","IExvY2s=","aW5jaXBhbA==","TnVsbGFibGU=","IHNlc3Npb25z","dW5p","IGNvdXA=","YXBwcm8=","Z2hhbg==","X3Bvb2w=","Mjgz","CWlk","IHNsb3Rz","IG1lZGljaW5l","IGdsYWQ=","IE1vbm9CZWhhdmlvdXI=","YXRyZQ==","ICQoJw==","bWVyaWNhbg==","YWdn","IGthbm4=","X2Nvbm5lY3Q=","IGJyYW5kcw==","IHNrZQ==","IGRpZ2l0","PG4=","IGJhY2t1cA==","IHBlcnNvbmFsbHk=","LlByb3BlcnR5","MzE0","LmNvbW1pdA==","IGNyeQ==","X2NvdW50ZXI=","IG1hbGxvYw==","IGdyYW4=","IERyb3A=","cGxhdGZvcm0=","cmVkZW50aWFscw==","aW5raW5n","IFVJTA==","dWJz","IG1s","bGVzc2x5","R2VuZXJhdGVk","ZXJlb3R5cGU=","IGJhdA==","TGF5b3V0UGFuZWw=","TE9U","Iik7DQoNCg==","IG11c2NsZQ==","IGNlcnRpZmljYXRl","QU5ETEU=","IGhhcmRlcg==","IHBpeGVscw==","KSIsCg==","LkhlYWRlcg==","IGRldmVsb3Blcg==","IExhcw==","ZWdhbg==","Ljw=","IGV4cGxvZGU=","IHBhcnRpY2lwYXRl","UGF0dGVybg==","KHRhYmxl","IFRFWFQ=","Y29uc3RhbnRz","eEQ=","dGhldw==","fSwKCg==","44Gu","X2Rlcw==","IHN1YnN0cg==","IFNtYXJ0","IHNjYWxh","Z2VudA==","LWJhcg==","ZXNzaW9uYWw=","dW1icw==","LmV4ZWM=","J1w=","VEs=","dW5pc3Q=","cHJvb2Y=","Y2lhbA==","cHJvYw==","PXsi","LmhyZWY=","PSQo","IGx1bmNo","aXNjYWw=","IEVudHJ5","IG91dGRvb3I=","c2VtYmxl","IGVzc2VudGlhbGx5","L0c=","W10p","JSI=","c3Rlbg==","VVNFRA==","IGR1c3Q=","5bA=","CQoK","IHJldGlyZQ==","IGZpYg==","QWx0aG91Z2g=","IGxvdmVz","IHJlYWRz","eWNsZXM=","IEhlbA==","X3VpbnQ=","ICcuJA==","X2luaXRpYWw=","TmFtZWQ=","IGZ1bmRhbWVudGFs","QURJTkc=","IHRvdw==","IEFERA==","IEFjYWRlbXk=","MDUw","OlN0cmluZw==","IGNvbXByZWhlbnNpdmU=","LnNjYWw=","IE1ldGE=","TWVzc2FnZXM=","LmFubm90YXRpb25z","XFJlc3BvbnNl","IGFja25vd2xlZA==","IEFSRQ==","XT09","IGNsZWFuaW5n","6L4=","RW50aXRpZXM=","IFNhbGVz","IFdpcw==","LmV4dGVuZA==","YWxsZW5nZQ==","IGdhbWluZw==","JHF1ZXJ5","SUNFUw==","RVRDS
A==","SG9yaXpvbnRhbA==","cXVlbnRpYWw=","ODUw","QkFDSw==","ZGV2ZWxvcA==","aXNvcg==","KGNvZGU=","LUs=","X1BJTg==","cmVxdWVuY3k=","IFF1ZXN0aW9u","X2NvbnRhaW5lcg==","X21vZHVsZXM=","IEplcnNleQ==","X2RpZmY=","LmVs","ICooKA==","Y250","IFNh","Q1BQ","aW5pdGU=","IHVudXM=","LXdoaXRl","ZXRhcnk=","IGludm9sdmluZw==","ID8+DQo=","YmVzdA==","YWxsYXM=","ZW50ZWQ=","ICAgICAgICAgICAgICAgICAgICAgICAgCg==","X2Nvbm5lY3Rpb24=","IHJlcG8=","ZW5hYmxlZA==","0LDQug==","IHNoYQ==","IG1lbWJlcnNoaXA=","U3RhdHVzQ29kZQ==","aW5hdGluZw==","X3Nt","X2N1c3RvbQ==","X3dlaWdodA==","IGNzcw==","U3RhdA==","X2Vudg==","bGlua3M=","VFJM","IEhpdA==","LHI=","dXBpZA==","IG9wZW5z","IGdlbnQ=","X3Zpcw==","IGpveQ==","PHc=","X2Nvc3Q=","IFB5T2JqZWN0","cmVuY2U=","IEdlb3JnaWE=","IEJyb2Fk","bW1h","4oI=","cGY=","ICJcIg==","ICgm","b21v","IGxpdGVyYWxseQ==","iJg=","bWV0cmlj","IGJhcnM=","emVk","KHdpbmRvdw==","IElzcmFlbGk=","IGZvcm1hbA==","aWRlbnRpZmllcg==","LmRhbw==","IERlYXRo","JTsK","IGRlY2xhcmU=","YXJtcw==","UkVBTQ==","UEVSVFk=","IGNvbnNlcXVlbmNlcw==","dG9vbHM=","UGVvcGxl","IFdoaWNo","PigpOw0K","LmRlY29kZQ==","X0FDVA==","QnV0dG9ucw==","LmZsb2F0","LkZpcnN0","66U=","IFBvbGl0","IFhDVA==","VGFncw==","IENHRmxvYXQ=","PXN0cg==","IGxlYWY=","LWNoZWNr","IElzcw==","LnN5c3RlbQ==","bG9nb3V0","YWNodA==","QW5nbGU=","c2lu","Y2hhcnQ=","SU5URVI=","IE5VTQ==","QmFzaWM=","LlByb3BlcnRpZXM=","5Lit","X2NoYW5nZQ==","IEJyYXppbA==","QWJzdHJhY3Q=","IDorOg==","X3VzZQ==","0LDQuw==","MjY4","IEx5","SUJVVA==","IG91dGVy","IC0tPg0K","IHJlbGllZg==","bGFw","cXVlcg==","X3BhcmVudA==","aGVhcA==","TE9TRQ==","IGNvbWJpbmU=","IFJvc2U=","b3dlcnM=","IHByb2NlZHVyZXM=","IFNvcnQ=","YW5pbQ==","dmFyaWFudA==","ZWhpY2xl","IHNpZ25pbmc=","UHJpbWFyeQ==","Y3VycmVuY3k=","IHNleGU=","b2Vu","dGhldGE=","ZW1hbg==","IGltcHJlc3NpdmU=","KCdf","CVU=","IFRleHRTdHlsZQ==","X2NudA==","IHNsaWNl","KCc6","IHVuZGVyc3Rvb2Q=","SGlz","Mjc3","MDEz","IGluZm9ybWVk","IG5pY2s=","NDI5","KFRBRw==","aGQ=","IGVsZWN0aW9ucw==","ZXN0dXJl","IFNhbnRh","IENvYXN0","LnBkZg==","aW5jaXBsZQ==","LmNsb25l","Ym9ybg==","dXRh","IG
xpY2Vuc2Vk","Q3I=","IGJyZWFk","IEhvdXN0b24=","IG5vZA==","IGhvcGVz","IENHUmVjdA==","IGd1aWx0eQ==","LmdpZg==","IHJvc2U=","LkNvbW1vbg==","VGlw","QU5L","IEZD","RHVyaW5n","IFN5bWZvbnk=","IGRlZmVuc2l2ZQ==","a20=","KT4=","YXJjaGl2ZQ==","IFVSSQ==","eWNsaW5n","LW8=","IFdlYnNpdGU=","QU1Q","NDA1","aXNobWVudA==","IGRvY3RvcnM=","RGlyZWN0","QVJJ","IFJlZGlyZWN0","aWVyZW4=","OTYw","X2Rpc3Q=","eW8=","IFByb2dyZXNz","IHp1bQ==","IG1lbW9y","IEVE","IGp1cg==","5o2u","X1RBQkxF","IHV1aWQ=","RXhwcg==","LmhlYWQ=","KCcl","cG9pbnRlcg==","IGVzdGltYXRl","IEdyZWc=","IGxvYWRlcg==","IGlPUw==","IG1lbnM=","W3k=","IHJlZnVzZWQ=","IHByZWNpc2lvbg==","aXNjaA==","IEFDVElPTg==","Q2xvdWQ=","c1dpdGg=","KHJldA==","Mjky","X0FERFI=","X2NvbmY=","KGRm","IGxvY2tlZA==","IHJpc2luZw==","44O744O7","IE1z","IHNjZW5lcw==","X0VYVA==","X3Jhdw==","X3RoZQ==","cGVvcGxl","IHJlY29u","IEZ1bg==","IGJsZXNz","IFVwZGF0ZWQ=","NDIy","w7xu","ICAgICAgICAgICAgDQo=","cGVjdGlvbg==","UmVsZWFzZQ==","LmxvZ2dlcg==","IFNZ","IGNvdW5zZWw=","dXJk","X3RydWU=","IGV2ZXJ5Ym9keQ==","aXZvdA==","IGhlbmNl","IE5BUw==","Nzg5","IG9wcG9zZWQ=","dW5rbm93bg==","IERFU0M=","IENoYWly","ZmFpbGVk","IElOQ0xVRElORw==","Mzg2","MzUy","IHdyaXRlcnM=","e30K","w610","X2NvcHk=","fTo=","IEJhdA==","IGNvbnZlcnRlZA==","ZWRpbmc=","cGxhY2VtZW50","IEhvc3Q=","U291bmQ=","0LjQvA==","IHNvdWdodA==","NDAy","bWlk","IHNhbGFyeQ==","b2dn","4oSi","YnVs","IHdpcg==","dmFsaWRhdG9y","X1NUQVQ=","LnN0b3Jl","IEJhdHRsZQ==","xLFu","IC0tPgoK","VHJ1bXA=","ZG90","IENPTlQ=","LmZldGNo","IGNvbnRpbnU=","d2Fz","IGZyYXVk","X3RtcA==","bWl0dGVy","LnBpY3R1cmVCb3g=","R0E=","IHRvdXJuYW1lbnQ=","LklucHV0","MzQz","W3I=","ZXhpb24=","Y2VudGFnZQ==","IEtvcmVhbg==","dW5kZWY=","IEF2YWlsYWJsZQ==","cmVzaGFwZQ==","IGtpdA==","IFN0cnVjdA==","IFNVQg==","QW5zd2Vy","X2xpYg==","LnR3aXR0ZXI=","IG9yZQ==","IERyYWdvbg==","LkV4dA==","LGs=","IGV4cGxhbmF0aW9u","cmVmcw==","IERyaXZl","IFRyYWluaW5n","Mjgy","Lkhhcw==","MzQx","aW50YWdl","Ymln","b2xvZ2lzdA==","ZW5uaXM=","NDYw","2Yc=","IGNoaWNrZW4=","ICAgICAgICAgIAo=","55s=","44Gn","IHBlYWs=","IGRya
W5raW5n","IGVuY29kZQ==","IE5FVw==","bWFsbG9j","CWZwcmludGY=","ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09","aW5jbHVkaW5n","IHByaW5jaXBsZXM=","IE1haA==","MjY3","c3RvcmFnZQ==","LWtleQ==","IGtleXdvcmQ=","JTs=","IHRyYWluZWQ=","LmNvbnRyaWI=","IGt2","X18nOgo=","IEJveQ==","cGFyYW1ldGVy","IHN1aXRl","IHRob3VzYW5k","IGNvb3JkaW5hdGU=","LWdlbmVyYXRlZA==","7ZWY","Z2VuZXJhdGVk","IGFkbWl0dGVk","IHB1c3N5","I3c=","IHN3aW0=","dW5pb24=","TmE=","Mjc0","IFJveWFs","LmNoYW5uZWw=","VXBkYXRlZA==","X1JPT1Q=","IHZpdGFs","MzM1","cmFjdGlvbg==","IENydXNoZXI=","IHByZWNlZA==","IGhvcml6b250YWw=","Qmx1ZXByaW50","IGF0dHJz","IHNtb2tl","0JI=","LkVxdWFscw==","RkI=","IFJlc291cmNlcw==","cm9sbGluZw==","IHBhc3Nlcw==","IE51bQ==","cm90YXRl","ZXR5cGU=","XCIs","IHNlbnNpdGl2ZQ==","IHRhbGw=","P+KAnQoK","UHJveHk=","aXk=","X3NlY3Rpb24=","4oCU4oCU4oCU4oCU","YnJpZA==","IGNpcmN1aXQ=","YXRhbg==","RU5D","IGRyaXZlbg==","IHZvdGVk","IGVkdWNhdGlvbmFs","IGludGVyYWN0aW9u","YWJldGVz","IHRvbmU=","IEluaXRpYWxpemVDb21wb25lbnQ=","IG1lcmVseQ==","IOye","Y29va2ll","X2Rpdg==","IFVJTGFiZWw=","dmVseQ==","fSk7DQo=","X0VOVA==","IysjKw==","YXJ0aWNsZXM=","IFNvdXRoZXJu","IHN0cm9uZ2Vy","IEdpdmVu","IEVyaWM=","IElS","YWJzdHJhY3Q=","VW5kZXI=","bmFibGU=","IGluY3JlbWVudA==","b3Zlbg==","IGNvaW4=","X3RpbWVy","IHN1ZmZlcmVk","IEZSRUU=","J10uIg==","IFF1ZWVu","c3RhdHM=","IG1lZXRpbmdz","Mjc2","IGVudGVyaW5n","IGFsb25nc2lkZQ==","KHNlc3Npb24=","aXRhbHM=","IGZvdW5kYXRpb24=","IENyZWRpdA==","LmRpdg==","X0FMTA==","cGNpb24=","X3N0YXQ=","aWNraW5n","RGVmYXVsdHM=","X3NyYw==","IG91dHB1dHM=","L0I=","IGVudGh1cw==","LWJs","LkZvcmVDb2xvcg==","CXRlbXA=","RmFjZQ==","IGludGVyYWN0","IHdlaXJk","TW91bnQ=","cmVsbA==","dWRlbnRz","IHJlcXVpcmVtZW50","IFN1cw==","SUVS","IGVsZWN0ZWQ=","cmVmZXJlbmNl","IE1F","IHNlcnZlcnM=","LndhaXQ=","IHNuYXBzaG90","aWx0b24=","IHRyaWVz","IHRpcG8=","LlRpbWU=","Pnc=","IG1vdW50YWlu","IHBvdW5kcw==","IFsuLi4=","ZXhpc3Rz","IG5nT24=","X01BUA==","IGZseWluZw==","MzMx","eGlldHk=","CXZhbHVl","X0RC","dW5v","IHNlYXRz","
VFVSTg==","LmF1dGhvcg==","ISk=","b3JjZQ==","IGluZGljYXRlZA==","MzE3","LnNpbg==","IGFzc2lnbm1lbnQ=","aW1pZW50bw==","IEZyYW1l","MzI0","X2dlbg==","aW5lcnk=","Xyk=","bWVzc2FnZXM=","LnNldHRpbmdz","IE1lYW4=","IE11c2V1bQ==","aXJx","YXR0YWNo","IFBhbGVzdGlu","X1FV","X3RhZ3M=","IGNhc3VhbA==","ZW1lbg==","QVNTV09SRA==","NDMy","JHM=","IENpcmM=","0L7QuQ==","ZXRyaWM=","L1A=","MDE4","IGVwb2No","PGhlYWQ=","X0NNRA==","IGdpdA==","IHBlbmFsdHk=","b3JwaA==","X3VzZXJz","b3Vyc2Vz","LkRhdGVUaW1l","YXRlcm5pb24=","X3Byb2plY3Q=","IHN1cGVyaW9y","IERhbQ==","IFNlYXR0bGU=","WFk=","PlRoZQ==","IEFr","IGdyYXNz","LyoNCg==","KGRpcw==","IGd1bnM=","IHRi","IEtldmlu","LmFyZ3M=","IEFo","b3BlZA==","KEo=","Y29sdW1ucw==","YXJndW1lbnRz","IFdpdGhFdmVudHM=","X2Z1bGw=","IERlZmVuc2U=","U2ltcGxl","IGRlYXRocw==","Mjk1","IGV4dGVuc2l2ZQ==","IFN0aWxs","IEV4cHJlc3Npb24=","IEFnZW5jeQ==","IHBlcmZvcm1pbmc=","Rlg=","IHVzdWFyaW8=","VUFM","U2lkZQ==","b2Rvcw==","YXB0b3A=","IGNyZWRlbnRpYWxz","X2NhcA==","YXRpZW50","IERpc25leQ==","IGFp","IGNoaXA=","IHZvbHQ=","Lm1ha2VUZXh0","JSUlJSUlJSUlJSUlJSUlJQ==","IGJlbGllZg==","X0xPQw==","IENpdmls","TmF2aWdhdGlvbg==","IHJldmVhbA==","IHZpb2xlbnQ=","IEZpbA==","IGNhdGFsb2c=","ZW1lZA==","c2Nhbg==","LmNvbnRyb2w=","IGNvbnN0aXR1dGlvbg==","Q291bnRyeQ==","U2VwYXJhdG9y","X0FQUA==","dG9waWM=","dWV0b290aA==","TUlO","IGRlc2NyaXB0b3I=","eXQ=","RVRIRVI=","IGRpc3RyaWJ1dGU=","J30K","LnRyaW0=","LkxpbmU=","IGxibA==","YXNzZXJ0RXF1YWxz","IERldA==","b21ib2s=","KHdpZHRo","IHRvcnQ=","IEVYUFJFU1M=","YWNv","VXNpbmc=","IEJyYW5k","d2FsbA==","RU1FTlQ=","IENvbW11bmlj","PHVpbnQ=","IEdVSQ==","RUdJTg==","IFJhbmdl","L2k=","IFRheWxvcg==","Y29zdA==","IHJlc3BvbmRlZA==","IFRoZW1l","bmNl","SVNI","IGZlYXR1cmluZw==","UmV0dXJucw==","IEty","IC4K","IG5hbQ==","X2Ni","VGVzdGluZw==","IHt9LA==","eWFs","LmZpZWxk","IC89","X1NIT1JU","bWF0ZXM=","VGVzdENhc2U=","YWlubGVzcw==","IGV2YWx1YXRpb24=","X0lURU0=","IFBhY2lmaWM=","CWs=","IGNhbnQ=","IFJvcw==","KXM=","IGZldA==","U1RSSU5H","MzE5","IERpc3Bvc2U=","Z2Fs","IEpvaW4=","IFBvcm4=","IENhdGhvbGlj","QVJ
HRVQ=","Y3B1","56CB","LnNjcm9sbA==","MzI4","SVNJTkc=","aWZlc3R5bGU=","YW5jZW1lbnQ=","IG1lcmM=","IEJyb3dzZXI=","ZXRlcm1pbg==","IG92ZXJmbG93","QXZhaWxhYmxl","IGJvdHRsZQ==","OlVJ","aWZpY2lhbA==","IGNvb3Jk","Y2xhcmF0aW9u","IGNvbmo=","R0xPQkFM","b2t1","IGt3YXJncw==","Y29uZGl0aW9ucw==","dWx1bQ==","IGdlbnU=","IEhlcm8=","5Y4=","IHVuZXhwZWN0ZWQ=","IERBTUFHRVM=","IGth","IENvdWxk","VVBQT1JU","IFBob3Rvcw==","IGNvbmZpZGVudA==","IGRldGVjdGVk","ZGVn","cmdi","IHN0cm9uZ2x5","IH07DQo=","ICk6","IGxlY3Q=","dXJzaXZl","Uk9M","IFdlaWdodA==","IGVudGVydGFpbm1lbnQ=","ICkpOwo=","IGdvbm5h","IGJi","LmRv","R1M=","IG1pc3Rha2U=","REw=","IFBST1ZJREVE","ZWFybmluZw==","TGltaXQ=","aXNzaW9ucw==","W3Y=","5LiN","aXJ0eQ==","RGVs","IHVuZGVybHlpbmc=","cHJlbmU=","IGphdw==","IERJ","cGVlcg==","IG9iamVjdGl2ZQ==","IGRlcG9zaXQ=","IGtvbg==","IGVzcA==","Mjc4","LnNldFZpc2liaWxpdHk=","L2xvZ2lu","PHR5cGVuYW1l","IGZyYW5jaA==","L2U=","MjY5","UGFyYWxsZWw=","IHNjb3JlZA==","IEhvbg==","IFZpbGw=","aWdh","IGFudGljaXA=","X2Fzc2VydA==","IE9wdA==","IGRlc2NyaWJlcw==","d2Fu","bW91bnQ=","IG1vbml0b3Jpbmc=","IHRvdXQ=","64qU","fSx7","Li4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4=","PWludA==","IGN1c3Q=","LS0tLS0t","IGF0bW9zcGhlcmU=","UEFS","b3J0ZQ==","SVNJQkxF","IElyb24=","IE5vdGlmaWNhdGlvbg==","LmxvZ2dpbmc=","IEJPT0w=","LXBvaW50","IGFmcmFpZA==","ZW50YQ==","IHRvbW9ycm93","QGltcGxlbWVudGF0aW9u","IGVuZ2FnZQ==","IEFudGg=","IEZsb29y","IFVs","VG9vbHM=","IGJhYg==","IGNhcmVmdWw=","44GE","IGNydWNpYWw=","IGNhbGN1bGF0ZWQ=","IFNB","IHd5","OTEx","RFg=","X1RBRw==","aW5kZWQ=","IGpldA==","IEVuZ2luZWVyaW5n","Lk1BWA==","ZW56","dmQ=","IHB1YmxpY2F0aW9u","ICMjIw==","IGZhY2Vk","cmFoYW0=","IENhcHQ=","MzM2","QXNzZXQ=","IENvbnN0YW50cw==","IGxvYW5z","X0lQ","IEZpc2g=","UmVkdWM=","X21hdA==","RGF0ZUZvcm1hdA==","X21l","W11bXQ==","IGludGVncml0eQ==","IENvdXJzZQ==","bG9iYWxz","IGZhY2lsaXQ=","IGVtYnI=","IE5n","LlN5c3RlbQ==","IG1hbnVmYWN0dXJlcnM=","IHByb3Zlbg==","Lm9uQ3JlYXRl","IGFsYXJt","IMKn","IGNvbW1vbmx5","aWNvcw==","5paw","IFN0YXRpb24=","fSku","IEZpbG0=","d2k=","54k
=","IGVuZ2FnZWQ=","U3RhdHM=","IGdvdmVybm1lbnRz","NTQw","IGFmZm9yZGFibGU=","X3Byb3BlcnR5","IGFnZXM=","KCctLQ==","IGbDtnI=","IFByb2Zlc3Nvcg==","IGh5ZHJv","UHVzaA==","IG9yZ2FuaXplZA==","Mjg0","QWNjZXB0","w6lt","X2NlbGw=","IG5i","cGI=","QXJ0aWNsZQ==","IHJlbW92YWw=","IGF1dGhlbnRpY2F0aW9u","IEZS","bGlkZQ==","IHBsZWFzdXJl","YXBvbA==","IHBhcnRpdGlvbg==","IFNpZGU=","IGNyaW1lcw==","IGRlbW8=","aG9sZGVycw==","IFBha2lzdGFu","SW5zdHJ1Y3Rpb24=","IGV4cGVjdGF0aW9ucw==","MzMy","LnNjZW5l","ICcp","aGVz","aW5vaXM=","X1Bybw==","IG1vbGVj","YW5kYWw=","X3Nob3J0","IGRlZmF1bHRz","IG5hdGlvbnM=","aW5lbg==","IHJ0","T0NL","UGFja2V0","U0I=","IFNIQUxM","X2NvbnRlbnRz","aXNlY29uZHM=","dmVydHk=","w6F0","R3VpZA==","bm9t","IGNvbmNsdXNpb24=","LlVwZGF0ZQ==","IGxvdmVseQ==","IGVtaXQ=","YmVj","CQkJCSA=","IGludGVsbGVjdA==","IGJyZXc=","ZWN5Y2xl","RmlyZQ==","MzU4","IGFkbWl0","IGFyYml0","IGFycmFuZw==","IE1JTg==","TWFpbA==","IE5hdGl2ZQ==","Q3Vy","IGNvbnZlbnQ=","LlJ1bnRpbWU=","In0K","LlJ1bg==","IHByaW50ZWQ=","IGNvbnZlbmllbnQ=","LmFy","bW9jaw==","IEFkbWluaXN0cmF0aW9u","44G+","IGVsZWN0cm9u","ZmxhdGU=","IGxvbWJvaw==","IGphdmFmeA==","bmg=","IHN1cHBsaWVz","IHZpc2l0aW5n","YWhs","IHBvd2Rlcg==","IHVsdGltYXRl","IG9yaWVudGF0aW9u","dXRhcw==","X3NjYWxl","Q29uZmlybQ==","cGhvbmVz","IE9wZXJhdGlvbg==","L1Q=","NDQz","X0lOVEVS","IGFpcnBvcnQ=","IG1ldHJpY3M=","IHBoZW5vbWVu","YXVkaW8=","MzM0","IG1haQ==","KEs=","aHU=","YWxsaW5n","cm9kdWN0aW9u","IFRyYW5zcG9ydA==","IE5PVEU=","5paH","IGZld2Vy","X1RJTQ==","7Kc=","0LrQuA==","QWdl","RklO","Mjk0","IOyd","IEF0dHJpYnV0ZQ==","Z3JvdXBz","ZXJr","YXR0bw==","LmRlZmluZQ==","LkFzcE5ldENvcmU=","YXRlZ29yaWE=","IFNpcg==","KGZvcm0=","PFVzZXI=","LnJvdW5k","X2RheQ==","LkFsbA==","U2VydmxldFJlc3BvbnNl","Lk5v","bGFyZ2U=","SUdI","cXVlbnQ=","IHZpcnVz","IHJldHJv","IGltcGVy","Qml0bWFw","IHZpY2U=","IG9mZmVuc2U=","aXN0ZQ==","IEFVVEg=","IOqw","VG9vbFN0cmlwTWVudUl0ZW0=","R3U=","IHJhcGU=","IERhdmlz","IG92ZXJ3aGVs","OmZsdXR0ZXI=","LXRhYmxl","IENvbnN0cnVjdG9y","UHJpdmF0ZQ==","ZXZlbg==","Y2hy","IGFwcGxpZXM=","X2F0dHJpYnV0
ZQ==","IGNvbnRyaWJ1dGU=","RVZFUg==","Mjg5","TGluZXM=","IEFmZ2hhbg==","VmlzaXRvcg==","IFNM","c2Vhc29u","Q1U=","IGludHJvZHVjdGlvbg==","IG1hdHBsb3RsaWI=","xZE=","IG5ld3NwYXBlcg==","4oCUYW5k","PHRhZw==","IGluaQ==","IGRpdmVyc2U=","SWdub3JlQ2FzZQ==","MzUz","IFVy","QWdlbnQ=","IGJ1bGw=","LmVtaXQ=","KEV4Y2VwdGlvbg==","YXJMYXlvdXQ=","IGluY3JlZGlibHk=","IFRydXN0","PXso","LW5hdg==","IGVxdWFscw==","IGxhZHk=","IFBvZA==","ZGlzYw==","YWxhbQ==","IElW","4pk=","aXZpZHVhbA==","cGhp","MDE3","YWRkZWQ=","IGRpZmZpY3VsdHk=","IGNvbXBhY3Q=","NTMw","IEFjdGlvblJlc3VsdA==","Y2Vycw==","X2NsYXNzZXM=","Tm9uTnVsbA==","IHF1aXQ=","IHBvdQ==","U3dpdGNo","aXJz","LXRlc3Q=","IEtpbmQ=","IENhbGVuZGFy","NDA2","IHN0cmVhbWluZw==","fScs","Mjc5","U1c=","IHN0ZWFk","b2Nh","IHByb3ZpbmNl","OTc4","IGNvbHNwYW4=","IHBlcnNvbm5lbA==","IEVtcGxveWVl","IHByb2R1Y2Vy","IGV2ZXJ5d2hlcmU=","b2Ri","0J8=","YnNvbHV0ZQ==","YWN0aXZhdGU=","IGdyaW5kaW5n","IEJ1aWxkaW5n","IFNhbmRlcnM=","KHNj","IE9mZnNldA==","Ly8vLy8vLy8vLy8v","fTsNCg0K","KHsi","IHNjYW5m","IFlZ","CWRlZmVy","IGpldw==","IHJlc3RyaWN0aW9ucw==","Lm1w","W2w=","5LiL","bGFiZWxz","cmVkaWNhdGU=","YXdlc29tZQ==","IHdhdmVz","IGNvbmZyb250","IG1lYXN1cmVk","IGRhdGFz","X2V4aXQ=","MzU1","b3R0b24=","IHNob3VsZGVy","YXNrYQ==","KyM=","ICAgICAgICAKICAgICAgICAK","IHRyb29wcw==","Mjkz","IFVuZA==","X2NhcmQ=","d2ljaA==","IG5vdXM=","ICIvIg==","c2I=","IGNvbW11bmljYXRpb25z","RXhwb3J0","IGRlY29kZQ==","dGhz","aW50ZXJwcmV0","QnlOYW1l","IFNwaXJpdA==","ZWRnZXM=","T0xF","IEVN","dGl0","IFRocm91Z2g=","IGJpbw==","IFBhY2thZ2U=","b3JuZQ==","Mjkx","IH0u","NDEx","YDsK","IG9rYXk=","IFplYWxhbmQ=","aWRlbnRpdHk=","KG5leHQ=","IEJhbmc=","TGlicmFyeQ==","IGhlYXZpbHk=","aWxvbg==","IGRpcGw=","IHJvdGF0ZQ==","cHV0cw==","KScsCg==","IERhdGFUYWJsZQ==","IG1heW9y","LnRvTG93ZXJDYXNl","IHNvbWVob3c=","IE5vcnRoZXJu","YWxj","IGNhcGFiaWxpdGllcw==","IHZpYnI=","Kwo=","IFN1","Mjg2","IFJlc2V0","X21lYW4=","IGNpZw==","LmNsb3Vk","IEJhbmQ=","IEZhY3Rvcnk=","IEFyaXpvbmE=","X2lv","b3BoZXI=","IGNvbnNjaW91cw==","IMO2","XENvbnRyb2xsZXJz","X3NwZWVk","IE
ZhYw==","X0NvbQ==","IEJpYmxl","d2Vu","RURJVA==","IHVubg==","IFN0YWZm","IElubg==","IG1lY2hhbmlzbQ==","IE1lbWJlcnM=","IG1pZ3JhdGlvbkJ1aWxkZXI=","J10uJw==","LmdldEludA==","PHZvaWQ=","CWZyZWU=","b2lkcw==","XFN1cHBvcnQ=","IGF1dG9tYXRpYw==","IGNoYW5jZXM=","0LY=","IGNvbXBsaWNhdGVk","W3Jvdw==","YWhvbw==","IH0KCgoK","TW9kZWxz","V2lu","IHRhcGU=","aXJ1cw==","aXpvbg==","b25vbXk=","KCJf","Oi4=","LnN0ZXJlb3R5cGU=","Mjk2","KGVudg==","X3JlY3Q=","KHdpdGg=","IGFzc2VydFRoYXQ=","IGNvbnN0cmFpbnRz","cHV0eQ==","RW1wbG95ZWU=","NjIw","VEQ=","IGd1aXRhcg==","ODc1","IEpld3M=","LnByb2Nlc3M=","IGZpY3Rpb24=","IFNoYXJlZA==","4pSA4pSA","IHByb3BhZw==","Lk5ldA==","IGFjaGlldmVk","CVE=","IG51cnM=","U2hhcmVk","X0ZBSUxVUkU=","IGJlaGF2aW91cg==","IGNvbHM=","aXNtbw==","IGZlbWlu","IGNoYWxsZW5naW5n","IHBvc3Rpbmc=","ZW5jaWw=","IGNhcHR1cmVk","IERvdQ==","KHdvcmQ=","IFR1cmtleQ==","cGFuaWVz","IHJlcHV0YXRpb24=","T1JNQUw=","IGVsaWdpYmxl","cHJvdG9jb2w=","NDE0","aWRhcw==","KGZyb20=","MzQ0","IGZpbmFuY2U=","LXBlcg==","IGdvdHRlbg==","SEE=","ZHVyYXRpb24=","IFBhcmVudA==","Njc4","IGludmVudA==","IHJlc3RhcnQ=","0L7Qu9GM","cml0aW9u","KHJz","PGJvb2w=","aWVydA==","IG1vZGlmaWNhdGlvbg==","IFRY","cmVhZGNydW1i","YmFuaw==","MzI2","JC8=","IE1pbGxlcg==","XSksCg==","LkNoZWNrZWQ=","IHNhY3I=","c2VjdXJpdHk=","IHBvc2U=","IEJyYWQ=","IGZpdG5lc3M=","IGFubm91bmNlbWVudA==","YXRpb25Ub2tlbg==","IHNlcnZlcw==","bmVlZA==","IGdlb21ldHJ5","QVJT","5oA=","YW5kaWRhdGU=","IHNwcml0ZQ==","X3NwbGl0","V2Vlaw==","YWRpZXM=","PigK","Pz4i","IC8vLwo=","IGVpbmVy","IHdlZWtseQ==","CWxvZ2dlcg==","X3BvcA==","X21hbg==","IG1pZ3JhdGlvbnM=","IGFza3M=","IGJz","IGZhbGxz","LldoZXJl","LWhlaWdodA==","X2ZlYXR1cmU=","Lk1pbg==","IGh5cGVy","IHZvbGF0aWxl","IHR3ZW50eQ==","VHlwb2dyYXBoeQ==","VW5hYmxl","RGV0","LGY=","LW1vZA==","IHNldHRsZW1lbnQ=","IGNvbnRyYWN0cw==","bm9tZQ==","QmFk","IEJyaWFu","NzY4","KHVzZXJuYW1l","ISEhIQ==","IGhhY2s=","LkZpZWxk","SFI=","IEpvcmRhbg==","aXph","IMKg","IFNoZXI=","LmhlYWRlcg==","KG90aGVy","IER1Yg==","KG9w","IFJvdW5k","IHZpZQ==","IGFwcGw=","CUo=","IEluc2VydA
==","IExQ","cmVnb24=","IE1QSQ==","IGFuY2hvcg==","YWNh","w7hy","IGFkZQ==","YW5jaG9y","cXVlZQ==","IFRyZWVOb2Rl","IHRhcmdldGVk","IGxhaWQ=","QUJFTA==","dmV0","IE9yaWdpbg==","QW50","LicpOwo=","ZXhwZWN0","ZWRSZWFkZXI=","IE1ham9y","IGluY2g=","Q29tcGFy","IHByZXZpZXc=","IGlsbG5lc3M=","IENPTlRSQUNU","IEluZGVwZW5k","dXVpZA==","IG5vbWU=","IHRj","IEF2ZW51ZQ==","aXNhbg==","IHBocmFzZQ==","X21vdmU=","Iilb","NDEy","IHByb3Zpc2lvbg==","IGNvbmNlbnRy","X0lS","IFV0","KCkr","IG5hcw==","ISw=","IFJvYmlu","aWF0aW9ucw==","YXRpdHVkZQ==","IHB4","IFdpdGhvdXQ=","L2Jhc2g=","ZWt0","cmVlbWVudA==","MzQy","T2JzZXJ2ZXI=","MzE4","IFJlZ2lvbg==","VUJMSUM=","IHsvLw==","S04=","5bc=","R2FtZU9iamVjdA==","5b4=","ZW5jb2Rpbmc=","ICoqKg==","cHJvamVjdHM=","IHRr","IGNoZWVzZQ==","RU1QTA==","YXJv","INin2YQ=","NjEw","MzM3","IGNvbnNpc3Rz","cmVmcmVzaA==","dXJlYXU=","IFNjYW5uZXI=","IHNvaWw=","IGZsYXZvcg==","RGF0YVNvdXJjZQ==","RXhlY3V0ZQ==","0LXQvdC40LU=","IHNoaXQ=","5YiG","PGFueQ==","IHJldHJpZXZl","IGJlbG9uZ3M=","LnN0cmlw","YWJzb2x1dGU=","IGV4cGFuZGVk","Ym95","KTot","IHJlc2N1ZQ==","LkpMYWJlbA==","IHJlbHk=","IGFsaWdubWVudA==","LWZhbWlseQ==","IHJlbmQ=","T0xVTU4=","IGJvcnJvdw==","IHF1b3Rlcw==","IExldw==","IHNob3dlcg==","IERFTEVURQ==","X2xvb3A=","ISIKCg==","CXJl","IGF0dGVtcHRlZA==","YXZlcmFnZQ==","IFBhaW50","cXVpc2l0aW9u","b2xlbg==","IGxpdGVyYXR1cmU=","IFJlZmVyZW5jZQ==","X1RFWFRVUkU=","IFNlZw==","IEluZHVzdA==","Y3R5cGU=","RFVDVA==","X0hPU1Q=","IFRyYWRl","IHBsdWdpbnM=","IGJyZWFzdA==","dWxzZQ==","IGNyZWF0dXJl","Mzcy","44GZ","IFdp","IHN1cHBsaWVk","Y29sbA==","ISgi","IGZ1Y2tpbmc=","IENocm9tZQ==","IFVyaQ==","IE5hdGlvbg==","IHZlcnRpY2Vz","VEhF","IE9yaWdpbmFs","b25kZQ==","IHNoYXJw","IGNvb2tpbmc=","MzQ3","IHsvKg==","IFBzeWNo","IEhvbGx5d29vZA==","PSRf","LkRvY2s=","IGdlcg==","IGJvbmU=","X2Nvbm4=","X3NlYw==","eXNpY3M=","ID0i","Mjk4","U2Fs","c2Y=","IGRlZXBseQ==","YW5nbGVz","VGVybQ==","YmVsbA==","IFF1aWNr","NTYw","ZW5lcmF0aW9u","YWRpb0J1dHRvbg==","5YWl","fQ0KDQoNCg==","IGNhcHRpb24=","bGM=","IEVM","LFs=","ICAgICAgDQo=","cmV0dA==","KG1ldGhv
ZA==","IEZsYXNo","NDcw","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","V0lTRQ==","LnNjYWxl","IHJvdWdobHk=","X2NoaWxk","bWVtb3J5","YXlpbmc=","IGluaXRpYWxpemVk","aW5hdG9y","0LDRgA==","IHNjYWxhcg==","IEhv","YWlyZXM=","KGNvbHVtbg==","LmRlc3Ryb3k=","UEFDSw==","IGhlbQ==","YW5nZWw=","X1NVQg==","LnF1","INc=","REVGQVVMVA==","cG9zaXRvcmllcw==","NTAz","IExlbmd0aA==","IEZhc3Q=","IHNpZ25hbHM=","IC8vJA==","cmllcnM=","IGR1bW15","QU5Z","IHBlcnNvbmFsaXR5","IGFncmljdWx0","UGxhdGZvcm0=","RVJP","IFRyYQ==","IGVub3Jt","CVc=","QWN0aW9uUmVzdWx0","IGF2ZXI=","W3N0cg==","ICctLQ==","LlNwcmludGY=","IGRlYnV0","INGH","aGV4","X3V0aWxz","IHBi","VUlUYWJsZVZpZXc=","IHp1cg==","LmVuY29kZQ==","NDE2","IHZhZw==","LmVycm9ycw==","0L7QvQ==","IG1y","IEF3YXJk","IGNwdQ==","IHByZXNzZWQ=","J2VzdA==","IEZlc3RpdmFs","J1Q=","IGFr","cmVzb2x2ZQ==","MDQz","Lm1l","IG5pYw==","IGdlbnJl","IGF0dHJpYg==","IE1vb24=","IGFycml2ZQ==","IERhdGluZw==","IHRt","LkNvbmZpZ3VyYXRpb24=","NTA1","LnJlZA==","IGdsbQ==","IHN0YXRpb25z","c3dpdGNo","IHRpZWQ=","5Lq6","IC8+PC8=","UXVhbnRpdHk=","cXVpcnk=","X3RhYg==","IGFsZw==","VG9hc3Q=","cmVzaXpl","cXVlc3Rpb25z","c2NoZW1h","TGl0ZXJhbA==","KGVudGl0eQ==","TkVDVElPTg==","Y2hhbmdlZA==","X0ZJRUxE","X0hFSUdIVA==","IG9yZ2FuaWM=","UFJF","IENhdA==","LkRyYXc=","RXM=","IGxvdWQ=","Njgw","ICAgICAgICAJ","IEthdA==","IGhlYXA=","4oCcSXQ=","MDcw","ZXRy","IHVubGlrZWx5","ZXJhbHM=","L2F1dGg=","NTAy","dG9kbw==","UGxhY2U=","UG9zdGVk","Q29tbWVudHM=","IFRlY2g=","IEZpbmFsbHk=","ZWdyYXRpb24=","IG1pbmltYWw=","IEZpbGVz","IHRhbWI=","66Gc","IFJlbGVhc2U=","NDI1","LnJlc2l6ZQ==","IM8=","Y29sbGVjdA==","PXA=","IExJQUJMRQ==","IHByb2R1Y2luZw==","LXdyYXBwZXI=","IHNpbmdsZXM=","IE5CQQ==","b3Jy","ZXJlbg==","LmFkZEFjdGlvbg==","IHRoZXNpcw==","ZG4=","UFRZ","LmRlcw==","IGJhY3Rlcg==","IEV4cHJlc3M=","ICopCg==","5ZE=","L2FkbWlu","c2Vjb25kcw==","5Yqf","dXNzaW9u","YWJldGg=","IENvbXB1dGVy","IHJ1bGluZw==","KCIuLi8=","LkdFVA==","IE1lZGFs","aXRpb25hbGx5","Y29tbWl0","Zm9jdXM=","X0xFVkVM","aW5kYQ==","RmFjdA==","PW5w","PSIiPgo=","IHN
1YnNlcXVlbnQ=","cG9zYWJsZQ==","LWZsdWlk","IHRob3JvdWdo","IHB1YmxpY2x5","YXB0ZXJz","IFdpbHNvbg==","X1BSRQ==","eWFyZA==","5Lw=","CWlu","MzM5","IHJldmVycw==","IGJ1bGxldA==","Y3JpYmVk","bmVzb3Rh","ICgkXw==","YW5ub24=","Y3Vyc29y","IGNsb3RoaW5n","IE11bHRp","Mjg3","Oics","IHZlc3M=","b3JkaW5hdG9y","IGVpbmVt","Q2Fubm90","IGFybWVk","CVY=","5LiK","LkZsYXQ=","IFNlcA==","IFN1YmplY3Q=","X2ZvbnQ=","IGNoYXJhY3RlcmlzdGljcw==","RG9uZQ==","ZWxu","IyMjIyMjIyMjIyMj","UE9T","IGRlbnNpdHk=","IFBsYXRmb3Jt","LWl0ZW1z","IG92ZXJz","IHB1c2hpbmc=","56Q=","LkNvbm5lY3Rpb24=","X3Rlcm0=","IGluaXRpYWxpemF0aW9u","X19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX18=","56w=","LmRvY3VtZW50","bGVzaA==","CWRvY3VtZW50","IFBpbg==","w6dh","IGRlZmluaXRpb25z","LlBhdGg=","X1dSSVRF","IAkK","Pz4KCg==","IHRlcnJpYmxl","YmVhbg==","aWNrZXRz","IFNW","QnV5","KHRhc2s=","IHJlZ2ltZQ==","Z29vZ2xl","IGNyYWNr","LnZpc2l0","TlVN","ZW5lcmd5","IHN0cnVjaw==","X3NhbXBsZQ==","LnBheWxvYWQ=","IHJldmlz","IFNjZW5l","IHBn","IGJyZWFrZmFzdA==","VVJSRU5U","LmNoYXJBdA==","X2V4Y2VwdGlvbg==","IEFudG9u","IGd1aWRlbGluZXM=","IGV4aGF1c3Q=","IEZpbmFuY2lhbA==","IGluZGVudA==","IGRlc2t0b3A=","SGlkZGVu","RmFpbHVyZQ==","IHByaW5jaXBsZQ==","IGl2","IHNla3M=","bmV0d29yaw==","IG51bWJlck9m","IEFsYmVydA==","CWxvbmc=","ODAx","LC4=","IHplcm9z","ZmFkZQ==","IFR5cA==","IFRlcm0=","IEFydHM=","LkFwcGxpY2F0aW9u","IGJlaGFsZg==","5oi3","IG1lcmU=","KGAkew==","IGF3YXJlbmVzcw==","ZWxwZXJz","ZmxpeA==","IHdlaWdo","IGVzdGltYXRlcw==","LmNoaWxk","L08=","IEJpdG1hcA==","LmJvdHRvbQ==","ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq","RXhwZWN0","ZW50bw==","IEZvcnVt","dmVyYWw=","IGphaWw=","IGFiaWxpdGllcw==","IEhPTEQ=","IENpdA==","IGR5bmFt","IGdyYXk=","CQkJCQkJCQkJCQkJCQ==","Lm5leHRJbnQ=","YW50bHk=","IEFSSVNJTkc=","KHByaXZhdGU=","IHJlamVjdGVk","IE5pYw==","IGxlYXRoZXI=","PXsK","YWx5dGljcw==","dGhldGlj","LlRvcA==","Mzcz","LlBhZ2U=","PXtg","IDsNCg==","ZGVwdGg=","bWFubg==","V0Q=","IFNvbQ==","LlJpZ2h0","ICl9Cg==","IHRyYWl0","w5c=","aWFj","IHJ2"
,"U2FtcGxl","LlhtbA==","b3BwZWQ=","INGE","bGlzdHM=","IHRlYXI=","aXZlcnNhcnk=","LmNvbGxlY3Rpb24=","IENvbnN0aXR1dGlvbg==","IEh0dHBSZXNwb25zZQ==","IGJyaWxs","IFByb20=","aG92ZXI=","MzY2","IE1pYW1p","IGFyZ3Vl","X2Zsb2F0","NTA0","IOOC","IG5hdA==","IFRhbA==","IGludGVncmF0aW9u","KGN1cg==","IHJlbW92aW5n","IGNvZWZm","IFRob3VnaA==","IGZvcmVjYXN0","NDA4","IFZlZ2Fz","U2l0ZQ==","MzQ2","IHRyYWI=","IEhlbnJ5","LWk=","IGludm9sdmVz","QlQ=","IHNsbw==","SW52b2tl","IGx1Y2t5","MDI1","cmF0","ID8K","IGhhbmRsZWQ=","KGZk","Y29udGVudHM=","IE9GRg==","UkY=","IHN0eQ==","IE1vdG9y","dGVyeQ==","dGF4","TUFQ","IE1ycw==","IHBob25lcw==","IFVJVmlldw==","IikpKTsK","KGRldg==","IElyaXNo","MDE5","IHdz","REk=","X09GRlNFVA==","IEV2ZW50cw==","IHN0YWdlcw==","IH0vLw==","IGhhYmVu","U1RBTkNF","IFNpbg==","IE1vbmV5","KHRvcA==","IGFwcG9pbnRtZW50","VkVSU0lPTg==","bWV0YWRhdGE=","X2NvbW1lbnQ=","IGNvbGxlYWd1ZXM=","bWFwcw==","4pg=","CgkK","KGFs","X3JlcQ==","IGZ1dA==","IGFyY2hpdGVjdHVyZQ==","MzUx","IFdIRVRIRVI=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","X3NjcmVlbg==","IHN0eWxlVXJscw==","IG1vbnN0ZXI=","LnVw","cGhpYQ==","IHByb2Nlc3Nvcg==","IFRlcnI=","PScs","IE1hbnVmYWN0","IE5U","a2Vs","aWJlcm4=","CWZpbGU=","QWxp","cmllbnRhdGlvbg==","IC8vIQ==","YXBvcmU=","YW5lb3Vz","IENyZWF0","Zm9sZGVy","NDE1","IGhheQ==","U3VwcHJlc3M=","KGxlZnQ=","IGV1cm8=","IGRpc2NsYWltZXI=","dXN0cnk=","c2hpcHM=","X2Zk","IEZh","X2luc2VydA==","IHJvbA==","aWZ0aW5n","IENvbW1lbnRz","X2Jy","IGxvc3Nlcw==","IEFkZGVk","Y2hhcmc=","INC/0L4=","X3N5c3RlbQ==","IFNvbWV0aW1lcw==","IFNwYWlu","KGdyb3Vw","aWFsaXM=","IGRvbGxhcg==","IEFyZ3M=","NDk5","Mjk3","cXVpcmVz","IFRlbg==","LnNjc3M=","IHN1cnZpdmU=","dXNhZ2U=","IGp1bg==","aW1pdGVy","77yBCgo=","IGZpZnRo","dG9nZ2xl","IGRlY2xpbmU=","KCQi","KExvbmc=","aW5nZQ==","IHBpbG90","LWxpZ2h0","LXJhZGl1cw==","IHBvZGNhc3Q=","IG5hdHVyYWxseQ==","UGFnZXM=","5Li6","IERlc3BpdGU=","IGxpZ2h0aW5n","IGNyYXRl","IEJpbmFyeQ==","IHJlZHVjaW5n","IGVsZWc=","IE1vdXNl","IFRlc3RCZWQ=","IGJlZm9yZUVhY2g=","X0FSUkFZ","UmVkaXJlY3Q=",
"MzI5","IGZsb29k","IHNoaXBz","MzYz","IGVsZWN0cmljaXR5","KSoo","6rg=","IFZpZXQ=","aGVybw==","IGRpYQ==","IEtlbnQ=","aGVhcnQ=","IHRocmVhdHM=","X2FjYw==","IHN5bWJvbHM=","aXNjaGVu","X2luc3Q=","Q3JpdGVyaW9u","IFRJTQ==","LkhlaWdodA==","NTgw","IOKAmQ==","KCk7CgoK","UHJvZHVjdHM=","X1NQ","IEN5","IGRlcGVuZGVudA==","ZXN0ZQ==","IGRhdG9z","ZGl0","0LDQsg==","SUdOQUw=","IGxlc3Nvbg==","Ij4n","IENvdmVy","IEhvcGU=","IFRpbWVy","IGRhZA==","dmlkZXJz","IFBob3Q=","Lz8=","cm9weQ==","b21pbmc=","YXNpb24=","IFwo","IEVU","IFJlYWRpbmc=","IGVwaXNvZGVz","bG0=","NDIx","ZWNoYQ==","IG5ldXJv","ODIw","IGhhcm1vbg==","IGxpYmVyYWw=","LWluZA==","Mzkz","REFUQQ==","IGV2ZXJ5ZGF5","IGRpdmlkZWQ=","IEFjdGl2ZVJlY29yZA==","ZmlndXJl","VUE=","5Lk=","cmllbmRseQ==","dGVjaA==","NjAx","LmdhbWVPYmplY3Q=","0LjRgtGM","Mzc0","IG1vb24=","ZnRpbWU=","IG5vY2g=","IFRPUlQ=","IFZN","LmluaXRpYWw=","KGNoaWxk","IG11c2ljYWw=","IG9j","YmFz","IEhheQ==","MzYx","X2xvbmc=","IG1lbXNldA==","aWxleQ==","YWRlbHBoaWE=","U1Y=","cm9hdA==","X3R4","IGxvbg==","IG5nT25Jbml0","YnA=","IEdvbGRlbg==","QUNIRQ==","IHdvcnJpZWQ=","YXpp","RWFy","VGFrZQ==","KGZw","YnVyZ2g=","X0RhdGE=","Z3Jlcw==","IE9udA==","cHVz","IHRyYW5zcGFyZW50","IHBvY2tldA==","IHJhbQ==","aWdyYXRpb25z","Lg0KDQo=","IFso","IGFkb3B0ZWQ=","IHJlcG9ydGVkbHk=","IERyZWFt","IH0pKTsK","bG9zaW5n","IHRlZXRo","IEJvb2tz","Iiwm","ZW5ueQ==","TEVNRU5U","IGdlbA==","IFBsYW50","NDM3","IeKAnQ==","Lmhvc3Q=","IFJlcGx5","Mzc2","cmVuZ3Ro","IHJlY29nbml0aW9u","IH19Pgo=","TEE=","IG1pcnJvcg==","IGFzc2lzdGFudA==","KGRldmljZQ==","IHNwaXJpdHVhbA==","YnVpbGRlcg==","wqc=","IG91dHI=","IHR0","IFBFUg==","IHJhZGljYWw=","TWV0aG9kcw==","IHBhY2U=","dWR5","IGd1dA==","IEdyZWVr","IG5vbmF0b21pYw==","IFBhcGVy","X0dQSU8=","IG9ic3Q=","LkFk","dmlyb25tZW50cw==","IFNvdg==","MzU2","KGNvbg==","IFRyYW5zYWN0aW9u","LmFzc2lnbg==","CWNhdGNo","ZWx0ZXI=","IGJpdGNvaW4=","X0dS","IDw/PQ==","X2xhbmc=","7J2E","QnJvd3Nlcg==","IGNvbnNpZGVyYXRpb24=","IEV4ZWN1dGl2ZQ==","6Ze0","O1w=","IEpTT05PYmplY3Q=","IEJlbGw=","IHNwb2tlc21hbg==","fn5+fn5+fn4=","b2NrZXk=","I
Edybw==","IEF3","Q29uc3RyYWludA==","IFByYWN0","IEV2ZXI=","cHJpbQ==","OnsK","X2lt","UE4=","TWlsbGlz","VU1FTlQ=","IGJhZ3M=","w6Vy","QU5ORUw=","MzU0","IGlj","IHRyYW5zcG9ydGF0aW9u","IFNhdWRp","aGFuZGxlcg==","RHJhZw==","IGhk","Y29sbGFwc2U=","X1BI","IHVi","QVJN","IEFQUA==","IHRvbmlnaHQ=","IGRpbmluZw==","UmVjb2du","IGJj","aWd0","KG51bWJlcg==","Qm9vdA==","IGVsc2V3aGVyZQ==","IGFycm93","YXJnYQ==","IGRlbGljaW91cw==","IFNO","V1I=","VmFsaWRhdGU=","IFF1YWxpdHk=","KGVtYWls","IGludGVycHJl","aWdhdGlvbg==","IGNob2NvbGF0ZQ==","NTI1","X2VkZ2U=","IHN0b3Bz","OmZ1bmN0aW9u","KXw=","IHRoYWk=","IExvYWRpbmc=","U3Rvcnk=","VHJpZ2dlcg==","YnJhbmNo","IHRk","ZW50aWNhdGVk","IGFkdmVudHVyZQ==","IGJsb2NrY2hhaW4=","RXZlbnRIYW5kbGVy","IHNxcnQ=","LlBy","TG5n","QmVjYXVzZQ==","IHZpdg==","IG9jZWFu","eWx2YW5pYQ==","0LDRgQ==","IFV0aWxz","IGRlc3Blcg==","IGRlZmVy","CXJlcXVpcmU=","aGw=","UmVxdWlyZQ==","XVw=","IGRpcmVjdGlvbnM=","X3Jlc291cmNl","IHN1YnNjcmliZQ==","IMO6","IEhlYXJ0","ZXN0cw==","LXN1Yg==","IFJo","Zm9yRWFjaA==","IGRlbGlnaHQ=","IHRlcnJpdG9yeQ==","LmNvbmN1cnJlbnQ=","ICgr","anBn","IHByZXBhcmF0aW9u","IHJvdW5kZWQ=","Q29tbQ==","LkxlZnQ=","IG9waW5pb25z","IE5hdmlnYXRpb24=","KGZpcnN0","Iiwk","IGhpcmU=","IGRldGVjdGlvbg==","LmdldEVsZW1lbnRz","IGVwcw==","IHNrbGVhcm4=","IGN6","IC8+DQo=","bWV0aWM=","IHRyYW5zZm9ybWF0aW9u","5Y+3","IHJnYg==","aXN0cmlidXRpb25z","IGltcGxpY2l0","L2lu","ZGVzdGluYXRpb24=","0LDRgtGM","WmVybw==","IHVuc2V0","OTIw","LndoZXJl","Lmdv","IGZvcm1hdGlvbg==","IGRlY2xhcmF0aW9u","KCkNCg0K","IEV4cGw=","CQkJICA=","L3Bybw==","LkpTT04=","NDQx","IGRlc2s=","LnN1YnN0cg==","Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t","bHlu","cHNvbg==","NDA3","ZGlzYWJsZQ==","IEZ1bmM=","CUFzc2VydA==","IE1BUks=","IGRlZmVhdA==","IGJsaW5k","IGNvbnN0YW50cw==","MzYy","LmhlYWRlcnM=","VUlMRA==","IGV4cGVuc2Vz","UGl4ZWw=","IGhy","IGZlbA==","IEVhc3Rlcm4=","NDI0","NDkw","X2RlbA==","MzU3","IEN1Yg==","IHNx","CWNvdW50","IERpcmVjdG9yeQ==","IGV4Y2x1cw==","IGhpc3Rvcmlj","IC0tLS0tLS0tLS
0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ==","IGNvbXBvc2l0aW9u","IGRhdGFHcmlkVmlldw==","IEJ1cm4=","IEJD","TWFzdGVy","IHNwYXdu","IGJlYXJpbmc=","LlNldEFjdGl2ZQ==","aWxv","IGdhbGxlcnk=","IGZvdW5kZWQ=","IGF2YWlsYWJpbGl0eQ==","LnNxcnQ=","IHBlcw==","IERPTQ==","bWF0ZQ==","T2N0","IG1hdGNoZWQ=","aXRpdml0eQ==","IGFueGlldHk=","LnByaWNl","IEluc3RhbnQ=","7Io=","IHR1dA==","SUNvbGxlY3Rpb24=","LnNoYXJlZA==","X3NxbA==","dGJs","bGlicmFyeQ==","X2Rlc3Ryb3k=","ZXJtYWw=","IE5vdGVz","IEVpbg==","IHNvdXRoZXJu","IE9USEVSV0lTRQ==","IG1hY3Jv","Lmxvd2Vy","Y2xz","Q29udGVudFZpZXc=","Lmxpbms=","Y29uc3RhbnQ=","IEJlcw==","IHNvbWVib2R5","bmI=","Mzk5","Ij57","KGxvY2Fs","Li4uLi4=","IE51bGw=","bXg=","IMOn","IHBhdXNl","LS0tLS0tLS0tLS0=","X01P","IENN","IGZvcktleQ==","IERWRA==","IGNsb3Nlc3Q=","X0RFVklDRQ==","IFN0ZXBoZW4=","IEJCQw==","IFRyYXZlbA==","UGFpbnQ=","IFJlc3VsdHM=","IFJ1bGU=","IHRw","IHJhdGluZ3M=","Y2lu","Y3N2","Pi8=","IEdPUA==","bGFk","INGA","IGluZGV4UGF0aA==","bWF0cml4","PWY=","YXJzZWQ=","IH0pOw==","IENvcw==","IFNjb3Jl","IHRhaw==","IEVTUA==","IElOQw==","X05VTEw=","LWZsZXg=","Il1b","aW50bw==","ZWxhbmQ=","QXV0aG9yaXphdGlvbg==","X0ZBTFNF","IGdhdGU=","IHZpZA==","aXN0ZW50","VElNRQ==","IHJld3JpdGU=","IHRpZQ==","IGFyY2hpdmU=","NTEx","LmV2ZW50cw==","LmdldFBhcmFtZXRlcg==","IFBlcm1pc3Npb24=","IHByb2dyYW1tZQ==","IOk=","anVk","IGNhbWVyYXM=","MzM4","MzQ5","KHN5cw==","IFN5cmlhbg==","IGltcHJvdmVtZW50cw==","IGhpcA==","IHN1aWNpZGU=","IHNjaG9sYXI=","IGNvbXBhdGlibGU=","MDIy","cmVtb3Rl","LmRvd24=","RlVOQ1RJT04=","IG1hbmFnaW5n","IFVJS2l0","LnJhdw==","Pj4+Pg==","Mzcx","IGRlbWFuZHM=","ZWxsaXRl","IGRlbnQ=","IE1pY3Jv","5Y+W","J11bJA==","IElF","aW1lbnNpb24=","IHRyZW0=","NjMw","IGdhaW5lZA==","LndpdGg=","Lm9r","aG91","IGJvbQ==","YW1wYWlnbg==","IGpvaW5pbmc=","ZmlzaA==","IGFkZFN1YnZpZXc=","ODYw","IG5vcnRoZXJu","LmNvcg==","b3JldA==","RGll","aW5pc2g=","X2NvbXA=","IGF0dGVuZGVk","IGNvbGxhcHNl","IFNT","YWNlbnQ=","X0VRVUFM","IERlZXA=","UkdC","CXRlc3Q=","b2x2ZXM=","dXNldA==","VW5pdHlFbmdpbmU=","d3JpdGVy","UmVzb2x2ZXI=","L
CU=","aWZmZXJlbmNl","X3JlbW92ZQ==","b25kYQ==","IGZlbW1l","Mzg1","ZGVjb2Rl","QnJhbmNo","IGZsdXNo","IGlubm92YXRpdmU=","VGVzdHM=","IFsnLi8=","IGNvdmVyaW5n","LmFkbWlu","dWx0aXBhcnQ=","KGxhbWJkYQ==","77u/bmFtZXNwYWNl","IFNwb3J0","ICEo","YWNsZXM=","IGRlcHJlc3Npb24=","IEtvbmc=","NTcw","IHBlcnQ=","IENvbm4=","IE90aGVyd2lzZQ==","L2hvbWU=","c3VwcG9ydGVk","IHBpbms=","IGludml0ZWQ=","w7Fvcw==","X2VuYWJsZWQ=","IC0K","Rlc=","ZW5lcnM=","IE1Z","IHN1Z2dlc3Rpb25z","Q2FudmFz","IGZlcg==","IE1hcmtldGluZw==","QFRlc3Q=","dW50dQ==","IFZlbg==","IENvdQ==","aXZhbHM=","RG9uYWxk","bGltaXRlZA==","CQkJCQkJCg==","IGFuYWx5c3Q=","KGVudHJ5","IHJlcHJlc2VudGF0aXZl","X2F0dHJpYnV0ZXM=","IGZ1cg==","LmhpZGU=","cmVzcA==","YWRvcmVz","cmlkZXM=","IEpvc2g=","cm9ib3Q=","IE5BVA==","IHNlc3Nv","IGludGVncmF0ZWQ=","OnRydWU=","cGFydHM=","IHN0dXBpZA==","OmV2ZW50","QGVuZHNlY3Rpb24=","IHB1","LlRhYmxl","IFlpaQ==","YDsKCg==","IGNsYW5n","PSIiPg==","ZW5nYW4=","X3BhcmFtZXRlcnM=","LmludGVybmFs","IE1vZGVybg==","IG1ldHJpYw==","IHNlbWk=","PXt7Cg==","NzA3","LmFtYXpvbg==","IEJC","YWludHk=","dmlld3BvcnQ=","MzY3","IHN0YXJ0QWN0aXZpdHk=","ZGlzcGF0Y2g=","KioqKio=","IGZsYXY=","aWZmZXJlbnQ=","Mzgy","W3RoaXM=","IHN0YWtl","IGFyZ3VlZA==","dmlvdXNseQ==","Lndvcms=","IE9haw==","T2xk","KGFzeW5j","bm90ZXM=","IGZsaXA=","IGRpc2Fn","IFRF","CWVycm9y","PCc=","IMK7Cgo=","IGZpbHRlcmVk","IE1hY2g=","IGh1bmc=","X2R1bXA=","X3NhbXBsZXM=","LWRpc21pc3M=","IHJheQ==","SW1wbGVtZW50ZWQ=","REs=","IGplZA==","MDkw","IGJyZWFrcw==","IGZpdHM=","Lmdy","IFplcm8=","b3Jv","IGVxdWFsbHk=","ICdb","IGNvbmNlcm5pbmc=","PG1ldGE=","cGxheWVycw==","X1BPUw==","X3NpbQ==","SmFu","IHlvdXJz","CU4=","IHNwaXI=","IGNoYW1waW9u","IEFuYWx5c2lz","YXBh","IE5TTG9n","X2xpbmVz","w7Fh","CQkgICAgICAg","ODE5","LlNj","UmVw","ZXRyb2l0","dXJhYmxl","TUlU","Y29tcGF0","b3duZWQ=","X2luZGljZXM=","XSwNCg==","IGRpc2NvdmVyeQ==","IERpZWdv","b2Jp","LkluZGV4","IHRyZW5kcw==","UExBWQ==","Lm5v","IGxlbnM=","X2NmZw==","IGFubm8=","YWdhbg==","IHBlcmlvZHM=","dGVybXM=","eXo=","IGF0dGFja2Vk","aWJyYXRpb24=","UEVDSUFM","X2dyYWQ="
,"IGFjY29yZGFuY2U=","LlJlYWRMaW5l","LmRldmljZQ==","cml4","LmNvbnRhaW5lcg==","bWF5","ZXJjaXNl","IEx1","IHJn","INGB0YI=","CQkKCQkK","KHVu","VEVSTkFM","IGxlc3NvbnM=","IGFsbGVnYXRpb25z","IHRyYW5zbWlzc2lvbg==","LlJlZg==","TW9iaWxl","IFRvdXJuYW1lbnQ=","IE51dA==","IEdh","IENhcGl0YWw=","ZGVmaW5pdGlvbg==","LWV4cA==","Y2xlYW4=","IGZhbnRhc3k=","IGVuaGFuY2U=","ZW50ZW5jZQ==","MDMx","J106Cg==","YWNrZXRz","IGNlbGVicmF0ZQ==","QCIs","U2VyaWFsaXplRmllbGQ=","IGFycmF5cw==","dGI=","CXN0","W2Fzc2VtYmx5","KHJlZw==","LmNhdGVnb3J5","IGltcHJvdmluZw==","IHNhbG9wZQ==","Qnl0ZUFycmF5","T3JpZ2luYWw=","IFt7Cg==","5Zue","IENsaW4=","b2VuaXg=","IFNhbXN1bmc=","IG1haW50YWluZWQ=","IGFnZW5kYQ==","ZmFpbA==","IHByZXNlbnRz","IHRpbWluZw==","Lm1hcms=","Jz48","IHByb21vdA==","IGluY2w=","X29ubHk=","66W8","IEF0dG9ybmV5","LWRhdGU=","IGxhbmRzY2FwZQ==","IGZ1","U1k=","LnByb3A=","IEFycg==","cGFn","UGFyYWxsZWxHcm91cA==","JzoNCg==","IGxvZ3M=","YXVuY2g=","dW5jaQ==","bmFtYQ==","VGFibGVDZWxs","aXNzdWVz","Lns=","ZWN1cml0eQ==","X2V4ZWM=","b2xkcw==","IGhvc3Rz","IHByb3Rv","X2ltcG9ydA==","X3NvcnQ=","IEJvdw==","IE5vcm1hbA==","IEZhcm0=","LmNyZWF0ZVBhcmFsbGVsR3JvdXA=","Um90YXRpb24=","LmVycg==","IHBsZWFzZWQ=","aXRhZ2U=","Lldo","CQkgICAg","TVI=","IE1PUkU=","IE5hdHVyYWw=","X3RyYW5zZm9ybQ==","QkFTRQ==","ZW5lcmFs","dXRkb3du","LmNvbW1vbnM=","V1Q=","IGFhbg==","LlJlc3VsdA==","ZG9n","IGNsaWNraW5n","KSwKCg==","I2xpbmU=","T3BlcmF0b3I=","IGNpdg==","IG1lcmc=","b2J1Zg==","bmd0aGVu","IFt7","IGNhbmNlbGw=","dHJpZ2dlcg==","Ljo=","V09SSw==","ZGVjbGFyZQ==","IGRlY3JlYXNl","xZtjaQ==","bG9vbQ==","Lk5vbmU=","IE1J","IEphc29u","IGhlYWx0aGNhcmU=","aWFtb25k","c3lsdmFuaWE=","Kng=","IFJh","W2I=","IHByaW50aW5n","cGhhYmV0","IExhYm91cg==","b3BwZXI=","IHppam4=","LXRhcmdldA==","X0ZVTkNUSU9O","IG9jdA==","0LXQvdC40Y8=","5Zyo","IHdlc3Rlcm4=","IGNvbXB1dGVycw==","IFJFVA==","SGFzaE1hcA==","W1N0cmluZw==","Z2V0VmFsdWU=","X0RBVEU=","Lk5leHQ=","IEZpZg==","w6ls","aWNrZWQ=","5o4=","LU1N","IHsKCgo=","IGNvbnRhY3Rz","IGRpZ2l0cw==","UHJvZHU=","IHVudXN1YWw=","IHJhcGlkbHk=","dHVyZXM
=","IGFuZ3J5","Y2FuY2Vs","eHh4eA==","X3BhcnNlcg==","aWRpdHk=","X1BSRUZJWA==","NzEw","IG1laHI=","IHJhcmVseQ==","ZXRoZQ==","b3Blcw==","ICUu","d29ya3M=","IHRoZXRh","IGNvbnRyaWJ1dGlvbg==","IFRvbnk=","IHNxdWFk","NTM3","0LDQuQ==","IMOubg==","dGhlcmU=","b3V0ZWQ=","CXE=","mYI=","Z29vZA==","TEk=","6aG1","IExpdmluZw==","aXphYmV0aA==","IGt0","IERhbGxhcw==","XV0sCg==","IC8+Cgo=","IHJhaXNpbmc=","L3JvdXRlcg==","X2dhbWU=","MzY4","IENVUg==","emVucw==","LmVz","IGZvbnRXZWlnaHQ=","KGZ1bmM=","bm90aWZpY2F0aW9u","ICcuLi8uLi8uLi8=","IGJsYW1l","44CCCgoKCg==","YW5jbw==","OTgw","SWRlbnRpdHk=","Zm9sbG93","IGFydHM=","eHM=","IG9mZmljaWFsbHk=","IFN0dWRpbw==","IHJlY29tbWVuZGF0aW9ucw==","IGxvY2FsZQ==","IGFtYXRldXI=","IEVuYWJsZQ==","IGNhcHM=","LkVuZA==","Mzg4","LWFkZA==","X2dzaGFyZWQ=","IENU","Rm9yY2U=","CiAgICAgICAgICAgIAo=","IG9yYW5nZQ==","IGxw","IGFuc3dlcmVk","LkdyaWQ=","IGR1YWw=","IHN0cmF0ZWdpYw==","IG5vYm9keQ==","IGZhdGFs","X2VzdA==","KGVs","IOyg","IEJ1ZGQ=","QUlU","X2ZhY3Rvcg==","LW9uZQ==","IEhBVkU=","Ig0KDQo=","NzYw","UHJvZg==","IMOkcg==","c3RyaW5ncw==","IGRpcnR5","IEZhY2U=","IEJlZ2lu","IEJ1cw==","IHdpcw==","5a2X","IHNwZWFrZXI=","IGNhcnJpZXI=","IE9t","IGhhZG4=","QWxsb3c=","OjpfXw==","IHZlcmI=","IENvbXBsZXRl","IEVhc3k=","IGJpbGxz","ICAKCg==","VmVydGljYWw=","IHByb24=","IERlZmluZQ==","IGxvb2t1cA==","dmFyaWFibGVz","IHBhbmRhcw==","dW1lcw==","IGlubm9j","IHNldFVw","IENoYW1waW9uc2hpcA==","YXJ0aXN0","IENUeXBl","Rm91bmRhdGlvbg==","4LmI","IFNldHVw","NDI4","IHJlY2lwZXM=","IFVJQ29sb3I=","IEZpZ2h0","IGF1dGhvcml6ZWQ=","X2NsaWNr","OTkw","X3N1Y2Nlc3M=","YW5nYW4=","IE1vdW50YWlu","IERvY3Rvcg==","IGVnZw==","IE1lZGljaW5l","Y2xlcw==","YC4K","W2ludA==","ZGFzaGJvYXJk","IEFwcHJv","LWRy","IHByb2R1Y2Vz","IHJlbnRhbA==","IHJlbG9hZA==","Mzgx","IGFycml2YWw=","c3BvdA==","IHVuZGVydA==","Mzc4","IGVxdWlwcGVk","IHByb3ZlZA==","IGNlbnRlcnM=","IGRlZmluZXM=","YWxzbw==","IG9wYWNpdHk=","IFVuZm9ydHVuYXRlbHk=","IElsbGlub2lz","INC90LU=","IFRlbXBsZQ==","IFRyYWls","IEtlbGx5","IG1lYXN1cmVtZW50","IHNlcGFyYXRlZA==","LWNpcmNsZQ==","SGV5","IF
JFQUQ=","aWdpdHM=","IGli","IE1PRA==","YXR0ZXJ5","0LDQtw==","IHZlbmQ=","0LXQvdGC","IEh0dHBDbGllbnQ=","MzU5","c2FmZQ==","X0FTUw==","aWNpdA==","IENvbnN0cnVjdA==","IENsbw==","IFNpeA==","X1RPS0VO","KGJsb2Nr","IHdhcm5lZA==","Lyoh","ITwv","YWNhZGVz","IG1hcmc=","ZXJhc2U=","IGRpc3BsYXlz","aXN0cmF0b3I=","Z2V0cw==","IGd0aw==","X0dFTkVS","bmVk","XyU=","IGZhdm91cml0ZQ==","IEJydQ==","IMOh","c2Vjb25kYXJ5","IG1hc3Q=","IHNvcGg=","IFNhZmV0eQ==","aGFyZA==","MDYy","cmFpc2U=","IEV4Y2hhbmdl","IGNvbnRlbXBvcmFyeQ==","IGRyZWFtcw==","IHRlbA==","IG5laWdoYm9ycw==","IEhvbHk=","Mzgz","Lm1lYW4=","ODEw","ZW1pdA==","IE1lc3M=","Q2FzdA==","TkVDVA==","cGx1Z2lucw==","IHJi","d3I=","IGh1Yg==","IFN0dWRpZXM=","NTYy","IHBvc3Nlc3Npb24=","JCgnLg==","ZW5zaXRpdmU=","IGFkZENyaXRlcmlvbg==","X18u","IGV4cGVydGlzZQ==","QXJjaA==","IGN1Yg==","ZXJ2ZXJz","IHBhcnRpY2xlcw==","dWFy","IGJvdW5kYXJ5","KScs","YWpv","IHByZWY=","OmA=","IGhhcmFzcw==","aXU=","IHJlYWNoaW5n","IG1lZw==","IHpv","KElE","X3JlcXVpcmVk","IHPDqQ==","IFF1ZXVl","QU8=","IGdlbQ==","ODEy","cHRvbg==","ODgw","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","NjYw","aWpr","KHsNCg==","IGNvbGxpc2lvbg==","IFVrcmFpbmU=","IC0qLQo=","TlNJbnRlZ2Vy","X0JMT0NL","NTY3","IFRleHR1cmU=","IGRlY2xpbmVk","bmFu","X3dhaXQ=","IHBvbGl0aWNpYW5z","NDEz","IGNvaW5z","IGRlcml2","aGVscGVy","IFBlcmhhcHM=","LnJlY3Q=","IFBvbHk=","YWJsaW5n","fS8+Cg==","IGlubm92YXRpb24=","XyI=","ICk7DQoNCg==","IHNwb3Rz","IGNob29zaW5n","LmNz","IGZsZXhpYmxl","VUludA==","NDM1","OTMw","IHNjcmF0Y2g=","LWFs","IGZlc3RpdmFs","IG91dHN0YW5kaW5n","PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09","TWVhbg==","IE9yZWdvbg==","c3ltYm9s","LmFjY291bnQ=","ZG5leQ==","Jycn","ISIs","OTAx","IHBhcnRpY2xl","w4M=","W01BWA==","SVZFUg==","RVJFTkNF","TlNNdXRhYmxl","IENvbHVtYmlh","XwoK","LmZy","IGNvZ24=","VlI=","IE1ldGhvZHM=","IE1hZGU=","IEJS","IEVsc2U=","IGVnZ3M=","IHN3aW5n","IEludg==","IGRpc2Vhc2Vz","IGZpcm1z","IGxlbW1h","fWApOwo=","bGluZ3M=","IGd5bQ==","dW1pbnVt","LlRyaW0=","TWVt","IGNyaXRpY2lzbQ==","aWJ
lcm5hdGU=","X1RY","aW9uaQ==","IGd1aWRhbmNl","IHJlcGVhdGVkbHk=","IHN1cHBsaWVy","IHBhaW50aW5n","ODY0","LkZyYWdtZW50","ZWRFeGNlcHRpb24=","IHdpcmluZw==","IGNvdXJ0cw==","V0VC","5pyJ","XC4=","aWxsYW5jZQ==","IGJyb3dz","IFBhdHRlcm4=","UExJQ0FUSU9O","IFN1bW1lcg==","Q2hhaW4=","IGN1dGU=","bWVyY2lhbA==","IGRpbA==","IEZyYW5rbGlu","CWdsb2JhbA==","SU5DTFVESU5H","aGlzdG9yeQ==","IGxzdA==","UXQ=","U0RM","YWxpYQ==","aWVyZQ==","KC4uLg==","CWNpbg==","aWZmcw==","dmVsb3Bl","IFJvb3Q=","Y2x1c3Rlcg==","VXNlck5hbWU=","aWduZQ==","PFM=","IGZlc3Q=","NDE5","IGluZGljYXRpbmc=","a2VlcGVy","IGNhZGE=","w6ln","Y29uc2lu","IEdC","IGxi","ZW1vbnk=","LWljb25z","X2RvYw==","QWN0b3I=","ZWxlbQ==","LkRlbGV0ZQ==","IGluZmVjdGlvbg==","IFByaXZhY3k=","IGdyZWF0bHk=","IFBvcw==","IFRyZWF0","Rmxvdw==","IGF0dHJhY3RpdmU=","IE1hcmM=","c3Vkbw==","dGVzeQ==","LWFu","OTk4","YWJhbWE=","IFdvdWxk","IHN1Y2s=","aW5kZXhQYXRo","IEV0","VGltZXM=","Nzgw","IGNsdWJz","X2Fzc29j","IGFjcXVpcmVk","KCI6","IGludGVuc2U=","Lm1hcHM=","RXhwZWN0ZWQ=","VG9nZ2xl","IGF5","IGxpZmVzdHlsZQ==","LWNhbGxlZA==","IFNub3c=","Vm9sdW1l","IGNhbm5hYmlz","IERpcmVjdGlvbg==","IExpbWl0ZWQ=","LXNwZWNpZmlj","IGRvd250b3du","L2ljb25z","IHJldmVu","TGVn","ODg1","PW51bGw=","NDk2","S2V5Ym9hcmQ=","JykpLg==","ICIiOw0K","IGF0dGl0dWRl","Lm5hdmlnYXRl","LWVycm9y","QU1QTEU=","IEpheQ==","dnI=","Y293","LmNvbXBpbGU=","IG1lbW9yaWVz","X21hcms=","IE1pbm5lc290YQ==","IGtvc3Rlbg==","IHByb2JhYmlsaXR5","d2FybmluZw==","IGdlbmV0aWM=","Rml4dHVyZQ==","IEhhc2hTZXQ=","Tm9tYnJl","X21vbnRo","xrA=","LXN0YXJ0","eHlnZW4=","CWZ0","aWFnbm9zdGljcw==","IE1hdHRoZXc=","IGNvbmNlcHRz","IGNvbnN0cg==","LlN0YXRl","0LjQvQ==","Tm92","zrE=","IFBhbmVs","5Liq","Y29tcGFyZQ==","PigpCg==","IGFwcGx5aW5n","IHByb21pc2Vk","IG94","bmNpYQ==","IFZhbGlkYXRpb24=","b3J0cw==","X2N1cg==","ZWxlY3Q=","ZXll","KERhdGE=","IHJlcG9ydGVy","IEJ1ZmY=","Mzk1","IHNy","ICI7","aWNreQ==","IHRlbXBvcg==","U04=","IHJlc2lkZW50","cGlyZXM=","eXNpY2Fs","IGVuZG9yc2U=","IFNvbmc=","aXNFbXB0eQ==","bGVldA==","X3V0aWw=","IGRpc3Rpbmd1","IFRhbGs=","IE1vdA==","KGRlZm
F1bHQ=","LkFyZw==","Z29yaXRobXM=","X3dvcmRz","aW1tZXI=","X3Jlc2V0","ZmFtaWx5","V1c=","IHNhdmluZ3M=","IOKAnQ==","X2VuYWJsZQ==","c2lkZWJhcg==","UnVubmluZw==","IGFsaQ==","IHRlc3RpbQ==","IHdhcm5pbmdz","IENoZW0=","IEV4aXQ=","IGZvdW5kZXI=","cGVjdG9y","IHJt","X2RhdGFzZXQ=","IERhcw==","IGhhbg==","R2V0dHk=","w6Fs","IG55","IHBvdmVydHk=","IHJlc3VsdGVk","LmJ5","IFZpc2l0","IG9idGFpbmluZw==","LycuJA==","ICAgICAgICAgICAK","c2hhbGw=","X0xFRlQ=","VUlJbWFnZQ==","X05hbWU=","aGF2ZQ==","IE5vYg==","bHI=","LWZvb3Rlcg==","IG5ha2Vk","IEdhcmRlbg==","XEZhY2FkZXM=","IGdyYWR1YXRl","NDE3","IGZyYW5jaGlzZQ==","cGxhbmU=","IGNvbnRyaWJ1dGlvbnM=","IHN0cmluZ1dpdGg=","IGNyeXB0bw==","IG1vdmVtZW50cw==","YXRoZXJz","IGxpZmV0aW1l","IGNvbW11bmljYXRl","amFy","IEZyYWdtZW50","X0lG","IE5hdnk=","IEZpZ3VyZQ==","IHNpbXVsYXRpb24=","X3N0b3A=","IHJlcG9ydGVycw==","IHZlcnN1cw==","YWph","IM6x","IGdvdmVybm9y","TGlzdEl0ZW0=","IHNlYWxlZA==","LkJhY2tncm91bmQ=","ZWRp","YXNoaW5n","IGxpcA==","IElo","bWVyZ2U=","IG5lYw==","MDI0","ZWxvY2l0eQ==","QVRFRw==","IHNlZWRz","IGZsb2F0aW5n","NzAx","X0ZB","d2Fsaw==","CXVzZXI=","X2RlcHRo","IHdhZ2U=","QGFwcA==","Tmls","KFsi","KHZlY3Rvcg==","IHNlY3JldGFyeQ==","NDYx","IGpQYW5lbA==","dmV6","wqDCoMKgwqA=","ZGlyZWN0aW9u","IEVQ","IGh1bnQ=","Mzk2","SnNvblByb3BlcnR5","IFBPUlQ=","XSIs","0LDQvw==","IEZvcmVpZ24=","cGFuaWM=","IHRyaWFscw==","IEFsZQ==","IHJ1cmFs","LXZhbHVl","YXV0aG9yaXplZA==","IFNjb3RsYW5k","LmRyb3A=","IE1U","57E=","Mzkx","cm93dGg=","NTE1","RmlsZVBhdGg=","IHJlY2FsbA==","aWZsZQ==","IGNlbA==","IFNFTEVDVA==","a24=","X2Nhc2U=","IGNyb3A=","NTQz","c3VyZQ==","cG90","SUNT","IHN0ZW0=","IGluZHVzdHJpZXM=","UHV0","IGFiZXI=","cm9hZGNhc3Q=","SWNvbnM=","KSIpCg==","5oiQ5Yqf","Z3Vp","IGFzc3VtZWQ=","IHJ4","RUE=","6Kc=","RUxM","IGRvc2U=","IGluZQ==","IGRlZXBlcg==","bGlkZXI=","IG9yZGluYXJ5","IGdvbGY=","NjA1","X0lNQUdF","IE5BTUU=","KG1vZHVsZQ==","IGF0b20=","IGJlbHQ=","IG9mZmljZXM=","NTA2","YmV0YQ==","IHBoaWxvc29waHk=","KEpTT04=","LWZpZWxk","IGludHJvZHVjZQ==","IGNvbnZlbmllbmNl","b3B0aW0=","PiIK","YXRoeQ==","IGVtcG
xveWVy","cXVhdGU=","IGVkaXRlZA==","QXJndW1lbnRz","IE5hdGlvbnM=","X18p","IG5vc2U=","IFNhbXBsZQ==","JykKCgo=","IGNha2U=","LmdldEF0dHJpYnV0ZQ==","SEQ=","Mzky","TW9kaWZpZWQ=","NDQ1","IHByZWRpY3RlZA==","xYQ=","YW5pZQ==","U29ycnk=","KGRvYw==","d2luZA==","aWV2ZQ==","IHByb3Zpc2lvbnM=","QVRFUg==","T1RF","TVk=","LkF1dG93aXJlZA==","IEJhdGg=","NDIz","LkJvb2xlYW4=","IGJhY2tlbmQ=","Lk1vdXNl","YXRlcmFs","cGFwZXI=","Q29uc3Q=","IFZS","X2VudGl0eQ==","X0NUUkw=","IFByb3RlY3Rpb24=","IEdN","IFN0dWR5","IHNvdXA=","b3RpbWU=","J3VzZQ==","XSI=","L3VzZXJz","YXVn","IEhvbmc=","X25vcm0=","44Go","IHNlY3Jl","KEJ1aWxk","IENvbnRyYWN0","b2xhcw==","IHNhdWNl","IGFnZ3Jlc3NpdmU=","IHJhY2lhbA==","Y2hhcmFjdGVy","QEA=","IGNvbXBpbGU=","IFZvaWQ=","X3JlbQ==","X21lbW9yeQ==","MzQ4","a2s=","IG1pYw==","U2FtZQ==","VXRpbGl0eQ==","IEh0bWw=","IFhtbA==","UmVhZHk=","IGdhbGw=","IGFsbGVnZWRseQ==","CQkJCSAgIA==","IE1ldGFs","IFBlcnNvbmFs","IGJvcmRlclJhZGl1cw==","cnhqcw==","b2JqZWN0cw==","IHdhbnRpbmc=","IGJvd2w=","dmVuZG9y","b2Zmc2V0b2Y=","IFJz","IFJhdGluZw==","IHJhbGx5","X05PREU=","NDE4","IE1peA==","IGFkdmVydGlz","NDg1","NjY3","IG5hcnJhdGl2ZQ==","c2Fs","IG1j","U0Vycm9y","IGZpbmdlcnM=","IGFjY29tcGFueQ==","IHRpcmVk","IHN0cmlkZQ==","IGd1aQ==","ZWxpc3Q=","TG9jYWxl","IHJlbGVhc2Vz","aWtpbmc=","IGFuZ2Vy","KSkpCgo=","YWxsZXN0","U3VtbWFyeQ==","KE8=","KGZvcg==","IGJhc2tldGJhbGw=","IHJvYWRz","IEluc3RhbGw=","IEZhYg==","aXRtYXA=","NDc1","ICkpCg==","IGludGVyc2VjdGlvbg==","aWdoYm9y","IEJyeQ==","IEhFUkU=","U29mdHdhcmU=","ZWxmYXJl","YWNz","NjIy","IHRyYWlsZXI=","LmdldENsYXNz","Y2hhcnM=","IHJlZ3VsYXRpb24=","IHJlZmVycw==","IGRlc3RydWN0aW9u","IGNvbnRpbnVvdXM=","IEF1c3Rpbg==","6aI=","YWthbg==","LndpbmRvdw==","IFRlbXBsYXRlcw==","IGFic2VuY2U=","Om4=","IGRpc29yZGVy","Zmxhc2g=","IGRlbGV0","Ym9hcmRz","ICAJ","Uk9Q","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","IGFjcXU=","IGxhd3N1aXQ=","IFJldmlld3M=","IGdhcmFnZQ==","dGltZXI=","IGVq","IFJlY3RhbmdsZQ==","IGZsb3dlcnM=","Mzk4","aWxzdA==","IEluc3RhbmNl","U3Vw
ZXI=","ZGV0","ZGlzcG9zaW5n","IEVT","IElD","dmVyZQ==","U2s=","X2NoYW5uZWxz","cHV0ZWQ=","L251bGw=","bm5lbg==","NDMx","IEdhbGxlcnk=","X2dsb2JhbA==","QXV0aGVudGljYXRpb24=","IFJhbms=","IGJsb2NrZWQ=","IGNhbG0=","bWFya2V0","CXZhbA==","IGF1Zw==","cGVyaW9k","IENvbnN0YW50","ID8+Ij4K","IGxvYmJ5","cGFs","Mzc5","IHNpbms=","NTA4","aWFo","0KE=","dXJuYW1l","IGNvbnZlcg==","IGludmVzdGlnYXRl","Q2hyaXN0","SHVi","IElORA==","IFBlZA==","dXJhcw==","CXVybA==","IFRybw==","IHByZWZlcmVuY2Vz","IGd1YXJhbnRlZWQ=","YAoK","IHBvcnRpb25z","IGV2YWx1","Jz48Lw==","KCl7Cgo=","ZW5jb2RlZA==","emlsbGE=","LkNsYXNz","ICpf","Xyc=","IHZpZXdlZA==","IFBoaWxhZGVscGhpYQ==","LnJvd3M=","QWRkZWQ=","IFRvdWNo","ODQw","LmRlbGVnYXRl","cXVlZXpl","c2xpZGU=","IFNlbmlvcg==","KHRhZw==","IGludGVydmlld3M=","IHN1YQ==","YXRhcw==","QAoK","ZGlzdGFuY2U=","IHNlaW4=","bGF0ZXN0","IFByaW5jZQ==","IGx1eHVyeQ==","IHJlZnI=","IEtpdGNoZW4=","0YQ=","KGF0","RmluYWw=","w7xjaw==","X3plcm8=","IEFCQw==","IE1hbmNoZXN0ZXI=","IGNvdw==","Q09M","X05VTUJFUg==","Y2hhbmdlcw==","Z2VuZXJhdGU=","LlByaW50Zg==","MzY5","c2hhcmU=","U3RvY2s=","IFBU","QW5pbQ==","YW5nYQ==","IGln","dXBsb2Fkcw==","IHBhY2tlZA==","IH1dOwo=","KHNlbmRlcg==","IFdpcmU=","aXNvbnM=","IHBsYXlvZmY=","XEU=","NjA4","L1I=","IGhlYWRlZA==","QWxwaGE=","KG9yZGVy","IG9wcG9uZW50cw==","YWNrc29u","X21lbWJlcg==","VHVybg==","IFNvdmlldA==","7JeQ","YXVnZQ==","NDQ4","IGluY29taW5n","IGphaw==","LWdhbWU=","IE1hbGU=","IE1vbnRo","U3RhZ2U=","LmV4ZQ==","T3duUHJvcGVydHk=","LnNldEl0ZW0=","IGRj","5L2c","IGJydXQ=","IGF0dGVtcHRpbmc=","Lmxlbg==","IGp1ZGdtZW50","IHNhYg==","IGNhZA==","IEl0ZW1z","Y29tZm9ydA==","ZWxpemU=","L2xvZw==","IGVudHJlcHJlbmU=","IGNvbXBpbGVy","X3ZhbGlkYXRpb24=","cmV2aWV3","IHRleHRCb3g=","IGZyYWN0aW9u","IEJhbA==","PjsKCg==","LkF1dG9TY2FsZU1vZGU=","IGNhdHM=","NDY1","IHJlZ2lzdHJ5","dWx1cw==","Rkk=","cGF5bG9hZA==","LXNlYXJjaA==","IHN0YXlpbmc=","YWNpb3Vz","RGVjb3JhdGlvbg==","UmV2aWV3","SW5m","S2VlcA==","aXRpcw==","LFN0cmluZw==","Q29vcmQ=","IHBlcm8=","U2V4","IEF0bGFudGE=","dWVzdGE=","QXJnYg==","Pio=","fV8=","R
m9vdGVy","IGVtcGxveWVk","X2JvdW5k","dmlkZQ==","LmZ1bmM=","JHNjb3Bl","IHNwbw==","IEFuYWw=","b3VuY2Vk","YXJvdW5k","IHJlc3RyaWN0aW9u","IHNob3Bz","5YA=","IExhdGlu","LWNvbA==","IGJhcmVseQ==","IEV1cm8=","RXI=","IGZhaXJl","X2Rpc3RhbmNl","X3VubG9jaw==","UXVvdGU=","SVZBVEU=","IOWI","IGFpbWVk","IFJldHJpZQ==","Lml0ZXI=","IHdyYXBwZWQ=","IGFncmVlbWVudHM=","c3RydW1lbnQ=","KHByb2R1Y3Q=","IHN0dWRpZWQ=","LnNldFZhbHVl","IHll","IENhY2hl","TUJPTA==","IHF1YXJ0ZXJiYWNr","IHN5bnRheA==","LmdldEVsZW1lbnRzQnk=","LnZlcnNpb24=","d2Vic2l0ZQ==","UnVubmVy","X3NpbmdsZQ==","YXRpdg==","IEFsdGVybg==","IEJlYXV0aWZ1bA==","cmlnaHRhcnJvdw==","IGRpdmVyc2l0eQ==","cGxhc2g=","KGNv","LkZpbGw=","IHR5cGluZw==","Mzg3","MDIz","IGNsYXI=","SGl0","T08=","YWNjbw==","NTA3","d29ydGg=","IHNjcmlwdHM=","IE11c2xpbXM=","IExM","ZXJ2aW5n","KGJvb2xlYW4=","IGJhc2ViYWxs","IENBTg==","Mzk0","MDQ0","TUFJTA==","ZGVwZW5k","IHJlc3BlY3RpdmU=","IGNvbnN0ZXhwcg==","Lio7Cgo=","J10pKQo=","IHlhcmQ=","IGlkZW50aWNhbA==","aWZlY3ljbGU=","VVNI","dXBpdGVy","LnZhbGlkYXRl","Y2xp","SVNURVI=","SW5kaWNhdG9y","RmFpbA==","IGRlbW9jcmFjeQ==","LnZhcg==","IHNhdGlzZmllZA==","LS0tLS0tLS0tLS0tLQ==","ZW5jZXI=","aG9y","IHJvdW5kcw==","REFP","b2E=","IGZsYXNr","PWM=","W10K","L2Rpc3Q=","IHBhcnRl","IGNvbmZpcm1hdGlvbg==","ZXJvbg==","YXdhcmU=","PD8+","IGRlcGVuZGVuY2llcw==","IFZpZGVvcw==","LXJvdw==","ICoqLwo=","IG5vdQ==","IGhvdmVy","5p4=","IG5pbg==","IFVTRA==","TWFj","X0xvYWQ=","IG91dGNvbWVz","X3NvY2tldA==","IHF1ZXJpZXM=","d20=","NTky","IGhpdHRpbmc=","aW51eA==","TWljaA==","dWRnZQ==","QVRBQg==","IHZ1bG5lcmFibGU=","5L4=","IHBvcnRmb2xpbw==","OllFUw==","CW1hcA==","Qm91bmQ=","IGl0ZXJhdGlvbg==","aW5jZXNz","IGFjdG9ycw==","IFF1YWw=","X2NsZWFu","44CR44CQ","TVNH","R3JlZW4=","IE9mZmljZXI=","IHNtb2tpbmc=","Pics","IEZsbw==","Kys7","NDMz","b2x5Z29u","IGJ1bGs=","IGRyYW1h","IGV4Y2VwdGlvbnM=","b3NlZA==","ICsNCg==","IGxlZ2FjeQ==","Q1Y=","IGNvbnRyaWJ1dGVk","IFRlcm1z","IGJ0","NDM0","IHVudHVr","IGFsaWVu","PT09Cg==","CVZlY3Rvcg==","IGxz","T25saW5l","LmZhY2Vib29r","bnVtZXJpYw==","b2NrZXRz","QX
V0","YnVyeQ==","LXJlZHV4","IFJlZGlzdHJpYnV0aW9ucw==","R0xPQkFMUw==","dXJyZW5jaWVz","IHRvbnM=","4oCZLA==","IMOq","KGNvbA==","IFN5bWJvbA==","IHN0YXllZA==","IE1M","IG11bmljaXA=","IHNleG8=","U2Vu","bnI=","IGdhaW5z","IHNob3J0bHk=","Lk1lbnU=","w70=","S05PV04=","IG9wZXJhdG9ycw==","LVY=","IFBhdHJpY2s=","L2FkZA==","X0NP","aXJhdGlvbg==","KHBvc3Q=","UG9zdHM=","L18=","IHBsdWc=","IGludGVsbGVjdHVhbA==","IG1ldGFi","IHByZWduYW5jeQ==","IFByZW1pZXI=","bm0=","IHByZWRpY3Rpb24=","NjA2","IE1pbmlzdHJ5","VGhyZWU=","dmFsdWF0ZQ==","IE1pbmk=","YnU=","0L7Qtw==","PHVs","IGRk","b2x2aW5n","IEN1dA==","NjAy","IHNjaGVt","LnRyYWlu","aXRhdGU=","IHJpY2U=","IGJpcmRz","44Gr","bWlkZGxl","c3RydWN0aW9ucw==","IG5lcnY=","YXF1ZQ==","NDUz","IGZsdQ==","IHN1cnZpdmFs","IEdhbGF4eQ==","IEZhbnQ=","Lk9yZGVy","QXR0cmli","aXJ0cw==","w6lj","TW92aWU=","IGNvbmNl","cXVhcnRlcnM=","IG1vb2Q=","LkFkZFJhbmdl","OTQy","IHJlc29sdmVk","44OI","IGJ1cm5pbmc=","NzAy","CQkJCQ0K","IFdF","IGhvc3Rpbmc=","TEFC","IG1hbmFnZXJz","IHN0cmVuZ3RoZW4=","PGNvbnN0","IEZpcmViYXNl","b25lZA==","IEplYW4=","Jzwv","IDo9Cg==","YWxnb3JpdGht","IEFyYw==","IGZyb3plbg==","X2V2ZW50cw==","IG92ZXJzZQ==","Z29vZHM=","IGZhaXQ=","IHZpYWdyYQ==","b3Nlcw==","OTIy","IGNvbXBpbGVk","IEF0aA==","IHN1YnN0YW5jZQ==","YW5pbWF0ZWQ=","UEY=","cHJldmlvdXM=","IHJvb3Rz","KGZpbHRlcg==","b2x1bWVz","IGludHJv","KGV2dA==","IEJhZw==","IERlZmluaXRpb24=","IEZlYXR1cmVz","QW5ub3RhdGlvbg==","IGF2Zw==","KHN1bQ==","UVVJUkU=","IHJlbmRlcmVy","IEZpeA==","LmRhdGV0aW1l","PWRldmljZQ==","U3Bl","Z2V0SW5zdGFuY2U=","IGV4dGVuc2lvbnM=","X25ldA==","IFBhcmxpYW1lbnQ=","IGNvbWlj","NDY4","IFBpY2s=","YXJtYQ==","CW1vZGVs","IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t","IG1lbmc=","bWFudWFs","YWRhcHRlcg==","fS0=","ZWRiYWNr","IGVsZWN0cmljYWw=","IENvdW50ZXI=","QXBwbGljYXRpb25Db250ZXh0","X2J5dGU=","KGJ5dGU=","IEF1dG9t","IHRlcnJvcmlzdA==","55A=","dGhyb3VnaA==","IGZpc2NhbA==","b25pbmc=","NDU1","IHNwZWN0cnVt","IGJpdG1hcA==","IHNsZQ==","cHJvZA==","IGFnZWQ=","IGJlbmU=","IFNwaQ==","IGJyaWxsaWFudA==","IHN0YWJpbGl0eQ==","IGRpYWJldG
Vz","IGNvbmZpZ3VyZWQ=","Ym9uZQ==","NzQ4","NDg0","b3VzZXM=","Lmdvb2dsZWFwaXM=","RkFDRQ==","IGluc3BpcmF0aW9u","IERldHJvaXQ=","ZW5jaA==","0YDRgw==","dmVoaWNsZQ==","U3RhdGlvbg==","IGhvbGVz","IGR1cmNo","Lk1lZGlh","IENOTg==","aW5uaW5n","NjA0","IFBlbm5zeWx2YW5pYQ==","IGVtb3Rpb24=","U2VjcmV0","w6FyaW8=","IFJhdGU=","NDUx","RGVwdGg=","IG1vZGVz","NDI2","KGlkeA==","IGhlcw==","IGdyZXk=","U3RhbmRhcmQ=","UXVlc3Q=","YnV5","c3Vy","IFRyYWNr","b21t","Lmds","IChc","dHdv","X0lP","b3NleA==","X3JvbGU=","56S6","cm91dGVz","U2hvcA==","IEFTQw==","IG1lbWNweQ==","ZGlyZWN0","NDQ2","ICoKCg==","IEJN","IFBvcg==","X2hpc3Rvcnk=","IFJlc3BvbnNlRW50aXR5","LnNldEZvbnQ=","IGVuZ2FnZW1lbnQ=","LGg=","IFdvcmRQcmVzcw==","ZmVjaGE=","IGVudHJhbmNl","RGVzcGl0ZQ==","SURFTlQ=","IHNhbml0","IEdlbmVyYXRl","KCIiLA==","X3ZpZGVv","U3RyYXRlZ3k=","X29r","IHRpZXM=","IGxvZ2ljYWw=","IEJyb24=","KEZpbGU=","IE1vaA==","LlNwbGl0","LlRyeQ==","IEhpbmQ=","IHNjb3Jpbmc=","IGFwcHJvYWNoZXM=","IGZsb3Vy","VlJU","ODA0","VVNUT00=","NDY3","c2NyaXB0cw==","IEVwaXNvZGU=","Mzg5","IEFtYg==","X09S","IGZyYXVlbg==","IHVubGlrZQ==","IHJpZGluZw==","IHBpdA==","IHRyYW5zZg==","YXJ0ZQ==","4LmJ","cmFwZQ==","cmV0dmFs","X2FmdGVy","Ijw8","NzAz","IEJlcmxpbg==","IHRpc3N1ZQ==","LkludGVudA==","INC00LvRjw==","IHN0dW5uaW5n","IEhhbA==","LkludGVnZXI=","IHdoZXJlYXM=","IGRlbGVn","IHVzZXJOYW1l","IGZvcm1hdHM=","IGNvbXBlbnNhdGlvbg==","IEh1bQ==","YXJyaW5n","IHVuc2FmZQ==","UGlu","Y2x1Yg==","a2V5d29yZA==","X3RoZW1l","IGNhbGxlcg==","IGdob3N0","IGVudGl0bGVk","IE1hcw==","NTYx","IGRlbW9uc3RyYXRl","IEhvd2FyZA==","RHJvcA==","I3VuZGVm","NDI3","IGludm9rZQ==","IEJyaWRnZQ==","ZW5kZW4=","aWJsaW5n","U2xvdA==","QVRBQkFTRQ==","IHRlbXBlcmF0dXJlcw==","c2VyaWVz","IFJlbWVtYmVy","Q2FsZW5kYXI=","QkY=","PT8=","MDY0","IEFG","KGh0dHA=","bWFrZXJz","ZmluaXR5","cHJlY2F0ZWQ=","V0g=","b2xpZGF5cw==","LXVu","aWFsZQ==","XFVzZXI=","cmVhc29u","JywKCg==","T1dFUg==","IHByZWRpY3Rpb25z","cHJvYg==","Lm5u","ICc7Cg==","LkZyb21Bcmdi","X0xPTkc=","IHRyb3Vi","IHVuaXR0ZXN0","ZWxpaG9vZA==","CWlz","NDQy","IGNvbnNlYw==","TEV
BU0U=","IGNsaWNrZWQ=","IHRlbXBsYXRlcw==","Qlk=","cGVybQ==","bWF0Y2hlcw==","bGF3","KHRm","X3JhdGlv","aXRlbXB0eQ==","IGNyZWF0b3I=","Qml0cw==","RW5jb2Rlcg==","Ki4=","IFVJVA==","IE1hc2s=","Y3VybA==","LWdv","IE9jYw==","Y29ycmVjdA==","IEdlcg==","KGxheW91dA==","dW5jdA==","LmRpc3BhdGNo","O2FtcA==","LmlzUmVxdWlyZWQ=","CWRv","bWly","IHB0aHJlYWQ=","LWF1dG8=","IEljZQ==","IHZpb2xhdGlvbg==","IGNvbmNsdWRlZA==","IHZhcnM=","Y2FudmFz","IFRlbXA=","IFBoaWxpcHA=","iOuLpA==","Y3JlYXNl","IGZpc2hpbmc=","YWJiaXQ=","IGNvbmNlbnRyYXRpb24=","aXJ0aGRheQ==","IGdyb3Nz","IGtp","IEhhbmRsZXI=","IGltbWlncmFudHM=","6IA=","VW5k","cG4=","cmFj","NDU0","IENvbnN1bHQ=","Zm9sZA==","IHN0cnVnZ2xpbmc=","aGVhdA==","R2VuZXJpYw==","IHJpZGlj","IENPVklE","b21pdGVtcHR5","X09QVElPTg==","6rCA","IGNyZWF0dXJlcw==","X1BBR0U=","ZWk=","KGhvc3Q=","X0hQUA==","NTE2","IFhYWA==","IGF3aw==","YXNjYWRl","IHByZWc=","cHJvdmlkZXI=","UGFs","ZWdlbg==","Y2xvbmU=","LlJlZ2lzdGVy","IGF0dGFjaG1lbnQ=","YmVpdA==","dGhlbGVzcw==","KERhdGU=","IEZvcmVzdA==","Q0dSZWN0","IGNoaWxkaG9vZA==","YW1pbmU=","YXhlcw==","J109","TmF2aWdhdG9y","IHJlcGxpZWQ=","X2ludg==","LFQ=","IEZlYXR1cmU=","NDM4","ey0=","TEFORw==","IGNvbnZleQ==","55So5oi3","IFNlcmlm","IEF1cw==","bGljaGU=","IHVudXNlZA==","IG1vbnQ=","bm9kZXM=","IHNldQ==","LmNsYXNzTmFtZQ==","bm9ybQ==","X1NFUlZFUg==","IHdpbmc=","aW54","UmF3","IEphbQ==","NTkw","IGluc2lnaHQ=","NDcx","NTM1","IE5H","IEludGVyZmFjZQ==","IHN0bXQ=","IG5hbg==","Y3VsYXRvcg==","LWFwcA==","KEJ1bmRsZQ==","TWVzc2FnZUJveA==","4K4=","IG1lZXRz","dWJ5","T3B0aW9uUGFuZQ==","aXRhcmlhbg==","IGNvbGxhYm9yYXRpb24=","bW92aWU=","IGFybW9y","X2JpdHM=","IEhhdmluZw==","IG51ZGU=","IFNldHRpbmc=","IHN1Y2M=","RGVsYXk=","LmNvbXBvbmVudHM=","YWNodXNldA==","IEFsZXhhbmRlcg==","wqk=","IG1ldGVycw==","IHByZXBhcmluZw==","IGluY2VudA==","5ZM=","IGvDtm5uZW4=","IENvbnNlcnY=","IG51bWVybw==","YWNodXNldHRz","LWludA==","IGVtcGhhcw==","bGF5b3V0cw==","RXhjZWw=","SUJBY3Rpb24=","IHJlc2lkZW50aWFs","ZWxpbmc=","IE5D","IEFsbGVu","IGNldHRl","IG1pbmRz","LnJlcXVpcmVk","2LM=","IEdpcmxz","IH07",
"IHN0cmluZ1dpdGhGb3JtYXQ=","IGFkZHJlc3NlZA==","dGhleQ==","IEJsb29k","cG9zZXI=","IGphbQ==","yJk=","5pWw5o2u","IHN0ZG91dA==","IFVURg==","Q2xhc3Nlcw==","PiI7DQo=","IFNhdg==","LkJvbGQ=","IGVuYWJsZXM=","CXRtcA==","IG1hbnVhbGx5","IFNxdQ==","dXNlcmlk","LmZ1bmN0aW9u","LmNhY2hl","TE9QVA==","LlNlcnZpY2Vz","NTg4","ZGRpdA==","dGlt","PGltZw==","IFRoaW5ncw==","IEV2ZXJ5dGhpbmc=","IGFwdA==","Mzk3","ZW1hbmQ=","IHJvbGxpbmc=","66Y=","LmxldmVs","IHN0b20=","IFdpbnRlcg==","IHZpZXdpbmc=","KHZhbHVlcw==","b2NvbXBsZXRl","dmlh","dXBv","IGFib3J0aW9u","NTMy","acOocmU=","77yR","X0JVVFRPTg==","X2RvbWFpbg==","IGJyYQ==","IEFzdA==","aW5hcw==","IHN0YXRpc3Q=","Y29k","TFI=","IGRyaXZlcw==","IGZvbGxvd2Vycw==","IGFsbGllcw==","CWN1cnJlbnQ=","ZWNlc3Nhcnk=","IGRhbWFnZWQ=","X3B0","YW5kbGVz","b3VudHJpZXM=","IHNpbXVsdA==","ZXU=","IGNvbnRyb3ZlcnNpYWw=","X0dST1VQ","IHJpYg==","LkluZm8=","Om1t","Lm5vcm1hbA==","X0FERFJFU1M=","IO2V","YWRkbGU=","IER1cg==","LkVsZW1lbnQ=","NjU2","V2FybmluZ3M=","IGNyZWRpdHM=","IGluaGli","IGVtaXNzaW9ucw==","NTQ1","IGhheg==","LnlvdXR1YmU=","dWdnZWQ=","IGJvdGhlcg==","IEthbnNhcw==","IEZpeGVk","IFRlc3Rz","IEZJWA==","NTc2","VW5pZm9ybQ==","IGtvbnQ=","Pj4+","c3RhdGlvbg==","bG9yZQ==","YXR5cGU=","aXNob3A=","LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio=","NTIx","Q29tYm9Cb3g=","IHZhY2F0aW9u","IGluaXRpYXRpdmU=","IGRlZmF1bHRWYWx1ZQ==","Nzcw","Y29uY2F0","IEto","NjMy","IFdlbGNvbWU=","aXplZE5hbWU=","TWlncmF0aW9u","IGdyYWRpZW50","SG90","IGhhcmRseQ==","ZWxv","IFN0dWRlbnRz","IGxvb3Nl","NzMw","YXR6","LlNlbmQ=","Jy8=","IHVuaXZlcnNhbA==","IGVudGVycHJpc2U=","IHJlZ2V4","IHZpc2l0b3I=","IEZseQ==","U2Vx","4LiZ","IFZpc3VhbA==","IGxpYnJhcmllcw==","YXRvZXM=","UGF5bWVudA==","NDQ3","IHBlbnQ=","IGdhdGhlcmVk","VlJUWA==","IERN","U3BsaXQ=","IGxldHRpbmc=","0J0=","X2Vycm9ycw==","ZXBvY2g=","UEFSQU0=","Y3U=","0YHRgtCy","b2x1dGlvbnM=","RWRpdGluZw==","Zm9udHM=","IGFsbG9jYXRlZA==","IEJhc2Vk","KFk=","IEp1ZGdl","IGJyb3RoZXJz","RklMRVM=","w6dv","NTMx","d2I=","X1BJ","J14=","IHN3b3Jk","LnNlcnZ
pY2Vz","IG5s","VGlt","aWdn","IE1vb3Jl","IGNyeXB0b2M=","5Ye6","X3Bvc3Rz","b3RhdGU=","Pyc=","Li4uLgoK","IGts","PSIk","IGRlY29yYXRpb24=","4bqh","IERJUkVDVA==","R1VJ","KT0+ewo=","IG5ld3NsZXR0ZXI=","IHByZWNpcw==","KHBvaW50","IEVxdWlwbWVudA==","dXR5","IERhdmU=","IHBhcnRpY2lwYXRpb24=","dWFyaW9z","eGl0","LkFz","RVRFUg==","b3JvdXM=","IHNoaWVsZA==","W10+","aWxpdGFyeQ==","Lm9yaWdpbg==","IHByb21vdGlvbg==","VW50","IGN0","VFJB","NTU2","Vmlld0hvbGRlcg==","IHNpZ21h","ZGVsdGE=","YXJlaG91c2U=","Y29udHJhY3Q=","KFZlY3Rvcg==","NzIx","IGNvbXBldGU=","L2Zvcm0=","L2NvbXBvbmVudHM=","IG5y","IEluZG9uZXM=","INC+0YI=","IFZvbHVtZQ==","LmZpbGVz","KHJlc3A=","L21vZGVscw==","IHN1cmY=","c3RhbmRhcmQ=","L28=","IFhDVEFzc2VydA==","VklDRVM=","LkNvZGU=","U0VE","IGFjdGl2YXRl","RGVsdGE=","IGxpbWl0YXRpb24=","cmlq","IHByZWduYW50","Ol4o","IHNvdXI=","cGll","ODAz","IGV4cGVuc2U=","aWNhdGlvbg==","IExhcmdl","IMKx","IEJvd2w=","KG1vZGVscw==","L04=","ODU3","UGE=","LnJlbG9hZA==","IHdvbmRlcmluZw==","NDYy","RXhlY3V0aW9u","CSAgICAgIA==","IEdyYXBoaWNz","IENvbnRpbg==","X2pvYg==","IGdldE5hbWU=","IE1hZ24=","IERXT1JE","bWFk","IG5o","ZmVhdHVyZXM=","fSIpOwo=","aGVldHM=","KHRyYWlu","em4=","IHJlY3J1aXQ=","LmNvbm5lY3Rpb24=","IGJhcnJlbA==","IHN0ZWFt","X3NldHRpbmc=","IGFuZ3VsYXI=","YW5lb3VzbHk=","IGJpbA==","IE5vcm0=","NTIy","KCEk","aWJ0","JSg=","IHBvc2l0","IEZhdGhlcg==","aW50ZW5kbw==","NTY1","TGl2ZQ==","MDQx","IHBvcnRz","IG1lag==","IGxhbmRpbmc=","cG9uZGVy","IGNvZA==","X0hFQURFUg==","Lk1hcmdpbg==","IGJhbGxz","IGRpc2N1c3Npb25z","IGJsZW5k","SGV4","IGZhcm1lcnM=","IG1haW50YWluaW5n","ICAgDQo=","c3lu","W1Q=","cnVz","NDM5","dWZmZXJz","IGNvbnRyaWJ1dG9ycw==","X3N5cw==","LkRlYnVn","IGNvbnN0cnVjdGVk","b21lcw==","P2lk","c2xpZGVy","IHN1cHBsaWVycw==","NjEx","c2NyaWJlcg==","cGVz","0J4=","IjoNCg==","XENvbnRyb2xsZXI=","KSkKCgo=","IGx1YQ==","TXVsdGk=","RU5T","U3Jj","IHBldGl0aW9u","IHNsYXZl","bG9va2luZw==","VkVSVA==","CXZlY3Rvcg==","U3BlY2lhbA==","aGg=","YW5uZQ==","IE5pZ2Vy","L3ZpZXdz","emluZw==","ZW5kYW50","PEM=","c3BlZWQ=","NTE0","IHt9OwoK","QmVnaW5Jbml
0","IGZvcGVu","QFJlcXVlc3RNYXBwaW5n","RW5kSW5pdA==","IHB1bmNo","U2VuZGVy","NjAz","6ZQ=","Z2V0TWVzc2FnZQ==","L3R5cGVz","LlBJ","KCcnKTsK","b2N1c2Vk","KGFsbA==","IGRyb3Bkb3du","KS5fXw==","IFZpbg==","LkZvcmVpZ25LZXk=","NjEy","Y2FuZg==","b3VyZWQ=","IE9yZ2FuaXphdGlvbg==","INCw","IEN1bHR1cmU=","KGNscw==","LF8=","OTAy","cmdiYQ==","7J2Y","LmRhdGFHcmlkVmlldw==","IGRvemVu","IEdlcw==","ODA1","NDY0","X3NoYXJlZA==","bmljaw==","IGhvc3A=","b21ldGVy","NDk1","IGNsYWltaW5n","MDMy","aWJsZXM=","cmlr","5piv","ZW5hcmlv","IGRlbmdhbg==","b2Ji","bW9udA==","X3Jhbms=","KCcvJyw=","IGFwb2xvZw==","UHM=","X3Bvd2Vy","IEdyZWU=","IGZ1bGZpbGw=","IGZpcmViYXNl","OTEw","IGZhcmU=","IEhpbQ==","IGJlYW4=","4oCmLg==","IFNQSQ==","X1JY","IHBlcmNlcHRpb24=","cmVsYXRpdmU=","Y29tcGlsZQ==","dXVt","dXRvcw==","YXVj","IEFzaw==","IGluZGljYXRvcg==","L3Ro","LnNldFN0cmluZw==","IFdpc2NvbnNpbg==","LkRvbWFpbg==","IGFydGlmaWNpYWw=","RGV2ZWxvcA==","IFNhcmFo","IGx5aW5n","KHNlYXJjaA==","IEVtcGlyZQ==","dXJyaW5n","5pe26Ze0","PSIkew==","IGdldElk","IFBheW1lbnQ=","dHJhbnNpdGlvbg==","IF0u","aXhpbg==","VlQ=","LXNlbGVjdA==","IGRlbW9uc3RyYXRlZA==","IGxhc3ROYW1l","ZW1wbG95bWVudA==","LmdldFByb3BlcnR5","IGZvdWdodA==","ZmlsZU5hbWU=","IFBlcnM=","NDUy","LWNhcmQ=","YXN0cg==","YXR0cnM=","IHByb21pbmVudA==","RGVzaWdu","YW5jb3V2ZXI=","44GX44E=","YXJkbw==","c2VjcmV0","IHJhZw==","IHBvaXNvbg==","LW1hbg==","LG9taXRlbXB0eQ==","NzQw","CXVu","aXR6ZXI=","IENhc2lubw==","IFJvc3M=","LWZvb3Q=","KHJlc3VsdHM=","UGxhbg==","IGxhc2Vy","6riw","X0RS","NTIz","RmFjZWJvb2s=","NDQ5","IGJvYXJkcw==","c3Rh","XV0s","Njc1","IHRpbGVz","U0laRQ==","ID1+","OTcw","IHByZW1pZXI=","b2NhYg==","IGVuY29kZWQ=","IHJlc2VydmU=","NjA5","IEFmZ2hhbmlzdGFu","IExpc3ROb2Rl","dXJscw==","IHN1Ym1pc3Npb24=","IG5ldQ==","NDc3","ICMrIw==","X1BPU1Q=","IG1vaXN0","ZWxsaQ==","ZWxsaWdlbnQ=","LmFsZXJ0","w7Nk","YnJl","IENvbGxlY3Q=","IGdyYXBoaWM=","IGxvbmdpdHVkZQ==","IFByb3ZpZA==","IENhbGN1bGF0ZQ==","eGZmZmY=","Y3JpdGVyaWE=","IHdhdGVycw==","cm9jaw==","bG9xdWVudA==","IFRyaWI=","NTEz","IGJ1cnN0","IHN1ZmZpeA==","Lk
V4dGVuc2lvbnM=","aXNoZXM=","aXZlbA==","IExJS0U=","IEdldHR5","LkFjdGlvbkV2ZW50","LnNsZg==","IEhBTA==","dXBhbA==","RUFS","NTI0","dWRp","X3RpbWVvdXQ=","VUY=","IFNpbmdhcG9yZQ==","IEFkdmVudA==","X2ludGVydmFs","Y2hhZnQ=","IEVtZXI=","IHRlbGVwaG9uZQ==","IFR1cms=","X2ludGVyZmFjZQ==","IE93bg==","IGVuY291cmFnZWQ=","PE9iamVjdA==","X1RleHQ=","IE9udGFyaW8=","IEFwcGx5","LmZpcmViYXNl","IGFudGli","UHJpb3JpdHk=","ZW5leg==","RGF5cw==","Y2lk","dXJyZW5jZQ==","Oy8=","aW5uZWQ=","0YHRjw==","IHZleg==","Znc=","Ly8k","YXR0YWNr","NDU4","IHN0YXJ0dXA=","YWluZXJz","LmZyYWdtZW50","b3BhY2l0eQ==","KGNvbm4=","aGVpbQ==","Lm5ldHdvcms=","KHN0cmVhbQ==","Njcw","IE5PTg==","dG9s","ODMw","IFhib3g=","IERT","IGNhY2hlZA==","IHByb3N0aXR1dGFz","IEJhbHQ=","KCdb","NTc1","IG5vZXhjZXB0","Iic=","IHNk","LnZhbGlk","X2Fn","IHJhY2Vz","NDgx","IHJvZA==","aXR1ZGVz","PD4o","NTQ0","LlByb2R1Y3Q=","Rm9ybXM=","TkVX","UGF5","CWJvb2xlYW4=","X2NvbnRhY3Q=","IEVsZWN0cmlj","c2tpcA==","IHd1cg==","IGNocm9uaWM=","X2RyaXZlcg==","OTQw","IFNhYg==","IFVsdA==","IFJhZA==","U1RBVFVT","IExld2lz","T0I=","IGdpZnRz","LlJlYw==","VFJVRQ==","IGludGVuc2l0eQ==","TWFya2Vy","LmNvbXBhcmU=","ZmZpYw==","Q29va2ll","IEJhYnk=","IEJpZ0RlY2ltYWw=","aWxldA==","IEhPTERFUlM=","IExhZHk=","IGx1bmc=","IEFsYWJhbWE=","IGRlc3M=","YCk7Cg==","IEJ1aWxkZXI=","X3JlZ2lvbg==","IG5ldXRyYWw=","OTA5","Qm90aA==","IGhw","IGhvcm4=","IHNlZ21lbnRz","IEVD","Ij0+Ig==","KHJlYw==","IFBp","R00=","IGxhcHRvcA==","U2NhbGFy","NDYz","aXNk","LWRpYWxvZw==","IEFuZGVyc29u","IG1pc3Rha2Vz","NzA4","IEhhbg==","amVz","ZXN0aW5hdGlvbg==","NDM2","IHByb21pc2Vz","Ymlk","IFNjaWVudA==","R0lO","IFBlcmZvcm1hbmNl","YmFnZQ==","LnVzZXJz","bGVhZGluZw==","IG9yYWw=","R3JhcGhpY3M=","NDg4","X1BUUg==","NTE4","aGFuZw==","IGluZXY=","cHJvY2Vzc2luZw==","RmFjdG9y","IE5B","JHN0cmluZw==","IGdyb3VuZHM=","LlNhdmVDaGFuZ2Vz","Y2xvY2s=","OTQx","Y3JpcGNpb24=","IE5ld3Rvbg==","Z2M=","LmluY2x1ZGVz","IGJsYXN0","ICctJw==","IHB1ZWRl","NDY5","LlNlc3Npb24=","IGdyZXA=","X2ZpbmFs","IEdheQ==","IEdpdmU=","aXJp","LXN0YXI=","IFVJSW1hZ2U=","X2Vwb2No",
"dWJi","ZW50aA==","IGVsaXRl","IGNhbXBhaWducw==","IFBvcm5v","X2Fzc2lnbg==","UHJvdG9jb2w=","IEJlaW5n","IEFpcnBvcnQ=","IGNvbnZlbnRpb25hbA==","IFdhdA==","IENJ","RVRB","IEFudGhvbnk=","IHRhYmxldA==","KGZvcm1hdA==","IGNvbnNpc3RlbnRseQ==","IElvd2E=","NDc0","IGF2YXRhcg==","MDI3","LmN1cnNvcg==","IVs=","IGhhbmdpbmc=","SGVy","U3VjaA==","JzsKCgo=","b3JnZW91cw==","KCk9PQ==","IHZpZXdNb2RlbA==","IOOD","IGVscw==","IEFnZW50","RmV0Y2g=","YXBvcg==","IGN4","cHJlYWQ=","IFBpZXI=","b2VmZg==","NjE2","U24=","ODkw","IFZpcnR1YWw=","QXBy","LldoaXRl","NjE1","X01PRA==","IFBvaW50cw==","5aSx","IGdlbmVz","IHZlbmRvcg==","IG1haW5zdHJlYW0=","PHNyYw==","IEVsaXphYmV0aA==","RGVjb2Rlcg==","LXN0YXRl","IEdsYXNz","bmN5","YWRpYW5z","X21vbg==","IFJlbW90ZQ==","IHdpcmVsZXNz","IE1p","5Yk=","NDY2","6KGo","c3RhZ2U=","IFRpbGU=","bGxpYg==","VmFyaWFudA==","PT0K","IGdvbGRlbg==","KFFTdHJpbmc=","LnB1dEV4dHJh","IERvbQ==","IEFuaW1hdGlvbg==","IGludGVyYWN0aXZl","aWZhY3Q=","6Zmk","TEVU","IGZyZXF1ZW50","IDw+Cg==","RmlsZW5hbWU=","IHNuZQ==","IEZvb3RiYWxs","IHJpdmFs","IGRpc2FzdGVy","aW9uaWM=","IERhbWFnZQ==","LlJlc291cmNl","LWVu","IFR5cGVz","Z2V0U3RyaW5n","KGJvYXJk","IGJvbA==","cGxhaW4=","enlt","4Liy","IHNjYW5uZXI=","aWxkZXI=","X21zZ3M=","5o8=","KGludGVudA==","IGRlc3RydWN0","IGJ1c3Q=","IEVtcGxveQ==","b25p","IFVJVmlld0NvbnRyb2xsZXI=","IG9kZHM=","ZWFyZXI=","R2VvbWV0cnk=","IHlpaQ==","X0VYUE9SVA==","IEF0dGFjaw==","IG5pZXQ=","IGltcHJlc3Npb24=","IEdpbA==","X3Byb2I=","NTI4","IENG","IEV4cGVyaWVuY2U=","L3BsdWdpbnM=","Lk1ldGhvZA==","IGJlbGllZnM=","TmF0aXZl","X2J1aWxk","IHZpZw==","IHJhbmtz","Y292ZXJlZA==","NzA1","c3VjaA==","R3VhcmQ=","LnBhY2s=","YWRkZXI=","ODA5","aXZpYQ==","bG5n","INCy0Ys=","NTUy","VGltZXN0YW1w","X25vdw==","IHBva2Vy","IHVuYw==","IHNoYXBlcw==","LXR5cGVz","X3BlcmlvZA==","cGs=","IHZldGVyYW4=","IHNvbm8=","IGFwcG9pbnRlZA==","b3ZlcmZsb3c=","LmRyaXZlcg==","X2NhdA==","dXR0","cGxhbnQ=","aW1i","IEFjY2VwdA==","IGNvbmNlcnQ=","CW5vZGU=","CXo=","Pz4NCg==","IGJhbm5lZA==","CSAgICAgICAgICAgICAgIA==","IHRveGlj","IGRpc2FwcGU=","NDcz","yJs=","IG
dyYWNl","YXRlZnVs","UmVwbHk=","IENydXo=","NDg2","IHNjcmFw","IGtleXdvcmRz","c2ltcA==","IG1vcnRnYWdl","IGN5YmVy","IEV4ZWN1dGU=","IGxhdGl0dWRl","aWZ1","LkNPTQ==","ZGJv","IHNvcnRz","IEdhcw==","b21pYWw=","LkxvY2Fs","Q2VsbHM=","LlJlcGxhY2U=","U3RyaW5ncw==","LmZpdA==","IFRoaXJk","JSIsCg==","IHt9Ii4=","IFNvbnk=","IFs6","NTg1","IGZhbGxlbg==","LicpCg==","aW5o","IE1D","IHJlZGlz","Q29kZXM=","IHByb2ZpbGVz","aG9vaw==","UmVkdWNlcg==","X0ZVTkM=","IG5hdmlnYXRl","c3RybGVu","IGhvcm0=","4Z4=","IFNS","LmJvb3Q=","IGRpZ2VzdA==","CWhlYWRlcg==","LmZpbmRPbmU=","5oE=","RGJUeXBl","bmlh","X21lcmdl","IGRvbm5l","L0dldHR5","X0NIQVI=","IGJhbmRz","LlVSTA==","YXJ0aWFs","IGZyZXE=","IHNpc3Q=","Tmc=","IHJlbmRlcmluZw==","XENvcmU=","V2lkZ2V0cw==","IFZB","IGFjdGl2aXN0cw==","U3Rl","PV8=","YWxsYQ==","U3RhbXA=","IGxvYWRz","IHh4","IExlYXJuaW5n","Lk12Yw==","dWly","KCIk","IGNvbm5lY3Rpbmc=","UmVhZE9ubHk=","dXJ1","IEVhZw==","QklU","X0RFTA==","5ac=","YXJyYXNz","ZXh0ZXJuYWw=","IFlPVVI=","IEJyZXc=","IEZpdmU=","IHJlc2l6ZQ==","aWdpZA==","ZXJhdGlvbg==","NjUz","INGN","NTM2","5Yqg","MDM5","IENhdGNo","2YE=","IExlb24=","YW1pbA==","LkJvZHk=","Q2xpcA==","L2xpc3Q=","LmJy","RWRpdFRleHQ=","CWRi","LkdhbWU=","KEJ1aWxkQ29udGV4dA==","YmFja2VuZA==","LlJlZA==","ZmFjZWJvb2s=","NTI5","LnVybHM=","bXI=","cm9sbGVk","LS0tLS0tLQ==","IGludGVydmVudGlvbg==","IHJldGlyZW1lbnQ=","IEtpdA==","IFBSRQ==","VXBwZXJDYXNl","IFNvY2tldA==","IDot","IHN0dWR5aW5n","IE1ldHJv","YXJkZWQ=","IGNvbnZlcnNhdGlvbnM=","Q2FsbGVk","IGV4YW1pbmU=","ZXJ0aWZpY2F0ZQ==","Lmd6","LXJlc3BvbnNpdmU=","IHJlZnVuZA==","X25ldHdvcms=","MDI2","YWxsb3dlZA==","ZW1wdA==","IG1lYWxz","Q2F0ZWdvcmllcw==","IHRyYXZlbGluZw==","IGtn","IHNoYW1l","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","IGV4cGxpY2l0bHk=","IG1hdGhlbWF0aWM=","IFN1aXRl","IFJHQg==","KioqKioqLw==","IG1peHR1cmU=","bGVhcm5pbmc=","LnRlbXBsYXRl","YXR0cw==","d3g=","CWN0eA==","LnByb3BlcnRpZXM=","IGRyaW5rcw==","IEVpdGhlcg==","c2V0VGV4dA==","LmdldERhdGE=","LnppcA==","IHJldmVhbHM=","PHRhYmxl","Lkhhc2hNYXA=","IEh1c
g==","KSIpOwo=","LmZyYW1ld29yaw==","IFNUQVJU","ZmVlZGJhY2s=","NDU3","IHNhZmVseQ==","Lmljb24=","Y29uZmlndXJl","LmxvY2s=","LmxheWVycw==","Lz4uCg==","IHJhbmtlZA==","X2ltcGw=","IEhhbmRsZXM=","IGhvc3RlZA==","IHVwZGF0aW5n","YWxidW0=","6Z0=","IHNoYWRlcg==","RWRpdG9ycw==","LXJvdW5k","W117","IHNlcA==","IEhp","VEVN","bG9va3Vw","Lm1hbg==","X0lOUFVU","IHRocmVhdGVuZWQ=","X0lNUE9SVA==","IGRyb3Bz","cnVpdA==","c2lk","Ym90aA==","IEV4Y2Vs","IGplcg==","b3JkaW5hcnk=","0LXQuQ==","VklFVw==","cmVwbHk=","ICk6Cg==","Y29sb3Jz","dmVyaWZpZWQ=","X1Ry","X3BhcnNl","IGNvbmdyZXNz","NjE3","UHJvbWlzZQ==","aW50cw==","IE1vdGhlcg==","LkFwaQ==","IER1cmF0aW9u","IGZpcnN0TmFtZQ==","aW5oZXJpdGRvYw==","IE1hcnM=","IGFwcg==","T0RZ","IHZpc2l0cw==","NjMx","IGhlYWxpbmc=","bGV0dGVycw==","KSkpOw0K","ZnV0dXJl","LkZyYW1ld29yaw==","IGtpc3M=","IGludm9sdmU=","IHNpbGVudA==","YWRvd3M=","IGFueWJvZHk=","c2No","Njkw","IHNvbGVseQ==","LWltZw==","IHByb3ByaQ==","IGluc3RydWN0","IGxpY2Vuc2Vz","IG1ldGg=","IGNvbmRlbQ==","IERvbWFpbg==","IEhhcnJpcw==","IHPDpQ==","Q0VQVA==","QmF0Y2g=","QGV4dGVuZHM=","IENPTlRSSUJVVA==","LkRhdGFGcmFtZQ==","NDcy","X3BhY2tldA==","cmVjaXNpb24=","IGZvY3VzaW5n","Lmh0","X18iOgo=","OkdldA==","IEtD","IHBhc3NhZ2U=","U2VnbWVudA==","X2NlbnRlcg==","LXpB","X0JM","IGNvbnZpbg==","IGNsYXNzaWZpZWQ=","IE5TTXV0YWJsZQ==","X2Fw","dGlsZQ==","UmVjdGFuZ2xl","NDky","KG51bXM=","dmVucw==","IFVJQnV0dG9u","IEZlZGVy","YW1v","IG91dGxpbmU=","IFBhcnNlcg==","IOKJ","IFdvcmtz","LlNjaGVtYQ==","IGVuZ2luZXM=","NjM3","NTYz","X2NvbW1vbg==","NTQy","X29sZA==","IHNldENvbnRlbnRWaWV3","IC8vLzw=","IEJU","Zm0=","IGRpdmVycw==","X3dlaWdodHM=","ZW1hcms=","IEFDVA==","IHByb3BvcnRpb24=","b3ZlcmxheQ==","LmRpcm5hbWU=","IEdpdA==","X1JFRkVSRU5DRQ==","PD4=","bGI=","X3J1bGU=","6LSl","IFB1dGlu","IHNsZWVwaW5n","KCk6DQo=","IHByZXNlcnZl","IHBhcmxpYW1lbnQ=","IExvb2tpbmc=","IHBpY2tpbmc=","IERpc3BhdGNo","IHNsaXA=","65M=","IEx5bg==","X3NpZ25hbA==","Y29uZmlndXJhdGlvbg==","IFBpdHQ=","NDkx","YWRlbg==","cHJvY2VkdXJl","IGVudGh1c2k=","ZmlnaHQ=","IENvbnNpZGVy","IHRvcm4=","Q29ubm
VjdGVk","LmNvcw==","X2dyb3Vwcw==","IFRoaW5r","IGRlbGliZXI=","IHJlc2lk","d29ya2luZw==","LmNvbHVtbnM=","IENhbGxlZA==","IGVzbGludA==","PiIs","X0RPV04=","aGlzdA==","IEFkdmFuY2Vk","IHJld2FyZHM=","YWN0b3Jz","IHNpbGVuY2U=","NDc5","IG15dGg=","IG5ldXI=","NTE5","IGF1Y3Rpb24=","LkdldFN0cmluZw==","ZWtz","KHByb2plY3Q=","NTk4","CW1zZw==","CW91dHB1dA==","IGNvbXBsYWludHM=","NTUx","LFM=","IHRibA==","ICwKCg==","cmlvcnM=","YWhyZW4=","IGxhd3llcnM=","cmVkdXg=","X3N5bWJvbA==","b2ZmZWU=","X1JFU1VMVA==","KE5hbWU=","VVRD","LmN1cnJlbnRUaW1l","IG9yZ2FuaXM=","LmFyZw==","NTMz","IG1pbmlt","d2ljaw==","IHJlY2VpdmVz","QmFsYW5jZQ==","IHNwZWFrcw==","IERheXM=","IEJlbG93","NDgz","dGlwbw==","UHJlc2VudA==","IHJlc2Vydg==","aHA=","IHJpdA==","X1JJR0hU","LS0p","IGNoYWlybWFu","Nzgx","RElT","IEJPT1NU","IGV4cGVyaW1lbnRz","Njg3","X18pOwo=","IHN0YW1w","IGZlcnQ=","IGZvbmQ=","VGVy","ZWx2ZQ==","dXJlbg==","K2k=","ZW5kZW5jeQ==","IHZpcnR1YWxseQ==","Li4uIg==","772e","OTI1","LWNlbnQ=","X3VuaXF1ZQ==","IHByaWNpbmc=","bWlj","UkVTSA==","IDo6Og==","IGFubm90YXRpb24=","IENpcmNsZQ==","b25nb2Ri","aXRhcw==","ICUo","KGNvbXBvbmVudA==","INC+0LE=","KHBvcnQ=","LWhvdXI=","Lm9iag==","TEJM","IGp1cnk=","R0JU","IHNweQ==","IFByb2Zlc3Npb25hbA==","ICIiOwoK","IHN0cmlraW5n","IGRpc2NyaW1pbmF0aW9u","IHBheXM=","OTM3","bGljdA==","ZW50ZXM=","IHRocm93aW5n","IFBsdWdpbg==","KGRlZg==","IFJ1bnRpbWVFeGNlcHRpb24=","IE1pZ3JhdGlvbg==","NTk5","IGRpYw==","YmFn","b25pYQ==","IGNvcnJ1cHRpb24=","NzA0","KE1hcA==","IHByeg==","LmR0bw==","IGFjcXVpcmU=","U3RhdGVUb1Byb3Bz","IGxvdmluZw==","0L7Qtg==","X3BhdHRlcm4=","IGVtb3Rpb25z","IHB1Ymxpc2hlcg==","X2Jl","IGNvdXBsZXM=","NDk4","b2o=","IENoYXJ0","IHRyb3A=","LnRvb2w=","IGVzdGFibGlzaG1lbnQ=","IGRvbA==","NjU0","IHRvd2Vy","IGxhbmU=","IFN5ZG5leQ==","IGZpbGxpbmc=","Y2xhaW1lZA==","NjQ0","IGRpYWxvZ3Vl","IGNvbnZlbnRpb24=","Ym9va2luZw==","cGFyZW5jeQ==","5rE=","IEdlbmVyaWM=","NzE4","XFNjaGVtYQ==","NDgy","NjE4","IHJhbmdlcw==","L2No","IHBhbmVscw==","IHJ1bGVk","55Sf","LnRz","X3NldHM=","IGNsZWFudXA=","UHJldmlvdXM=","IEFuaW1hbA==","NjA3","
KCQo","IEF2ZQ==","b2xsYXI=","MDI4","X2V2YWw=","CU5hbWU=","KHRyZWU=","ICJd","NTcx","IGR1dGllcw==","PScv","Q2xpY2tlZA==","IGRpZmZlcmVudGx5","IENsYXJr","IGRpdA==","b2xvZ2lzdHM=","IHN5bmQ=","IHNlbmRz","LWtub3du","a2I=","IE1vZGFs","aXRhdGl2ZQ==","IHJhY2luZw==","IGhpZ2hsaWdodHM=","IFNpbW9u","IENhcHRhaW4=","5L+h","IENC","Y29udGlu","YXJhbg==","IHBoeXNpY3M=","cmV0dHk=","ZXRhbA==","Lm1k","YXhpb3M=","IHNwZWFrZXJz","IHByZXA=","IGF3YXJkZWQ=","7KeA","IENvcm4=","IE5hdHVyZQ==","VURJTw==","NzM3","IHByb2o=","LXByZQ==","W3U=","RmVhdHVyZXM=","IGlzRXF1YWw=","QmluYXJ5","c2ln","IGNvbmZ1c2lvbg==","NTQ2","NTY4","IEhhdA==","IGt0w7M=","LmNvbmZpZ3VyZQ==","TU9O","NDk0","L2VkaXQ=","X0FkZA==","LHRydWU=","NTQx","IGNsaQ==","RXJyb3JNZXNzYWdl","LWxvYWRlcg==","RGltZW5zaW9ucw==","dWx0aXBseQ==","IHshIQ==","IFNxbENvbW1hbmQ=","IHNwb2tlbg==","IHBpY3M=","IHRveQ==","KEtleQ==","IExvb3A=","2Kg=","RUFUVVJF","aW5jdGlvbg==","X3NldHVw","d3JhcHBlcg==","IHRvbmc=","Y3VsYXI=","T3B0","LlBs","PSIs","KGxlbmd0aA==","dW1u","IGNocm9t","IHNldmVudA==","IElsbGVnYWxBcmd1bWVudEV4Y2VwdGlvbg==","NDc4","CXN0YXJ0","IGJlZ3Vu","Q0VQVElPTg==","ZGF0YXNldA==","ODI1","IEZhaWxlZA==","Y29scw==","NDU5","IGtuZWU=","aW1vcmU=","LnNwbGljZQ==","c2hlbGw=","aWdnZXJz","IHRoZW1lcw==","OTk1","IERK","IEFzc2lzdGFudA==","LSQ=","TWF5YmU=","IG9yZGVyaW5n","IEludGVsbGlnZW5jZQ==","IE1hc3NhY2h1c2V0dHM=","IGZhaWxpbmc=","ZWxzb24=","R3JlYXQ=","PWk=","LnJlc3Q=","IGludml0ZQ==","LWRpc2FibGU=","Lkdyb3VwQm94","4oCZZXN0","IHRhY2tsZQ==","Z3Y=","ZXR0ZXI=","ICksDQo=","X3J1bGVz","Lndhcm4=","ZnVuY3Rpb25z","IENocmlzdGlhbnM=","IGJhY2tlZA==","IHNsaWRlcg==","IGVuam95aW5n","bmVzdA==","IGhpag==","X21z","Ly8q","QW5ub3RhdGlvbnM=","IFZhcmlhYmxlcw==","PFY=","KHNlcnZlcg==","IE9yYWNsZQ==","ZWxlbWVudHM=","IG9yZ2FuaXNhdGlvbg==","X3BvaW50ZXI=","IEhlYWRlcnM=","W2Q=","IGRlYWRsaW5l","aXNzYQ==","IGtuaWZl","IE5BU0E=","IEhlaWdodA==","Nzg0","IEFzeW5j","IHZlbnVl","LmRvbQ==","Ym91cm5l","IEhhd2Fp","IG1lbW8=","aWN0aW9ucw==","IHN1cnZlaWxsYW5jZQ==","b21p","L2Fzc2V0cw==","NTg3","IGVkdQ==","xJs=","IHJvc
3Rlcg==","IGhpcmVk","IFRvaw==","IHBsYWNlbWVudA==","dXJhdGlvbnM=","IHNldFN0YXRl","IE1hZ2F6aW5l","IGhvcnJvcg==","VHJ5","IGxhZw==","IEV2ZXJ5b25l","dGh1cg==","KSk7DQoNCg==","LnJldHVybg==","IHN5bXA=","4paI4paI","IG5pZ2h0cw==","d29ya2Vy","IGFsZQ==","ZW5uZXNzZWU=","LnN0ZXA=","IHN5bmNocm9uaXplZA==","NDg3","b3VyaQ==","RG9lcw==","LmNoYW5nZQ==","Zm9u","LnNldEJhY2tncm91bmQ=","aXJjdWxhcg==","NDc2","Ky0=","IENJQQ==","NzI5","IEphbmU=","IFNpbWlsYXI=","LUk=","bGV2ZWxhbmQ=","IHByb3NwZWN0","X2ZvdW5k","CWNvbG9y","LkRpYWdub3N0aWNz","IGFubm91bmNl","IGFzc3VtZXM=","L3Ry","IGJk","OTg3","IENhcmJvbg==","IGFuYWx5cw==","NTY0","LmRlc3Q=","bmlr","IExpZQ==","LWluZGV4","RHJhd2FibGU=","IFRBRw==","IHRyaWFuZ2xl","X0ZMT0FU","CQkgICAgIA==","LmJsYWNr","dnVl","Y3VyYWN5","IGFmZmVjdHM=","OTA2","IHN1cmVseQ==","U2xpZGVy","dWtp","Y2VyeQ==","IHVudGVy","LnByb2ZpbGU=","b3Jkb24=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","bGVhdmU=","IHNtYXJ0cGhvbmU=","Z2ll","IGNvbnNwaXI=","IHR1dG9yaWFs","57G7","IGNhYg==","NzY1","IFN1bW1hcnk=","KgoK","w6Ro","IlRoaXM=","IHNsaWRlcw==","Ijwv","LmRldg==","Jzw=","IFJpbmc=","xYJh","IGtvdGxpbg==","LmR1bXBz","IGJhc3M=","7Is=","UE9JTlQ=","IHV0dGVy","IMOpcw==","LmZ1bGw=","T0xM","IGNlcmVtb255","c2xvdA==","IGFpbXM=","dG9vbHRpcA==","LnNjb3Jl","LWRk","NjQy","IHByb3g=","UmVjb2duaXplcg==","ZHluYW1pYw==","w6RuZA==","L3N0ZA==","RFU=","IE5vdEltcGxlbWVudGVk","KCItLQ==","UkFX","NjM1","IGV0aG5pYw==","YW5ubw==","IGNoYW1waW9uc2hpcA==","LHNlbGY=","IGFjY2VwdGFibGU=","IFNwcml0ZQ==","W3R5cGU=","w7xo","IFZL","KGpQYW5lbA==","NTQ4","aXRy","66A=","YXVyYQ==","IGZhY3VsdHk=","YXZlcnM=","IFJlY29yZHM=","LlNlY3VyaXR5","IGNvbnN0cmFpbnQ=","LkJs","VWludA==","YmFsYW5jZQ==","IGNvbW1l","IE5paw==","U3VwcHJlc3NXYXJuaW5ncw==","IE9jZWFu","NTU0","X0lk","RGF0YVNldA==","IGluc2VydGVk","IjsNCg0K","4oCz","aXBwZXQ=","IGFubml2ZXJzYXJ5","IHJldGlyZWQ=","b3JjaA==","IHBlcnBldA==","XEZvcm0=","IGludm9sdmVtZW50","X3VzZXJuYW1l","YWxlbQ==","X1NFUlZJQ0U=","IEluZGlhbmE=","IGNpZ2FyZXQ=","YXJ0eg==","IFJD","IG1lYXN1cmVtZW
50cw==","572u","IGFmZmlsaWF0ZQ==","YWNpb25hbA==","LXNlY3Rpb24=","X2NvbnRyb2xsZXI=","dmFyZA==","X2Vs","IFRveQ==","PFA=","TWFjaGluZQ==","w7ptZXI=","IFllYWg=","IllvdQ==","IG1vbA==","LkNs","Y29udHJvbGxlcnM=","IHN1c3BlbmRlZA==","Kys7Cgo=","QVRU","IHByb2plY3Rpb24=","UGFkZGluZw==","NTg2","Lm1hdGg=","Njg2","ZmFjdG9yeQ==","MDQy","IGdhbW1h","KCk+","Y3ljbGU=","IEJ1bGw=","cGF0aHM=","IHVucA==","IHZpZXdEaWRMb2Fk","X01vZGVs","IGFzc2VydFRydWU=","IHJhdGVk","RGVjbA==","dmVydGVk","IERhdA==","YnJldw==","IHBvaW50aW5n","TXM=","IFBvaW50ZXI=","KSc=","X25vbg==","NTI3","IFNFQw==","IHllYWg=","Z2VuY3k=","aW5pdGlhbGl6ZQ==","Zmx5","NzEx","W3Bvcw==","LGc=","VGVsZQ==","MDM0","IGpva2U=","IGNsYXVzZQ==","LmZpbmRCeUlk","ZW5lcw==","KGluc3RhbmNl","NjI2","wqM=","OTE1","IHNsaWM=","X2hvbWU=","ICovfQo=","X3BhZ2Vz","KHNlcnZpY2U=","OTA1","UlA=","IEFtb25n","LmdldEN1cnJlbnQ=","ODA2","44K5","IHNsZWU=","PTw/","X3Byb3A=","Zmx1c2g=","IE1N","QmVs","Tm90ZXM=","ICovCgoK","MDM1","IHJo","VGFibGVz","IEp1","IFwNCg==","bGljaGVu","IEluc3VyYW5jZQ==","XQoKCg==","IGNvb3Blcg==","4oCUdGhl","Lm1hdA==","NDg5","IGZvaQ==","KGF1dG8=","TWFyZ2lu","NjM2","IHJlc2lkZW5jZQ==","NTU5","IEhpc3Rvcg==","IH49","RGk=","ICcpCg==","IGV4Y2x1ZGU=","LkRyb3A=","JyI7Cg==","IGNvYw==","X3VwbG9hZA==","SGlkZQ==","IFVua25vd24=","IG5vcm1hbGl6ZQ==","X3JldA==","LicKCg==","Lm5vZGVz","ODcw","LkRhdGFTb3VyY2U=","YmxlbXM=","IGdlbnRsZQ==","OiQ=","JykpOwoK","LlJlc291cmNlcw==","4og=","IFRhaQ==","VkVE","IEd1bg==","bGVhbnM=","IERvYw==","LlZvaWQ=","IEFtZW5kbWVudA==","ODY2","ZXNzZWQ=","NzA2","IHJlY2lwaWVudA==","Lk5vZGU=","b3Zv","IGFsaWduSXRlbXM=","IFVuaXR5","IFJvbWU=","YnVybg==","IHZvbHRhZ2U=","IFNIQQ==","NTM0","NTcy","IEdPT0Q=","aGVscGVycw==","LyoqKi8=","IGVsaW1pbmF0ZQ==","d2Fw","X2FuZ2xl","IHJlZnVnZWVz","CWFzc2VydEVxdWFscw==","IHByb2Jl","KCcuLi8uLi8=","eW91cg==","IG1lcmNo","VUJMRQ==","CXJlc3BvbnNl","X0RFRg==","IGVudmlyb25tZW50cw==","b3VzaW5n","IHJlc3RyaWN0ZWQ=","IENPTlRSSUJVVE9SUw==","NjIx","IGNvbXBhbmlvbg==","4bqj","cG93","dXJ0bGU=","Ymll","LlBlcmZvcm0=","PW4=","cmVkaXM
=","IGRpdmlkZQ==","IGNvbGxlY3RpdmU=","RGlmZg==","RHluYW1pYw==","aXNTZWxlY3RlZA==","YXN0eXBl","IExvdA==","IFN0YXRlbWVudA==","aWNpcGFudA==","YWto","NTE3","IHNlcmlhbGl6ZXI=","X0NGRw==","YXZhbA==","IHZpZXdlcnM=","IEZP","T2Nj","IHJvYnVzdA==","IE1pdA==","X0FORA==","VHJhbnNpdGlvbg==","dW5hdGU=","IHByaWRl","IGRyYW1hdGlj","IFBhZ2Vz","X3R1cGxl","IGNvcGllZA==","bW4=","IG91Z2h0","IGVxdWFsaXR5","X2hhcw==","X1dS","NTcz","ZW1p","IHN1cmdl","aWxsbw==","KCl9","MDgx","IHBlcmY=","OTIx","dWxr","IGludmVzdG1lbnRz","Nzg1","IGdlbmVyYXRpb25z","IHJlc29ydA==","IHRydXN0ZWQ=","X2ZyZXE=","IGZvcm1h","QVRJT05T","IEh1","IEdyYWQ=","X2NwdQ==","ICIsCg==","cmVzc2U=","KCoq","IGhlcmVieQ==","IGxha2U=","X1NUQUNL","IEJ1cmVhdQ==","IHN1c3RhaW5hYmxl","IFBF","IGRlaQ==","IEFuc3dlcg==","UGx1cw==","L3dlYg==","IHN0ZXI=","IG1vdW50ZWQ=","X2NsZWFy","Zm9ubw==","aWFuY2Vz","X2ZpbmQ=","IGNvbmZ1c2Vk","X2Jpbg==","REVDTA==","IGluc3RhbnRseQ==","VUlU","X0RP","U2V0dXA=","a2Vl","X3ByaW50Zg==","X3N0bXQ=","IFN0ZWFt","cHJvZg==","bHY=","IHNvbHZpbmc=","bGF0b3I=","b3R5cGVz","QW5kcm9pZA==","X2VzY2FwZQ==","TGVhdmU=","LmdldFRpbWU=","ODEx","aWZz","IGNvdg==","IENsYXNzaWM=","LWRhcms=","NTI2","RGlzcGF0Y2hlcg==","LWdyYXk=","IFBhbGVzdGluaWFu","LmRlZXA=","IEluamVjdA==","IHJlZmxlY3Rpb24=","NTM4","IGh5cG8=","Y29uc3RydWN0b3I=","LmFwcGxpY2F0aW9u","eXN0ZXI=","4pU=","c2Nob29s","IENvdw==","NTkz","IGZvb3RhZ2U=","LWlucw==","IC8qKjw=","YXRvbQ==","IHByb2ZpdHM=","OTIz","IGJvb2tpbmc=","X3RocmVzaG9sZA==","IExpdmVy","IGNpdGl6ZW4=","Yng=","IFN0b3Jt","IENvcnA=","IHdpZGVy","Iikpewo=","X0FDVElPTg==","aW9ycw==","YWlzZXM=","Om5vbmU=","IGNpdGVk","ImZtdA==","QXVn","Y29tYg==","IHdoaXRlcw==","IHNlc3M=","Xl4=","aWdodGg=","IHRhbmc=","X0NBUA==","NjE0","IGludGVyYWN0aW9ucw==","NDk3","IGdhcmQ=","NjQ2","IHByaXpl","NjQ3","YWZrYQ==","VHJp","XEVsb3F1ZW50","IER5bmFtaWM=","55CG","Z3A=","IHJlYWxt","IE5p","IEVkd2FyZA==","IGlkZW50aWZpY2F0aW9u","IHBoeXNpY2FsbHk=","5pys","IHBpY2tz","LWZyaWVuZGx5","PGk=","aWZpY2U=","X0FQ","TG9nZ2Vk","NTUz","fSIu","L3V0aWxz","IC4uLi4=","RU5USUFM","KEFjdGl
vbg==","J10pOwoK","IHByb3Rlc3Rz","b2xpbmU=","X1JFVFVSTg==","IHBvcHVsYXRpb25z","IFJhaW4=","ZHVw","b3JpYWw=","IEF1dGhvcml0eQ==","X2V4cHI=","MDc1","LnVz","IGNvcnJ1cHQ=","CWltcG9ydA==","PGNoYXI=","IExFRlQ=","IGNhYmluZXQ=","IG5laWdoYm91cg==","IFNxbFBhcmFtZXRlcg==","YXR0ZXJlZA==","ZW1pYQ==","IHJldmlld2Vk","IEhlbGxv","YmxvY2tz","KHByb2Nlc3M=","OTk3","IG9ic2VydmF0aW9u","cmF0aW5n","Lmdsb2JhbA==","IHByZWZlcmVuY2U=","LnByZXBhcmU=","IGRvemVucw==","V29ya2Vy","IGNhbGN1bGF0aW9u","IFRvd2Vy","YWlyeQ==","IElTTw==","IGh1bWFuaXR5","LmFzSW5zdGFuY2VPZg==","NzEy","IGR5cw==","IHBpZXI=","aWd1ZQ==","IGFzc29jaWF0ZQ==","IGludGlt","bm90aWZ5","KHt9LA==","ODI4","IFJlcHJlc2VudA==","cGhldA==","c2V1ZG8=","64uI64uk","LlBvc2l0aW9u","IGNsb3N1cmU=","KGNsYXNz","CXRpbWU=","IE9yYW5nZQ==","X29wcw==","IHBvcHVw","IEltcHJv","X3NlY3JldA==","IEV1","LnNldExheW91dA==","dWxseQ==","IHNjcmV3","IFNpemVk","IENPTVA=","IG5vdGlmaWNhdGlvbnM=","VHJhbnNmZXI=","RW1pdHRlcg==","KG9sZA==","bGV0aWM=","NDkz","IC0KCg==","IHBhbmlj","NzE1","IExDRA==","cnVsZXM=","IGFmZmFpcnM=","IEZpbGw=","X0lSUQ==","OTEy","YXR0YWNobWVudA==","IHZvbQ==","PGJ1dHRvbg==","NTk1","IHRleHRz","IGFjdGl2YXRlZA==","LmFjY2Vzcw==","KHJlYWRlcg==","VGVt","IGNvcm9u","cm9waA==","RE1JTg==","IGVtZXJnZWQ=","IGluZmxhdGVy","IEluZGVwZW5kZW50","b3Jpb3Vz","IERlbGhp","Njcy","IGdseXBoaWNvbg==","IENhcmw=","U2k=","IGV4cGVyaW1lbnRhbA==","LmJhcg==","SUFO","IHNxbGl0ZQ==","Y2Npw7Nu","OTA0","X0JBQ0s=","LG5hbWU=","aG9ydA==","IHRlbnM=","NTQ5","6rM=","dXNpdmU=","IGdlbnVpbmU=","IGJ1Y2s=","L2Rpdg==","LnJvb20=","X05FVw==","ZXN0YWRv","IEFyaw==","b2NvbHM=","LmdlbmVyYXRl","dG91Y2g=","Zml4ZWQ=","ICco","IHJlZmVycmluZw==","IG92ZXJ3aGVsbWluZw==","KGxldA==","IGZ1ZQ==","NjIz","X0VOVg==","d29tYW4=","RmlndXJl","YW5pbWF0ZQ==","IE1vcnQ=","IGxvbmdlc3Q=","Y29sbg==","VE0=","Ol8=","cmllbA==","LE4=","IFJBTQ==","IGp1c3RpZnlDb250ZW50","IGFjdGl2ZWx5","L3B1YmxpYw==","IOuw","R2l2ZW4=","T1RBTA==","5aSx6LSl","U2VxdWVudGlhbA==","IHN1cHBsZW1lbnQ=","LmFi","IGNhdGVnb3I=","fX0sCg==","YWhhbg==","J3Vu","b3NpdHk=","IGFjY29t
cGxpc2g=","VXRpbGl0aWVz","LnZpZXdz","LmNu","Y2VpbA==","IENCRA==","IFJG","UEVH","IEdpZnQ=","QVlT","IFdJTg==","cGFuaWVk","IMWf","IG9ic2VydmVy","IHNtZWxs","IHs6","TGlua2Vk","PlsK","b2xlcg==","IGxpYmVydA==","IGAK","IHdlbm4=","bGF0ZWQ=","IGltbXVuZQ==","KE5vZGU=","IFByb2JsZW0=","IEFicw==","bG9ncw==","IC4uLw==","IEFEQw==","IH19Ij4K","PicpOwo=","PWI=","IFdpbmQ=","bGFob21h","IGFsbG9jYXRl","b3JpYW4=","IHByZXNjcmlwdGlvbg==","LXF1YWxpdHk=","IE1heW9y","ODU1","aW5lbHk=","ZW5kZm9yZWFjaA==","IENvbXBsZXg=","a29t","NzA5","VFk=","Nzkw","XV0u","LlN0eWxl","X21hbnk=","JywnJA==","IGJhcnJpZXI=","IEZldGNo","IE1hcnZlbA==","IHJlc2lzdA==","0L7Qs9C+","YmlkZGVu","IFJ1bm5hYmxl","OmZhbHNl","ODk5","IGJ1aWxkcw==","IFN0YWdl","IGR1Yg==","ZW1wbw==","LnNpdGU=","NTU4","OwoKCgo=","OTk0","IERlbnZlcg==","IHJldmVs","IHRyaWdnZXJlZA==","IGRpY2U=","X2ZhaWw=","IGdj","ODMz","NTg5","CVg=","IFRocm93YWJsZQ==","Nzc1","LnJvdXRlcg==","IFJldm9sdXRpb24=","0YDQsA==","X05PTg==","MDU1","n6U=","NTc4","IGVsZGVy","IGFicm9hZA==","INC1","IEFkdWx0","Ymxy","Z2x5cGhpY29u","NjEz","IHByb21vdGluZw==","IGl6","IFNvbGlk","NjQ1","X2xvYWRlcg==","ZWFybHk=","LmVuYWJsZWQ=","LWVkaXQ=","IFVM","X3BsYXk=","IEludGVycnVwdA==","IGFkdmFudGFnZXM=","dWNsZQ==","IG1lY2hhbmljYWw=","LnRhYmxlTGF5b3V0UGFuZWw=","IFdvcmtpbmc=","IGFub255bW91cw==","UmF0aW5n","aWdpb3Vz","X3Bob25l","LmFkZEFjdGlvbkxpc3RlbmVy","IGZyYW4=","dW5kZW4=","ICopJg==","X2Jvb2w=","dWxhdGl2ZQ==","IGNvbmU=","IE11bHQ=","IG3Dtg==","IEZvcndhcmQ=","XSk6Cg==","IGNvbnZpbmNlZA==","YWN0ZWQ=","NjQz","44GT","IENvbmZpZ3VyZQ==","IGNlaWxpbmc=","RGVy","IHBhc3NlbmdlcnM=","R3JvdXBz","IHNvY2Nlcg==","L1c=","YXZpb3Jz","c3dpdGg=","IFpvbmU=","Lk9wdGlvbnM=","IE1vbQ==","aWVkZXI=","QXJyYXlz","IHRyZWF0bWVudHM=","IHByb3RlY3Rpbmc=","ZmFj","IHBpY2tsZQ==","QnV0dG9uSXRlbQ==","NzEz","IGJsb2NraW5n","c3RyYXI=","w7I=","IEV4cG9ydA==","IHRocmV3","b3R0YQ==","IEJBU0U=","Lndz","LkxFQURJTkc=","b3JkZXJCeQ==","X2RlbGF5","IFB1","LmRsbA==","IENob29zZQ==","OTky","UG9saWNl","IEJFR0lO","Ym94ZXM=","IGRpYW1vbmQ=","LGw=","IAkJCQ==","IGN1cmlvdX
M=","NjI0","dHY=","IGVyb3Rpc2NoZQ==","YWNrYWdlcw==","CVNldA==","VGljaw==","LmJvcmRlcg==","c3RhdGljbWV0aG9k","IGNoZXI=","aW52b2ljZQ==","IGNydQ==","IGRlZmVjdA==","X21ldGFkYXRh","cmVsYXRpb24=","aWthbg==","W04=","KFF0","KEJhc2U=","5oGv","YmVhdA==","IEVtcHR5","CW8=","X3NoaWZ0","IHJlZ3JldA==","NzIy","VGhvc2U=","Q2VudA==","IFBvcnR1Zw==","IElzbGFuZHM=","IFRJTUU=","TWFuYWdlbWVudA==","OTk2","LXNw","NTM5","w6ptZQ==","IG5vdGlvbg==","dW5pZnU=","UEs=","ODI2","6KGM","IENVUkxPUFQ=","XCJc","VVY=","57o=","ZHJh","Y291","PWA=","IERlc3Ryb3k=","cnA=","LmNhbmNlbA==","R0c=","cnVudGltZQ==","IFZ1ZQ==","IHByb2dyZXNzaXZl","L3NlcnZpY2Vz","IHJ1bm5lcg==","X0ZSQU1F","LlRvb2xTdHJpcE1lbnVJdGVt","ICcsJw==","ZGVsYXk=","PXV0Zg==","IHNjcmVlbmluZw==","IHB1bGxpbmc=","b21hcw==","IGFudGg=","LW5ldw==","L2xvY2Fs","IGlQYWQ=","IHR3aXR0ZXI=","IGR5aW5n","IGhlYXZlbg==","IFVJbnQ=","IFNlbmF0b3I=","IHByZXN1bQ==","IFdhbGtlcg==","IG92ZXJjb21l","ZXRlY3Rpb24=","IGVtYmFycmFzcw==","Q2hpbmE=","NjM5","SW5jbHVkZQ==","Uk9MTA==","IGRhdGFUeXBl","RGF2aWQ=","4Lij","bG9w","LW1vbnRo","IHNjYXI=","IFNhZmU=","ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio=","IGFjY2Vzc29yaWVz","IHJhbXA=","X1VTRQ==","IGNvbnRyYWQ=","KSldCg==","IHByZXN0","IEhS","IFJhcA==","IHVzaXpl","IGNhcGFiaWxpdHk=","IGNvcnQ=","LW5leHQ=","MDc3","NjI3","IGJ1cmRlbg==","ODIy","X3JlYWRlcg==","IEBA","cmVndWxhcg==","IEth","MDM2","TUFO","IGFzdHI=","ICcnKQo=","IGZlZA==","IHBhcnNpbmc=","IFllYXJz","IGJyb2tlcg==","Ijp7Ig==","IGFrdA==","SW52ZW50b3J5","YWJlbGVk","IGFyZ3BhcnNl","KioqKioqKgo=","dmVyc2F0aW9u","IGNvcmQ=","IFRp","IGhvcGVmdWxseQ==","IGFo","dmVyYg==","IHN0b2xlbg==","LkVudHJ5","IGV4cGVjdGluZw==","T3JpZW50YXRpb24=","IHBvd2VyZWQ=","IHBlcnNpc3Q=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","J10pOw==","JykpLAo=","IENhc2g=","CWl0ZW0=","ODE4","Z3JhZGVz","cm9wb2w=","YmFzaWM=","ICIpOw0K","IGF3YXJkcw==","KHJhbmdl","LWFsbA==","IElCT3V0bGV0","IEluZGVlZA==","LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS
0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ==","IHN0b21hY2g=","IGZsb3dlcg==","IHNldw==","X3RpbWVz","YXZpcw==","UVN0cmluZw==","IFJvdXRlcw==","X3Byb3Q=","IGNvbWVkeQ==","IGxvZ291dA==","IHdvb2Rlbg==","IHBvc3Rlcg==","cGllY2U=","LkpvaW4=","IFBvaw==","Y2Vsb25h","bXV0ZXg=","Ow0KDQoNCg==","IHN0cmlrZXM=","Nzg3","TG9hZGVk","KWFyZw==","ZXNh","VW5pdGVk","RXA=","UEVMTA==","ODA3","IEF0bGFudGlj","dWxsZXQ=","NjUy","YXBwbGU=","IHNldHRsZWQ=","YWNvbg==","IHByaW50ZXI=","IEdD","5a6a","IHJlbmRlcmVk","LOKAmQ==","aGVpdA==","c29jaWFs","Lmdl","NzE0","IFJpY2s=","IFV0YWg=","Z290","b25pY2Fs","IFNjcm9sbA==","IFNjaWVuY2Vz","IGp1Zw==","IGFtcGw=","ZW50aQ==","TEVGVA==","IHRhYnM=","IGVub3Jtb3Vz","LmdldEtleQ==","bG9jYXRl","LkVY","LnN0b3JhZ2U=","Lldl","IHRvYXN0","IEFkZGl0aW9uYWxseQ==","ODgy","IE5PVw==","NTQ3","X1VQREFURQ==","IHRyYW5zZmVycmVk","dGhh","LkRpc3BsYXk=","X3Vp","SURFTw==","IG1lYW5pbmdmdWw=","IE1vc2Nvdw==","LHRoaXM=","IFZpY3Rvcmlh","5pS5","INCf","LnN0YWNr","IEJhcm4=","cGFyZWRTdGF0ZW1lbnQ=","OnN0cmluZw==","IGJpag==","IFNUQVRF","IGVtcGxveWVycw==","CWlucHV0","KHw=","IGxleA==","aW52b2tl","CW51bQ==","Kyss","YXRpYWw=","b3JzZXM=","IGZvcms=","X3R4dA==","IEFudG9uaW8=","ICg8","YXZlcnNl","IGRldmFzdA==","44CA","LkRlYw==","IEdhcmQ=","L3Vp","LiU=","dHJp","IHJvbGxlZA==","VmFsdWVQYWly","aXR0ZW4=","IFRoZXI=","IHZyb3U=","IEZsb3c=","IEZpbmFuY2U=","IENvbWI=","SEM=","LnNldFZpc2libGU=","aXNs","IHBr","Nzcz","IHVwc2V0","KHJhdw==","IFZpY2U=","ZWF0dXJlcw==","IExhbmc=","MDI5","TG9va2luZw==","NzY3","IEFTVA==","IHRyaXBz","IEp1c3Rpbg==","YnJvd3Nlcg==","PSInLiQ=","LnZlcnRpY2Vz","ODIx","LWNv","fS97","ID8s","IERvbWlu","IEJlbGc=","Ijw=","IHN1cHBvc2U=","YWRkeQ==","IHdhbGtz","Njg4","RVJSVQ==","X2ZpbHRlcnM=","UHJlZmVycmVk","c2NlbmU=","0LXRgQ==","IEFmZmFpcnM=","ICIjew==","IG9uU3VibWl0","IHN0b2Nrcw==","L3ZpZXc=","Z3JlZQ==","LWdldA==","OTAz","aGl0","Sm8=","LmdldEM=","NzI1","SW5pdGlhbGl6ZWQ=","0YLQuA==","Y3V0cw==","KFR5cGU=","IEFncmVlbWVudA==","IFZpZXRuYW0=","IC8qIQ==","IHBpenph","LXZpZXc=","X2Vt","IGxocw==","
IG11eQ==","IElkZW50","IEZyaWVuZHM=","MDYx","IGFidW5k","X0FE","LnRpbWVzdGFtcA==","LSc=","IGR1cGxpY2F0ZQ==","IGh1bnRpbmc=","IHJlZ3VsYXRvcnk=","aWFv","YW1vdXM=","IEVudGVydGFpbm1lbnQ=","W0E=","aWF0cmlj","X0NMSUVOVA==","IEtpZHM=","L3BrZw==","QnJlYWs=","KSkpOwoK","IFNoYXBl","IHJlbGF0aW5n","SW50ZXJydXB0","YWJsZU9wYWNpdHk=","ZW1icmU=","IG15c3Rlcnk=","IGpvdXJuYWxpc3Rz","cml0YWJsZQ==","Lkxpbms=","IHN0b3BwaW5n","Q1JFVA==","LkRC","IHBvcHVsYXJpdHk=","IGdldw==","IGltcHI=","c2V0VmFsdWU=","RkxBRw==","CW1heA==","IGJha2U=","d3k=","IEVjb25vbWlj","IGVuY29udHI=","IGZuYW1l","L2Rl","UmFuaw==","IGJ1Z3M=","LnNt","IG1lZGlhbg==","RE9XTg==","IFN1cmU=","QXRJbmRleA==","IERpY2s=","IChfXw==","LmRlbHRh","RnI=","IHN1Z2dlc3Rpbmc=","IFJlY3ljbGVyVmlldw==","LGU=","U1RBUlQ=","LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio=","eGZvcmQ=","IHJlY2VpcHQ=","Q0xBSU0=","cmVhZG9ubHk=","OTY4","IGVuZ2FnaW5n","NjE5","Q2E=","YXNtYQ==","IGVuc3VyaW5n","RW5nbGlzaA==","IFZhbmNvdXZlcg==","aHl0aA==","IHB1cmNoYXNpbmc=","IFBJ","LndvcmQ=","KHNw","LmhvbWU=","OmRlZg==","IGdpZw==","NTc0","Njcx","IFZl","Zm9ydW0=","IE1pdGNo","QmF5","X0ZM","NjUx","IHNvbGw=","NTc3","X2NvbHVtbnM=","IG1pbm9yaXR5","YmlyZA==","IGhhbmRlZA==","U1NM","U1RBVA==","IG5lcnZvdXM=","g70=","IGZpbGVQYXRo","Q1JFQVRF","QXc=","IHBlbnM=","ODM1","c2VlZA==","IENvbXB1dGU=","b2xr","NTk0","IEFzc2V0","cmVhY2g=","JyksDQo=","bmF2aWdhdGlvbg==","TEY=","L3V0aWw=","IFB1Yg==","IOKU","Y2lvbg==","IyMK","MDcy","SUlJ","VGFnTmFtZQ==","IGFtaWQ=","cGVybWlzc2lvbg==","aWZpYWJsZQ==","eEZGRkZGRkZG","0L3QuA==","LkJ1ZmZlcg==","X2lycQ==","ZGFyaw==","IHJldHZhbA==","LmZpcmU=","cHJvZHVjdGlvbg==","Lmxpc3Rlbg==","IFdlYXRoZXI=","IGJ1eWVycw==","Lm5l","ZXJw","IFBlbnQ=","Njk5","IHdlbGZhcmU=","IHBhZ2VTaXpl","IFN0YWRpdW0=","ZXJ0YQ==","IGxldg==","YW1wYQ==","UGFnZXI=","NjY1","IGNoYXJnaW5n","IE5ldGZsaXg=","fG51bGw=","X3JhbmRvbQ==","LnhwYXRo","IHN0ZXJl","IElTSVM=","cG9uc2Vz","KGxvYw==","NTY2","ZXlvbmQ=","IE9mZmljaWFs","NjU3","IE1hcnlsYW5k","RGF0YVR5cGU="
,"X3Bhcg==","e30s","IEVuam95","NzI3","X1NISUZU","IEF3YXJkcw==","X0VOVFJZ","IHNlZW1pbmdseQ==","ZW50aWNhdGU=","IGhlYXJ0cw==","NTgz","XzsKCg==","IEhJVg==","IGluZGl2aWQ=","IEZsYWc=","X2N0cmw=","IENhbGxiYWNr","LHo=","IEdQVQ==","CW9iag==","IFBob2VuaXg=","IEJVUw==","OTA3","IHJ1YmJlcg==","X0FVVEg=","IFNvbHV0aW9ucw==","KGxvY2F0aW9u","VmFyaWFibGVz","LnNldEVuYWJsZWQ=","X2hpZ2g=","V08=","R2VzdHVyZQ==","IHJldHJ5","IG9iamVjdEZvcktleQ==","YWxsb3dlZW4=","IG1vcw==","IENlbGU=","IGlra2U=","KGNlbGw=","IE1PREU=","cmVuYQ==","IGRlc2NyaWJpbmc=","NjQx","IHBoaQ==","IHJk","IGRlc2VydmU=","IHdoZWVscw==","5biC","IGNyaXRpY3M=","NzU1","TmFtZXNwYWNl","IEZyYQ==","IAoKCgo=","IGFsbGE=","IHJlcXVpcmluZw==","5pyf","dXRhdGlvbg==","IGRlbGF5ZWQ=","IGFkbWluaXN0cmF0aXZl","IGJheQ==","LmhpZGRlbg==","VGV4","MDUx","IGJvdW5kYXJpZXM=","IF0pOwoK","IEZvbGxvd2luZw==","fi8=","Rmk=","X2NvbnY=","X1RJVExF","IGRlc2Rl","SUNvbGxlY3Rpb25WaWV3","QWxpYXM=","IGJpdGU=","cGF0aWVudA==","X0NPTU1BTkQ=","Q29tcGxldGVk","CWVsaWY=","KDw=","QnVzaW5lc3M=","IFBvb2w=","IHB1cnN1ZQ==","IEJhbg==","X3N0ZXBz","X0RFQ0w=","dW1ibGU=","IGNvbWJv","IExheWVy","Lnhy","IGR1cA==","LS0tLS0tLS0t","NjI4","IG1vZGlmaWVy","cm9i","cmV6","Njk2","IGF0aGxldGVz","VXNlZA==","d2Vhcg==","ODE1","IGxlZ2l0aW1hdGU=","ICIKCg==","IGh2","U3Rk","MDM3","IEhvbGQ=","IHN1cnZpdg==","IEFsbGlhbmNl","IEVhcmx5","Nzc4","QmVoYXZpb3I=","KGZvbnQ=","L2xpYnM=","IHJlY3RhbmdsZQ==","IHNpbmdlcg==","IGFtcA==","RXF1YWxUbw==","ICIuIg==","IGdpcmxmcmllbmQ=","5bE=","bGluZWFy","b2JzZXJ2","IHBpw7k=","IGNvbXBsZW1lbnQ=","V2l0aFZhbHVl","KHBhc3N3b3Jk","dGFrZQ==","Qmxhbms=","IENvbXBhcg==","JyIs","X3BvbGljeQ==","bW9uZ29vc2U=","X0ZBSUxFRA==","LnJlcG9ydA==","UmF0aW8=","LlBlcmZvcm1MYXlvdXQ=","NzQ3","dXNhYmxl","bWVycw==","X3JlbmRlcg==","UEVFRA==","Nzcy","IGxlc2I=","CUU=","X3Rvb2w=","IGxhZGllcw==","OTA4","0L7RgQ==","KSkpKQo=","Ozs7Ow==","LmRvdA==","IG5lc3Q=","cGVhaw==","dWtraXQ=","ZWNh","X1NX","ICYo","IE9rbGFob21h","IGJhbmtpbmc=","NTY5","IE5pbnRlbmRv","NzUy","IHJlcHJvZHVjZQ==","X2VsZW1lbnRz","X21hYw==","cHJveHk=","
IHJlbWFya2FibGU=","fS8kew==","IG91dHM=","Lmhhc05leHQ=","TU9ERQ==","NjU4","IGFuaW1l","LmNvbm4=","VW5pcXVl","RG9t","IGltcG9ydGFudGx5","aXR0eQ==","IGp1aWNl","VHc=","IFBhcnRuZXJz","IGF0dGFja2luZw==","IHBvcnRhYmxl","YW1pZW50bw==","LlBpY3R1cmVCb3g=","Lmdlbg==","IG9wdGltYWw=","NTgy","IHJlY3Jl","IGpvdXJuYWxpc3Q=","IEV4dHJhY3Q=","IE1vcmVvdmVy","IG1hcmdpblRvcA==","LkFw","IGZpcmluZw==","TmFO","CXRlbXBsYXRl","0LDQtA==","LkVu","IGRlZmVuY2U=","IFRlbA==","aWxlbg==","amFu","PWRhdGE=","IFVybA==","IFJldXRlcnM=","KHRvdGFs","IEZpZnRo","IGVzc2F5cw==","IGludGVycHJldGF0aW9u","IGNoYXJpdHk=","IFJ1bGVz","IHN1YnNlY3Rpb24=","c3R5bGVk","YXplcg==","bGFncw==","TElTVA==","IHVwbG9hZGVk","IHRyYXNo","IHJlZ2lzdHI=","IHNlbGxlcg==","Pic7DQo=","IHN0YXJ0VGltZQ==","55k=","c3k=","KEh0dHBTZXJ2bGV0UmVxdWVzdA==","IHRyYXA=","R0M=","IGVtYmVkZGVk","IHN1cnJvdW5kZWQ=","ODE2","aW1pdHM=","VFg=","eWxpbmRlcg==","Njg1","IEZhbA==","IHNlbnRlbmNlcw==","IEph","SUZJQ0FUSU9O","d2VhcG9u","b3ZhdGlvbg==","IGNvYXQ=","IGludGVycG9s","IGxpcHM=","IEt5","IHZlY3RvcnM=","X2Ft","IGludGFrZQ==","Lndvcmxk","IGluYm94","IE1BQw==","X2Fi","KG5hbWVvZg==","NjMz","IGVudGVydA==","IGdhdGhlcmluZw==","IFNJTQ==","Kysu","bnlh","J319","IFVQREFURQ==","IHBhYw==","KGh0bWw=","IFNhbnQ=","aWF0aW5n","IElkZWFz","IHNwcmF5","IEhhcnQ=","IHZlcmlmaWNhdGlvbg==","YWRlc2g=","L21vZHVsZXM=","IE1pbmQ=","IFNpemVkQm94","IHNoZWx0ZXI=","IGhlcm9lcw==","YXR0eQ==","IGNlcnRpZmllZA==","c2o=","IMOqdHJl","xYJv","IHB1Ymxpc2hpbmc=","IE1hbGF5cw==","LmdldFVzZXI=","IFByb3ZpZGVy","IExpbmtlZExpc3Q=","IEJvcg==","Uk9VTkQ=","ZGlk","dGFpbg==","cGlyZQ==","IEplbm4=","dGVs","YW5kZQ==","NzU3","X2Zyb250","IE1jRw==","VGVzdE1ldGhvZA==","4Lit","IG9jY2FzaW9uYWxseQ==","IFdhbGVz","IGV4ZXJjaXNlcw==","INCS","MDQ1","LXBsdXM=","IHZhbGlkYXRvcg==","IHByYXllcg==","TEFURUQ=","X2F1dGhvcg==","IGxhYm91cg==","KysK","LWVxdWl2","IEdQTA==","IGZhY2Vib29r","c2ltcGxl","Z2x5","UHJvY2Vzc29y","aXB5","NzQ0","ICo+","NjQ4","IGNsZWFyZWQ=","IFB1c2g=","ODU4","IHBlbmlz","U3RydWN0dXJl","bGlq","IE1vcmdhbg==","IGhhbmRmdWw=","Ii4K","OTg
0","fFw=","ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq","IEFxdQ==","NTg0","X0lD","LmxvYWRz","IG1ldGVy","IE1hcmluZQ==","Ojp7","IFRT","Nzc2","IEFycmF5cw==","LlRpdGxl","R1JBTQ==","dGVybWlu","IGNvaW5j","RWxzZQ==","X3N0YXRlcw==","LXJ1bg==","bWVtYmVycw==","Nzgy","YXN0cm8=","MDY2","IG9uUHJlc3M=","IGJlaW5ncw==","IGFiYW5kb25lZA==","IHRheHA=","b3duZXJz","Lm1vZGU=","IGRpYWdub3Npcw==","IF8K","IEtuaWdodA==","CUE=","IG9ic2VydmU=","KSwn","ODIz","ISIpCg==","IFBhcmE=","IHZhcmlhdGlvbg==","KEZhbHNl","IEFudGk=","IGdyaQ==","IGhvbWVsZXNz","P3Y=","IGJleg==","LlNlcnZlcg==","cmVsZWFzZQ==","IFBhdHJp","IGNoYXJz","IHJhbmtpbmc=","YWN0aXZhdGlvbg==","NTgx","IHdpZGVz","cXI=","LlNxbA==","YWN1bGFy","IEJvdA==","X3N5bmM=","IGhhcHBpbmVzcw==","IHZvbHVudGVlcnM=","ODc3","IHNpdHM=","Lzw=","W2U=","KGZpbGVOYW1l","IGNhcGFj","ODMy","IE1hcmlh","ZmF0aGVy","IGdyYW0=","Kmk=","IGNhc28=","X2RyYXc=","IFJhdw==","IEl0ZXJhdG9y","NjY0","IFBhZGRpbmc=","OTI0","UEQ=","Qk9Y","IFNQRUNJQUw=","IGZlY2hh","IHZpZGU=","IExlYWRlcg==","5Lul","JCgiLg==","IGRpYW1ldGVy","IG1pbGQ=","NzQ1","IHJvY2tz","YXBwaW5ncw==","MDQ4","ZGlyZWN0b3J5","NTU3","LmZsdXNo","IEplc3M=","VU5JVA==","IFBlYXI=","IG1hbmRhdG9yeQ==","U3Vy","cXQ=","IHN0cmVhbXM=","IGNvb3BlcmF0aW9u","IFNhYw==","IGNoZWFwZXI=","CWNo","YW5pbWF0aW9u","ZmFyZQ==","KGhlaWdodA==","KFRydWU=","Tlk=","IHdyZXN0","IHBvbGxz","IGVuY291bnRlcmVk","IE1hcmtldGFibGU=","X1BBU1NXT1JE","NzE2","X1NFTEVDVA==","IEFyYWJpYQ==","X2Nsb2Nr","IHZveQ==","INC40Lc=","IHN0aXI=","aXNpYmxl","LWVmZmVjdA==","LmNyZWF0ZWQ=","IHRveXM=","IFRyYWRhYmxl","IHJ1c3Q=","IHN0cmNweQ==","X3RpbWVzdGFtcA==","IHRhbGVudGVk","LG51bGw=","IEpvYnM=","IFBvcnRsYW5k","IHdlYWtuZXNz","VGhyb3c=","IEFuZ2Vs","5L+u","NzU0","IHVuY2VydA==","77yJCg==","IOydtA==","V2hpY2g=","IFstXTo=","U29tZXRoaW5n","IGNvbnZpY3RlZA==","a2xl","ZWRpdW0=","IGJyYW5jaGVz","IGJhc2Vz","564=","IGNvbXBsZXhpdHk=","IEZpZw==","LnJlc2hhcGU=","JGRi","NzM2","X0NPTlNU","IFRlcw==","LnJ1bnRpbWU=","IGRlbnk=","IEJTRA==","IGty","aGF0dA==","IFN0YXRpYw==","IHVuaXZlcnNpdGllcw==","UmVwbGFjZQ==","IGR
yb3Zl","IGFkb2xlcw==","X3BsdWdpbg==","IExHQlQ=","IHRleA==","ZHVjdGlvbg==","NzUx","Nzk5","RURJ","IFRlZA==","X1VSSQ==","IHJlY2VwdGlvbg==","YXJ0ZW4=","LlNpbmdsZQ==","cmljZQ==","c2Npb3Vz","ODQz","X2Jn","IHdhZ2Vz","IFNlcnZsZXQ=","VUlMYXlvdXQ=","IGZvcm1hdHRlZA==","Lk1vZA==","PGNsYXNz","aXNlbg==","IHJlcHJlc2VudGF0aXZlcw==","Il09","IHBvcnRhbA==","IEh1bnRlcg==","IGhpcmluZw==","X18pCg==","cmljdWx1bQ==","dW8=","bGllc3Q=","IHRlYXJz","TGF0","IGxpdGVyYWw=","Lkluc2VydA==","IGN1cnM=","IENvbXB1dA==","IHRlcnJvcmlzbQ==","IHN3ZWVw","IFtdDQo=","IHBhc3Nlbmdlcg==","IGVhc3Rlcm4=","IHR3ZWV0cw==","IG9wZXJhdGVk","d25k","IFN5bg==","LnRvb2xz","IFdN","dWxhdGVz","IGJhY3Rlcmlh","KGJ5dGVz","LnNldERhdGE=","IHZpc2liaWxpdHk=","Ly89PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09","ZWxt","IGdlbmVyYXRpbmc=","IG12","IGto","amVu","L3NlYXJjaA==","IGFjY291bnRpbmc=","c2VnbWVudA==","YWN0aWM=","Lmlw","IGRlcGxveW1lbnQ=","IGZvb3Rlcg==","PicsCg==","IGV4cGFuZGluZw==","IEhhbWlsdG9u","IENvbnRyaWI=","LlRhYmxlcw==","NzI4","QWN0aXY=","SEg=","b2NvbW1lcmNl","Xzs=","IGFtb25nc3Q=","b3dpbmc=","ODU5","IENvbGQ=","QVBI","IHBzeWNob2xvZ2ljYWw=","X3RlbnNvcg==","IHBhY2thZ2luZw==","IFN3ZWRlbg==","IHBhcmU=","IGFnZ3JlZ2F0ZQ==","IG1vZGVyYXRl","ODYy","X2hhbmQ=","IGRlc2lnbmF0ZWQ=","IGRydW0=","IGdldFVzZXI=","IENyZWVr","X3Njb3Bl","IFRyYW5zZmVy","IE1hcmc=","IGZpZ2h0ZXJz","V25k","IFNlbA==","IExhdW5jaA==","IGVtZXJnaW5n","aWZyYW1l","IEFkZGl0aW9uYWw=","IGZlYXJz","IHNhdGVsbGl0ZQ==","Xzo=","IGRpc3Bvc2luZw==","R2V0VmFsdWU=","SHR0cFBvc3Q=","QVRJVkU=","dWxhcnk=","Vmlld3M=","IGF0dGVuZGluZw==","IFRlbm5lc3NlZQ==","IE1pc3Npb24=","IG1lZGljYXRpb24=","IFd5","IEFubmE=","2Lk=","IFZlcnRleA==","LnR5cGVz","T3JnYW4=","LkRhdGFHcmlkVmlld1RleHRCb3hDb2x1bW4=","IFJT","IHRlbXBv","KEFwcA==","ODky","VmVyc2lvblVJRA==","LnBvaW50","IER1dGNo","SG91cnM=","TFU=","IHF1b3RlZA==","LmJ1aWxkZXI=","IFBlcmZlY3Q=","IEFsd2F5cw==","X3R3bw==","IGV4Y2x1c2l2ZWx5","IENyYQ==","aWZpY2Fy","IEFXUw==","aW5naGFt","Y29tcGxleA==","a2VybmVs","IGdyYXZpdHk=","IHdp
","MDUy","IG92ZXJ2aWV3","NjYx","IFdhbnQ=","IFdQ","KHNo","LnJvdGF0aW9u","U3RhdGVz","IFRlZW4=","X2NvbXBvbmVudHM=","7IiY","UmVjZWl2ZWQ=","IGx5cmljcw==","cml0ZXM=","CQkJCQkg","LUFtZXJpY2Fu","W251bQ==","L3B5dGhvbg==","IFVBUlQ=","IGFwcGxl","IEpvbmF0aGFu","IG1vbWVudHVt","4Lix","grk=","IG1pY2g=","YW5kcmE=","IGJpb2xvZ2ljYWw=","IE1lbnM=","ICUl","ZWxzZWE=","IE1leGljYW4=","LnJhbmRpbnQ=","IHRhbGU=","IFZhbGlkYXRl","IGRlZmVhdGVk","Lmh0bQ==","IGNvcHBlcg==","PS8=","Y29zeXN0ZW0=","IHJpcA==","ZGVjaW1hbA==","LlZJU0lCTEU=","IFRh","CQkJCQkJCQkJCQkJCQk=","IGRvd25sb2FkZWQ=","ZW52aXJvbm1lbnQ=","IG5vbWluZQ==","YnVpbGRpbmc=","IFNwb3Q=","aXBoZXJhbA==","IGFsdG8=","cXVldA==","IEZU","L2dldA==","L21hc3Rlcg==","V0lO","5YWD","Njc2","V2VzdA==","YXJnYw==","IHByb2R1Y2Vycw==","IE11Y2g=","X3N0b3JhZ2U=","Y3JlZGl0","Q09OVA==","IHZldA==","IHZvaWNlcw==","KCcnLA==","IGluc3RydW1lbnRz","NjYy","IE1TRw==","ZXNzZQ==","cmVwb3NpdG9yeQ==","b21pY3M=","IGRlYWxlcg==","U3RpbGw=","IGJhbm5lcg==","YXNjaWk=","IHJlbWFya3M=","W2pz","IHNob3J0ZXI=","Z3VscA==","IG15c3Rlcg==","IGt1bg==","IEJpcmQ=","IHRpZW5l","Nzg4","bnV0","IFVt","IHdpc2U=","WWVhaA==","SU5FU1M=","MDQ2","X2JlZ2lu","LWhlYWRpbmc=","Q291cnNl","IA0KDQo=","b21iaWU=","Z3JhZGVk","IEdQUw==","IMW8ZQ==","Rml0","Y2FwdGlvbg==","w7Zu","L2ltYWdl","bGlh","KG1vZA==","IGxlYWs=","ZW56YQ==","NjI5","L0g=","IEhhcHB5","OTkz","RGlzdA==","bng=","IEdvdmVybm9y","KGxhc3Q=","dGVhY2hlcg==","IFNlbnQ=","c3VwcG9ydA==","ODM4","amVjdG9yeQ==","INmF","UmVnaXN0cmF0aW9u","MDYz","IEdyYXk=","LGZhbHNl","IGFkanVzdGVk","KHNldHRpbmdz","PFI=","IE1hZ2U=","IHBsYWludA==","XykK","CWl0","b21ldHJpYw==","LmJvb3RzdHJhcA==","IGNhcnJpZXM=","SXA=","ICEk","IHN3aW1taW5n","IE1hcmlv","IFF1ZXN0aW9ucw==","UEFDRQ==","5pa5","ZW9y","fX0i","IG92ZW4=","IEtvbg==","IHdpc2RvbQ==","IGFjcXVpc2l0aW9u","ZXNzbWVudA==","YWdpbmU=","IGV4cHJlc3Npb25z","U2VxdWVudGlhbEdyb3Vw","RnJvbnQ=","dWxwdA==","YXdr","J10pCgo=","ODEz","NzMy","X0FS","IGFuYWxvZw==","dWxpbg==","X1BSSU5U","IExH","IGJsb2I=","IEZ1cnRoZXJtb3Jl","X2NvbXBvbmVudA==","IENvbGU=","TEFO",
"U0NSSVBUSU9O","IGxhcA==","aWNlbnNpbmc=","X1RJTUVPVVQ=","IEZybw==","IGxpYWJpbGl0eQ==","IGNvbXBvc2Vk","NjM0","LmNyZWF0ZVNlcXVlbnRpYWxHcm91cA==","X3BlcnNvbg==","IGJlYW0=","CSAgICAgICAg","IE5vdEZvdW5k","Njg0","LicK","w61z","LlRleHRWaWV3","UERG","IGthcg==","X18oJw==","ICI6Ig==","X21lc3NhZ2Vz","IGhhcnZlc3Q=","Lmhpc3Rvcnk=","PicK","LWZvbGQ=","5oo=","IEJldHRlcg==","ICJcPA==","c3BhY2luZw==","IGZ1cm5pc2hlZA==","OTEz","b3Nlcg==","XX0K","ICQi","cHVsbA==","LlBvc3Q=","OTE5","KGlw","l48=","LmZyb250","bnRl","IEZN","Z3VpZA==","ODQ0","IG5lZ290aWF0aW9ucw==","YWdvbmFs","OTM0","IHRyZW1lbmQ=","dW5nZW9u","QWR2","Y2Fyb3VzZWw=","w59l","X0RFU0M=","IGhhbW1lcg==","4bqt","ICAgICAgICAKCg==","LWNvcmU=","LXNlcnZpY2U=","IGNvcm5lcnM=","IFNG","cHJlZA==","PkE=","IEpMYWJlbA==","IHJvbWFudGlj","IHRlc3RpbW9ueQ==","b3Nj","IEdlbmVyYXRpb24=","YXN1cmVz","X2ludGVybmFs","IHByaW50cw==","IF0pCg==","IENsZXZlbGFuZA==","cmVwbw==","RGlzYw==","Njc3","NzYy","ICI+Cg==","77+977+977+977+9","IG5lYXJlc3Q=","NTkx","X3Ri","KHJlcXVpcmU=","RU9G","LWNoaWxk","IGJ1ZGQ=","Llh0cmFFZGl0b3Jz","YWx0aWVz","NzIz","XCI6XCI=","V29yZHM=","OTE3","IGxvY2FsbHk=","IHB1cmNoYXNlcw==","Njk1","RHJhd2Vy","ZXh0cmFjdA==","IGV4ZWN1dA==","fScu","dXNlcmRhdGE=","IGZvY3VzZXM=","LW1pbnV0ZQ==","NzY0","IFB1Ymxpc2g=","b2dv","IG1vdW50YWlucw==","Qm90","fT57","IHRlbnNpb24=","cm9k","bWVzaA==","IHRyYW5zZm9ybWVk","LFI=","KCl9Cg==","Lmxvbmc=","IGdvcmdlb3Vz","IFNjaGVkdWxl","IG9sZGVzdA==","IHN1YnByb2Nlc3M=","KElO","eWVjdA==","IENvb3Blcg==","YXJuZXNz","IE1vbml0b3I=","LnBhcnQ=","OTcy","IE5CQw==","NjY4","IGNvdHRvbg==","IGhvbA==","NzI2","IHJnYmE=","IEJpbw==","Q29udGludWU=","UG9k","IHBhcnRpY2lwYXRpbmc=","Y2x1c2lvbnM=","KEJ5VmFs","NzM0","w6w=","IEhPVw==","X3NldG9wdA==","IGFjY29tcGFueWluZw==","MDkx","YXRvbg==","IC9c","IEF1dGhlbnRpY2F0aW9u","acOpbg==","IEJhcmFjaw==","Lyou","IGVhZ2Vy","IENhbmNlbA==","PGxlbW1h","ZXBo","CXdpbmRvdw==","IGluY2lkZW50cw==","NzU2","KSwo","LkRlcw==","aWJl","IEZ1bmN0aW9ucw==","IGhvc3BpdGFscw==","MDM4","IG94eWdlbg==","cm9vdFNjb3Bl","IGRyZXc=","CXJlcXVlc
3Q=","bm90aWNl","YWt1","YW1lbnRz","ZmFy","OTcz","Nzc0","IHByZWNpc2U=","X3dyYXBwZXI=","IGxpc3RlbmVycw==","QVo=","LmJvdW5kcw==","IEF2ZXJhZ2U=","ZmllbGRzZXQ=","X2F4aXM=","IGV4YW1pbmF0aW9u","Jy4K","bW9ucw==","Kyspew0K","IEZvcm1z","7ZWc","OTE2","Q3BwTWV0aG9k","X3RyYWNl","IGVuZ2luZWVy","NjYz","IEZsYXQ=","IHJldmlzaW9u","IGhlYXRpbmc=","NjM4","L3Byb2ZpbGU=","LnJ1","cHJpb3JpdHk=","IGluZmVy","X1NUUkVBTQ==","ICopKA==","PiQ=","T0xFQU4=","T0tJRQ==","SUJJTElUWQ==","VUFHRQ==","IFN1cnZleQ==","MDcx","IHJlc2lnbg==","d2luZw==","IHNlY3JldHM=","IGNoaXBz","SlNPTk9iamVjdA==","RGVza3RvcA==","NTk2","X1NZTUJPTA==","KHJlc291cmNl","IDwvPgo=","IG5ld2VzdA==","dWxp","IGRlc2VydA==","IGRpcA==","IFBvdw==","IGVxdWF0aW9u","IHBvc3NpYmlsaXRpZXM=","IEZlZA==","b3NwaA==","IFsl","IGJ1YmJsZQ==","ZXRoZXJsYW5kcw==","Nzkz","IGNlbWVudA==","LmF1dG8=","X0FO","4oCZLg==","c2VsZWN0aW9u","IEJvbmQ=","OTg4","RGVu","LU8=","LmdldFR5cGU=","ODk2","LldpbmRvdw==","cHJlcw==","IHN3aW5nZXI=","In0pCg==","IHBpcA==","IG1pY2U=","IGNvbXBvdW5k","LXBsdWdpbg==","aWtv","IGNlbnR1cmllcw==","aWN1bGFy","LWlubGluZQ==","CWtleQ==","Plw8","RU5TSU9O","IFsNCg==","IHByZWNpc2VseQ==","IMOpdMOp","IFBhc3Q=","IENhbWJyaWRnZQ==","LWZ1bGw=","IGFuYWx5emU=","IFN0ZXZlbg==","IG5lbQ==","ZHVl","b3Jlbg==","IG11c2NsZXM=","aWppbmc=","ODUy","Ly0=","IEtlbm5lZHk=","NTk3","Uk0=","b3NzaWJsZQ==","IGFjdHJlc3M=","IGRvbG9y","OTE0","5b2V","TmVlZA==","LnRvZ2dsZQ==","IFJhY2U=","d2Vycw==","Lm1hdGVyaWFs","IER1ZQ==","IFBlbA==","I3ByaW50","IGluZGVwZW5kZW5jZQ==","ZXh1cw==","U2hhZG93","IGVuY29kZXI=","KGxldmVs","IFN3aWZ0","LmRvYw==","X3NlbGVjdGlvbg==","OTUy","IHNlcmlhbFZlcnNpb25VSUQ=","OTQ1","TGFiZWxz","IHBlcmZvcm1hbmNlcw==","LlRhZw==","IE5ITA==","aXplbg==","L1VJS2l0","OTkx","X0NPTlRST0w=","IGVhcm5pbmdz","OTc1","IEFsdA==","X0hBTkRMRQ==","Q3R4","IHBlcnN1","IHRyYW4=","56g=","X0NIQU5ORUw=","IHNhdGlzZmFjdGlvbg==","IEdQ","NzY5","aW94","bWl0dA==","bGFuZG8=","IHBpZw==","aW5hbHM=","w6puY2lh","NzMx","U3VyZmFjZQ==","IFVVSUQ=","IGJlbmVmaWNpYWw=","IHNlcXVlbmNlcw==","CW1lbXNldA==","IG1hZ2ljYWw=","
wqs=","IHdvcm4=","QVND","cG9wdXA=","Q09NUA==","X2JlZm9yZQ==","ZW5lc3M=","VWk=","TGVz","LnJlcXVpcmU=","LlNlcmlhbGl6YWJsZQ==","YWRkR2Fw","IGF1dGhvcml6YXRpb24=","MDg1","LnB5cGxvdA==","dXJyYXk=","bGF0aXR1ZGU=","ODQ1","ZnJhbWVz","YWpz","IGNvbXBhc3M=","IG9ic2VydmF0aW9ucw==","X3N1cA==","LmVudmlyb24=","IHRyaXBsZQ==","IFJ1Ynk=","IGRyYWlu","X0ZJTFRFUg==","U2Fu","VU1Q","TnVsbEV4Y2VwdGlvbg==","IEdhYg==","b3dl","IFR1cmtpc2g=","X3NlcXVlbmNl","IEdyYW50","dWVsYQ==","IHdv","IGN1YmU=","aXE=","IGRpc29yZGVycw==","IGV4dHJhb3JkaW5hcnk=","IGN0cmw=","IFNlcQ==","ZW50cg==","ODY1","IHNhbmN0aW9ucw==","OTQ5","dXRzY2g=","UmVwb3J0cw==","IGluaGVyaXQ=","UGVyaW9k","IHBob3RvZ3JhcGh5","IEZyYW1ld29yaw==","IHNwZWNpYWxpc3Q=","ID8KCg==","X3NlbGVjdGVk","LlBsYXllcg==","IGFsbG9jYXRpb24=","KGFjY291bnQ=","IHN0cnVjdHVyYWw=","dmFibGU=","LW9mZnNldA==","LkFwcENvbXBhdEFjdGl2aXR5","0LDQvA==","LkFkZFdpdGhWYWx1ZQ==","IGljb25z","IHNodXRkb3du","X2xvdw==","IENvbXBhcmU=","IENl","PWhlYWQ=","bGFt","LnByZWRpY3Q=","X0RFQw==","IFNsZWVw","IEdyYXRpcw==","IHN1Z2dlc3Rpb24=","IERFTA==","Y2FmZg==","YXZpcnVz","Tm90aGluZw==","nos=","IHdpZGVzcHJlYWQ=","IG1lY2hhbmlzbXM=","IHRleHRBbGlnbg==","b2NjdXA=","IFJhaWw=","Ok5T","IGZpYmVy","IG1r","IHZpbnRhZ2U=","LWxvbmc=","LnJlZHVjZQ==","LkVudGl0aWVz","KHJlY29yZA==","IHBsZWFzYW50","RlJJTkc=","LkNlbGxz","T1RU","CWVsc2VpZg==","NjQ5","NzI0","X2NvbmZpcm0=","IFZpZXdHcm91cA==","c3lt","IHByYXk=","IHN1c3BlY3RlZA==","Q29udGFpbnM=","OTgz","IGJvcmRlcnM=","IGNvbXBvbmVudERpZA==","QVNTRVJU","IGluZmluaXRl","LW9yZGVy","IGhlbGxv","IEdyYWRl","LmN1cnJlbnRUaW1lTWlsbGlz","YXBvbGlz","emg=","CU9iamVjdA==","Olxc","SE8=","dmFsdWF0aW9u","IHZvY2Fi","NzE5","IGNvdXBvbg==","YXRhYmFzZXM=","LkdldFR5cGU=","TGVhcm4=","Nzky","XT0i","IEdhcnk=","b3RpdmU=","IGFzaA==","IGJpYg==","WFhYWA==","IGJhbGFuY2Vk","VkFMVUU=","IE5hdA==","X0Fk","PEU=","5Yy6","IE1ldGhvZEluZm8=","ODk3","TElC","IGNvbnNpZGVyYWJsZQ==","IEluZHVzdHJ5","dGVzdHM=","LnNldFRpdGxl","IEJsdWV0b290aA==","IG1hcHBlZA==","IEJydWNl","IE1haW5XaW5kb3c=","CXN0YXR1cw==","IHJheg==","IE1h
bmQ=","IGNsYXNzaWZpY2F0aW9u","UGVybWlzc2lvbnM=","OTY5","IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0=","IGNvbnRhaW5lcnM=","OnNldA==","X3htbA==","IHdoaWxzdA==","VGhyb3VnaA==","IHZhbGlnbg==","IHdvcmxkcw==","Q09SRA==","RURJQQ==","0YDQvtCy","IHNwYXJl","IEhhZA==","IERFRg==","KHB0cg==","IHdhcm1pbmc=","ODk4","4KS+","IGNvbnNlbnN1cw==","YWduZQ==","Q1RM","IOyV","Lk1haW4=","d2ViRWxlbWVudA==","IHBpc3Q=","Rmxhc2g=","QXBwZW5k","LnR3aW1n","VGFw","IHZlZ2V0YWJsZXM=","YWxn","MDU4","LnNhbXBsZQ==","IGNvYWNoaW5n","KGluZA==","Q2VsbFZhbHVl","Q2hlY2tCb3g=","IEhlbGw=","Uk9PVA==","Nzk2","IHN0YWRpdW0=","IGludmVzdGlnYXRpbmc=","KSU=","c3RlZA==","OTY1","IFdyaXRpbmc=","IOqy","IHVubw==","IHt7LS0=","IGNvb3Jkcw==","IHVuc2Vy","b3JnYW5pemF0aW9u","IENyaW1l","IERlbW9jcmF0","NTc5","IHZpbg==","L2ZpbGU=","MDc4","LWFwaQ==","IEF5","IGZ1bmRlZA==","IEJyZXhpdA==","IEdo","ZW50aW5h","Y2FzZXM=","IGRhc2g=","ICEhfQo=","SEk=","T2ZmaWNl","IGNhcHRhaW4=","IHdvcnNoaXA=","XEM=","NzMz","ODUx","IGdsb2Jl","X2JvYXJk","IGJhYmllcw==","ODc2","IGNvbnNlY3V0aXZl","IGVuaGFuY2Vk","ZXJldW0=","IEFkdmlz","IGdyYWlu","Nzcx","IGNyYXc=","YW5jZWxsYXRpb25Ub2tlbg==","LmFscGhh","X1dJVEg=","IE90dA==","IENvb2w=","LmJhdGNo","IHZlcmlmaWVk","KGNhbGxiYWNr","IHJlZ2FyZHM=","Njgz","IEludFB0cg==","b3VjaGVy","IGtpbg==","IHRvdWNoZWQ=","aXTDoA==","YXRob24=","IGFkamFjZW50","IGFjY29tcGFuaWVk","TEVBUg==","IGltcGxpZXM=","IGhpbGw=","IEJhbHRpbW9yZQ==","PSIt","RmluYWxseQ==","ODgz","U2Ft","aWNvcHQ=","IHNvZA==","IG1hag==","IFNoaXBwaW5n","IGdldEFsbA==","IGNvYWNoZXM=","IGRvbmF0aW9ucw==","aWxvdA==","IFRhcg==","Y2Vycg==","IGJhZGdl","IG1hcmtlcnM=","IFJhbmQ=","YWlzZWQ=","aXNzYW5jZQ==","IGV4cGxvcmluZw==","ODI3","dWNlZA==","IEluZG9uZXNpYQ==","IGJlbmVhdGg=","IG1hZ25ldGlj","IG11c2V1bQ==","bWF0Y2hDb25kaXRpb24=","IGRpc3J1cHQ=","IHJlbWluZA==","IFRN","IC8+PA==","IGZvb2w=","IGVzaw==","Lk51bGw=","IERpZXM=","X09VVFBVVA==","X1RZUEVE","IHBhaW50ZWQ=","Njcz","NzM1","IHNvcGhpc3RpYw==","IEJlYXI=","Km4=","X1BBQ0s=","IGRlbGl2ZXJpbmc
=","IENPVU5U","5Y2V","IGplZw==","LWNhcg==","Zm5hbWU=","IHJhbmdpbmc=","ODQ4","IE5lZw==","LyoqKioqKi8=","IENIQVI=","IHVsdHJh","R3JhZA==","PXQ=","IGp1ZGdlcw==","IERpc2U=","YW5uZXJz","OTg1","ODkx","ODYx","IHNjYWw=","X2NhbA==","IENPTk5FQ1RJT04=","X2VtYmVk","KGZu","IENyYWZ0","MDQ3","IFBhcw==","IiktPg==","LmNvbnZlcnQ=","LnJlc291cmNl","IFNUQVRVUw==","w7RuZw==","IFRpdA==","IGNsYXNzcm9vbQ==","IEFyY2hpdGVjdA==","IEtpbmdz","IHN0ZWFkeQ==","LyohCg==","IEdlbmU=","KSI7Cg==","aWNpYQ==","c3Rhbg==","IENvbnN0cnVjdGlvbg==","dW1wZXI=","OTUx","d2M=","IENCUw==","aW5naW5n","LXBhcnR5","KGRyaXZlcg==","TUFSSw==","MDgy","IG5lc3RlZA==","ZXdhcmQ=","IGRlcGVuZGVuY3k=","IG1hbGVz","OTI4","IE9ORQ==","IFByb2R1Y3Rpb24=","XVsk","44O844M=","X0xPQUQ=","IEJvbA==","ZWxyeQ==","ODMx","oOmZpA==","IFJlcXVpcmU=","IHBsYWNpbmc=","eHh4","Q0FMRQ==","IHRodW1i","ODI0","Q2hvb3Nl","IHByb3RvdHlwZQ==","Vk9JRA==","IGxlc2JpYW4=","NzQx","IHRyYWl0cw==","U2hhcnA=","IGNvbnN1bWU=","VHJ1dGg=","IGFjdGlvblBlcmZvcm1lZA==","IEVudmlyb25tZW50YWw=","IERlYW4=","IGVzdGFkbw==","c2FtZQ==","IG51bWVyaWM=","IHRyYW5zaXQ=","LkVtYWls","LXNpZGU=","X1JVTg==","IFZpbGxhZ2U=","X09QRU4=","6KY=","LnJlbQ==","LXdhcm5pbmc=","YW55YQ==","UHJvcGVydHlDaGFuZ2Vk","ICghXw==","KGNoZWNr","aWxpYQ==","IFNvZnQ=","c3RlcHM=","IE1hZHJpZA==","TWVtb3J5V2FybmluZw==","IGhhbmRsZXJz","IGV4cGVyaWVuY2luZw==","IGluc3BlY3Q=","YnV0dG9ucw==","UmVjZWl2ZU1lbW9yeVdhcm5pbmc=","Y2hlbXk=","TGlua3M=","IHVybGxpYg==","LlN5c3RlbUNvbG9ycw==","IEVpZ2Vu","IHB1bmlzaG1lbnQ=","OlVJQ29udHJvbA==","YmFyYQ==","LXNldA==","IH0NCg0KDQo=","IHRvbGVyYW5jZQ==","IGludGVyZmFjZXM=","LnJlZGlyZWN0","aWdoYm9ycw==","Y3NyZg==","X2JhY2tncm91bmQ=","LlV0aWxz","X0hU","Njky","IEludGVyZXN0","aW1vcw==","IGdyYW50cw==","MDgz","IGV4YW1pbmVk","0JQ=","IGNm","Zm9yZ2U=","YmFja3M=","IE9iamVjdHM=","X3NlbnQ=","LmVudHJ5","IFRIRU4=","ZWxsaWRv","Y2lh","LHJlcw==","NjU5","Njgx","L3N0ZGM=","Lm5k","KEludA==","IEF1dGhvcnM=","IEFwcENvbXBhdEFjdGl2aXR5","J3s=","IG1lZGk=","TXVzaWM=","aWdt","Y2VpcHQ=","IGF1c3M=","IHRhcmdldGluZw==","IEtleXM=","aG4=
","Ol0K","IG1pbmVyYWw=","w64=","LmNh","NzYx","b21lZA==","IHNoZWV0cw==","IGNhbWI=","IGRlYWRseQ==","LmluamVjdA==","KHVuaXQ=","IFNlbGVjdGlvbg==","Lmdtcw==","KGNvbm5lY3Rpb24=","ICQoIg==","w6ltb24=","IEN1cnJlbnRseQ==","cHRl","X3BhdGhz","ODQ3","bGVhZg==","IGltcGxpY2F0aW9ucw==","cG9zYWw=","5L2N","Wy8=","YW5jaWE=","6Zs=","bXVs","Y2ll","IGdlaWxl","Njc5","aW1hbHM=","VUlWaWV3","IHN1cnJl","c2VyaWFsaXpl","SVNP","IGFyYml0cmFyeQ==","IHNvY2thZGRy","LmZu","IE1lcmM=","IGNhc3Rpbmc=","S2V5RG93bg==","IG5ld1ZhbHVl","b3BlbnM=","NzE3","VG9kbw==","IGZsZXhpYmlsaXR5","CQkJCSAg","VmVsb2NpdHk=","w7pu","cm93aW5n","IGNvbXB1dGVk","YCkK","c3RhdGVtZW50","IHJp","X2NhcnQ=","TG93","dHJhbnNmZXI=","Lm5hdg==","IGdyYXZl","IERvb3I=","CWFsZXJ0","Njkx","Njk4","LnN1YnNjcmliZQ==","LXByb2ZpbGU=","CWJhc2U=","IOKIkg==","X18KCg==","IGVuZ2luZWVycw==","IGV4cGxvc2lvbg==","IGRhcmk=","Njgy","CUxvZw==","b25hbA==","IGlzb2xhdGVk","e2k=","IE1zZw==","RnV0dXJl","IHJhY2lzdA==","LXdyYXA=","IFZlcnM=","Ym9yZw==","SVNJT04=","INGA0LDQ","IFlhbg==","ODM2","aW5pdFdpdGg=","IG5vbWlu","KGVtcHR5","w61u","44Kk","CXdpZHRo","IGNoYW1iZXI=","L2FqYXg=","RU1Q","MDkz","IG5lY2Vz","aXZvcw==","bG9naWM=","Kikm","Y3JpcHRz","OTc2","Um93QXQ=","MDUz","aWJsaW5ncw==","IGVhcnM=","IGNvbXB1dGluZw==","IG1ha2Vy","IE5laXRoZXI=","YnJlYWRjcnVtYg==","IHNlcmlhbGl6ZQ==","IFdpdGhpbg==","IGRlbGw=","X1RSQUNF","MDky","PWE=","IHdpc2hlcw==","LWluY2g=","IERvcg==","IGlubm9jZW50","IERvbA==","IGludGVucw==","Zm9yY2Vk","MDU0","IEJJVA==","IHBob3RvZ3JhcGhz","IGNhc2E=","IExlbg==","XEZyYW1ld29yaw==","LlNpbXBsZQ==","IGRlYXI=","ODk1","KS8o","aXBwaQ==","IG93bnM=","UGxheWVycw==","IHByb3Bvc2Fscw==","LnBp","dXNhbGVt","RGFtYWdl","IGNhbG9yaWVz","IENyZWF0aXZl","IFsk","IC8vDQo=","Nzg2","QW5kVmlldw==","w6htZQ==","LmN1c3RvbQ==","X2ZhY3Rvcnk=","Y29tbWFuZHM=","X2xvb2s=","IHN0cmNtcA==","WU4=","YWlyZWQ=","IGF1ZGl0","0L7RgdGC","IFJldmVyc2U=","cm9wcmlhdGU=","ZXRpY3M=","PHZlY3Rvcg==","LnNlbGVuaXVt","Lm9y","IHByZWRpY2F0ZQ==","IGZpbmlzaGluZw==","IGtsZQ==","IFJlcG9z","IEtoYW4=","IE1ha2luZw==","IEZT","IHB
1dGU=","CXN0YXRl","X1NVUFBPUlQ=","Jy0=","b3JpZW50YXRpb24=","IGV4aXN0ZWQ=","YXR1cmE=","IGV4cGVjdHM=","IFNoYWRvdw==","OTY2","IG9yZ2FuaXo=","5Z6L","IHN1c3BlbnNpb24=","NjY5","IHVpdA==","IHNpbXVsdGFuZW91c2x5","IEFmZmVybw==","OiIpOwo=","IHJvY2tldA==","Y2Fz","ZXRlcm1pbmU=","YWNldXQ=","Njkz","eGw=","IEFNRA==","KGdyYXBo","NzU4","ODcy","YXNzb2Np","X0NS","LmFyYW5nZQ==","MDQ5","KGpMYWJlbA==","IGJlZWY=","UXVpY2s=","LmNhcmQ=","XSk6","LWdy","Nzk3","LkdPTkU=","X0NMT1NF","IE5ldg==","w61hcw==","IHN0ZXBwZWQ=","IEZyZWVkb20=","IFdS","TlNBcnJheQ==","X3J4","X2RpYWxvZw==","IGhvdGVscw==","OTUz","IChcPA==","IERpYW1vbmQ=","IGFzc3VtcHRpb24=","dW1p","KGl0ZW1z","DQ0NCg==","5rOV","IG5lbA==","Qm9va3M=","5Y6/","dXNi","IEZJTg==","ODgx","5qw=","IGNvcnBvcmF0aW9ucw==","VVNB","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","OTI5","LnByb3BlcnR5","ZXdpc2U=","X3Bsb3Q=","Ij4nOwo=","IHBlcHBlcg==","OTg5","IHNoZWQ=","IE1lZGl1bQ==","IENvb2tpZQ==","ODg5","IG92ZXJzZWFz","ZWRvcg==","YXN1cmVtZW50","NzY2","5a2Y","ICcuJw==","IHBocA==","IFBST0M=","IGV4Y2VwdGlvbmFs","KHRo","IEpldA==","IG9jY3VwaWVk","LnNldEltYWdl","IFJlbGF0ZWQ=","dWNrZXI=","TWVtYmVycw==","UFJJTlQ=","IEdsbw==","X1ZJRVc=","fSIsCg==","IGFkb3B0aW9u","W10pCg==","ODQy","IE1pc3NvdXJp","IExpbmNvbG4=","ZXJhbGQ=","UG9wdXA=","IGZhdGU=","LWJvb3RzdHJhcA==","ZmVjdGlvbnM=","IFBvbGw=","X0FSR1M=","aW5hbmNl","Njk3","LWhvbWU=","Liks","X2RvbmU=","Njk0","OgoKCg==","IGRpc2N1c3Npbmc=","IFNRTEV4Y2VwdGlvbg==","IGVsZWN0cm8=","CXJlcQ==","IHp3","ODg2","IGx1aQ==","OTMy","IG92ZXJuaWdodA==","JHVzZXI=","IFdBWQ==","IGFsbGVyZw==","IGRpc2FwcG9pbnRlZA==","IHJhZGlhdGlvbg==","IGltcHJlc3NlZA==","aWZpY2F0ZXM=","IHRvYg==","Q0xBU1M=","IGN1ZGE=","X2RldA==","LXBvc3Q=","dWx1","VHJhbnNsYXRpb24=","LWhhbmQ=","LnllYXI=","IE1vbmdv","IHVuY2xlYXI=","LmVuZ2luZQ==","V0VCUEFDSw==","cmljZXM=","X0FDQ0VTUw==","IGhvbGlkYXlz","cGVyY2VudA==","LklkZW50aXR5","IEdvdg==","IHBhc3Npb25hdGU=","ISEu","IEdyZWVjZQ==","cGx1c3BsdXM=","JykpOw==","R1A=","IGV4Y2l0","LnRhYlBhZ2U=","X2NvbmQ=","IHNwb25zb
3I=","TU9EVUxF","X3Byb2M=","ICQK","IHJhdGlvbmFs","LlRvb2w=","IGlocg==","Y2Nh","5ZOB","IEVzdGF0ZQ==","SUJVVEU=","QWN0aW9uUGVyZm9ybWVk","IFNvbGFy","poI=","IGVxdWl0eQ==","dGlk","OTM4","IHJlY2lw","LnNpbXBsZQ==","bWs=","Njg5","IEx1a2U=","IEd1YXJkaWFu","IGVuY3J5cHRlZA==","IGRvbWluYW50","LnBsYWNl","IE5W","ODM5","IHRvbmd1ZQ==","KEdldA==","IHN0YWlubGVzcw==","LlBsYXk=","IGVi","YWNp","LmJ1ZmZlcg==","cmVhZGNydW1icw==","IHZhY2NpbmU=","cHJvbQ==","OTc5","IHVzZXJJbmZv","IHNsdWc=","U2VyaWFsaXplZE5hbWU=","LXdpZGU=","IHJlYWN0aW9ucw==","IFlhbmc=","IEFkZHM=","KHVzZXJJZA==","IHBsYXRlcw==","IE1FTQ==","IGJhaWw=","SW5zaWRl","ZXRlZA==","IGVsc2lm","IHNha2U=","IGN5Y2xlcw==","IOyX","CUk=","LWNvbGxhcHNl","ODQx","IEdNVA==","ODE0","RGVjbGFyYXRpb24=","IGdyb3M=","IHJlYWNoZXM=","IGN1c3RvZHk=","VW50aWw=","NzUz","ODU2","dHU=","IENoZW4=","IG54","KGFkZHI=","IE9mZmVy","IGNvbGxlZw==","YXNzYWRvcg==","Njc0","IG1hcHBlcg==","ODU0","IFNJR05BTA==","IEJsb29t","IEhvbGw=","IEltcGVy","LWRlcw==","X3NpdGU=","UHJvYw==","RXF1","IGF0b21pYw==","IFdvbWFu","c2VudA==","NzM4","ODE3","c2Nhcg==","IGludGVsbGlnZW50","IEdldHRpbmc=","IFJlZ2lzdHJhdGlvbg==","IFBoaWxs","IGtpbGxlcg==","dW5pY29kZQ==","CgkJCg==","IEphY29i","IENvbnN0","IGxvY2F0ZQ==","IGNhdXM=","NzQ5","IFNjaG9sYXI=","IGNvbnN0aXR1dGlvbmFs","IGluZmxhdGlvbg==","IEdvdA==","PWFycmF5","ZW5kdW0=","IHRyYW5zbGF0ZWQ=","IGRpdm9yY2U=","RW50cmllcw==","IHNvcg==","IFF1b3Rl","aXJsaW5lcw==","VUs=","IGV4Y2Vs","KG9wdA==","IEFEVg==","LDos","IGNvbnRhY3RlZA==","NzQy","IERB","IHJpbmdz","IEluZHVzdHJpYWw=","LmdldENvbnRleHQ=","IGZvcmdvdHRlbg==","IFRhbg==","IHBhbnRz","IG92","IGRlY29kZXI=","IFBhcnRpYWw=","IHZj","IGJhdHRsZXM=","QXJpYWw=","RlJJTkdFTUVOVA==","aXJhdGVz","LHc=","YWludGVuYW5jZQ==","IE9k","IFRlY2hub2xvZ2llcw==","5YmN","IENhcnRlcg==","LmZpbmRBbGw=","Tm9tZQ==","QmVu","IFVzYWdl","IFBpY3R1cmU=","IGJhZGx5","X3BhbmVs","IHBhdGVudA==","IFByb3RvY29s","bG90dGU=","CXBsYXllcg==","amVjdGlvbnM=","NzQ2","IGRvdQ==","X3JlbGVhc2U=","dXJuaXR1cmU=","X3RheA==","IEZpZWxkcw==","LmRhdGFzZXQ=","X21hc3Rlcg==","Q0xVR
EU=","IFBoYXJt","YnN0","IG9wZXJhdGlvbmFs","LmNlbGw=","IGlkZW50aWZ5aW5n","IGp3dA==","dHVwbGU=","IFRD","IENybw==","OTM2","aXhtYXA=","LWNvbXBvbmVudHM=","Z2VuZXJhbA==","IG96","X0Rl","X2RvdWJsZQ==","IFRvbw==","MDg4","LlZpZXdHcm91cA==","ODc5","Z2F0ZQ==","ZGluZ3M=","cGhvdG9z","IGdyYW5kZQ==","b2xsZWN0","X2xpbg==","IGF3ZnVs","ZmlsdGVycw==","IGFsdGVybmF0ZQ==","ZXNw","IGNvbXByZXNz","ZW8=","IFNjYWxl","IGluZGlyZWN0","IGludm9pY2U=","CgoKCgoKCgoKCgoKCgoKCg==","U3RhcnRpbmc=","IFBsYXllcnM=","aWVsZQ==","LnRoZW4=","OTgx","T3Jk","IFR1cGxl","IGJvdXQ=","IFN0YXRpc3RpY3M=","UHJldmlldw==","IHB1enpsZQ==","IFdpZHRo","U1RBVEU=","IG92ZXJsYXk=","CW9u","IGluZnI=","IHNtYWxsZXN0","bG9ja2Vk","0YLQvg==","c3Ns","Nzc5","IGRlZW1lZA==","IHNjbw==","cmVjaw==","IGpCdXR0b24=","IG1pc3Npb25z","ODcx","56ew","LlNlbGVjdGVkSW5kZXg=","VEFCTEU=","U2VwdA==","IGFja25vd2xlZGdl","IHN0cnRvdGltZQ==","IFRlbGw=","IERhaw==","IGFsdW1pbnVt","IGZlbmNl","IFN0YXJz","Q09ORklH","IHJldHJvZml0","IGVtcGhhc2lz","L2hlYWRlcg==","IFNvbWV0aGluZw==","aW5pc2hlZA==","PSciLiQ=","IFZhbGlkYXRvcnM=","IHBvbGFy","c2VjdGlvbnM=","OTQ0","LmFzcHg=","IGFzcGly","Lk1vY2s=","Q29kZUdlbg==","IHBldXQ=","OTcx","IGFjY2VwdGluZw==","IGJhY2tpbmc=","UGljdHVyZQ==","L2Fw","0LXQsw==","X1NFQw==","LXVzZQ==","YW5ub3RhdGlvbg==","IGNvZ25pdGl2ZQ==","IGdyaXA=","aG91cg==","IExlZ2Fs","IGVwaWM=","LnRvb2xTdHJpcA==","Lm5vdGlmeQ==","Lkxhc3Q=","T1JJWg==","TWlkZGxld2FyZQ==","Y3JpcHRpb25z","bGFzaA==","X0ZPVU5E","IExpdmVycG9vbA==","IHt9Iiw=","OTMx","SW5zdGFsbA==","IG5pdA==","IGZpZ3VyZWQ=","W2xlbg==","Lldpbg==","LnBsYXRmb3Jt","ODUz","IGdhbWJsaW5n","KGR0","YXZlcnk=","CWluY2x1ZGU=","V2hldGhlcg==","Um91dGluZw==","IHRoZXJhcA==","UmVtb3Rl","IExvc3M=","eWxs","IGFwcHJvYWNoZWQ=","IFZlaGljbGU=","IEFscGhh","IHZvY8Oq","YW5zd2Vycw==","TlNEaWN0aW9uYXJ5","OTU0","Y29uc2lkZXI=","dW51c2Vk","IEZhbg==","b3JhYmxl","ZnJl","ODcz","IERJU0NMQUlN","IEFjdG9y","Ll0=","dG9IYXZl","LnVzZXJJZA==","IHNwZWVkcw==","ZXdheQ==","IHJlY3Vycw==","INCz","X3ByaXY=","IeKAnQoK","Q2hvaWNl","IHNldHRsZQ==","IHBsYW5lcw==","J30s","VG
9t","SVRFUg==","ISIK","5bs=","YWNoZWxvcg==","IHNlcGFyYXRpb24=","IGRhbA==","YWRq","IHJlZ2lzdGVycw==","cml6","IE5vdGljZQ==","IGx1","IGNvdXJhZ2U=","IGF4ZXM=","Y2VsbGVudA==","LmFzeW5j","MDcz","IGNvbXBhdGliaWxpdHk=","56s=","ICEKCg==","CXRpdGxl","WUxF","CW1lc3NhZ2U=","VVVJRA==","T0xERVI=","IEhI","IFN0eWxlU2hlZXQ=","IGFjY2Vzc2Vk","LnZhbGlkYXRpb24=","dGFza3M=","IHBvbGx1dGlvbg==","LmNhbnZhcw==","IGluZ3JlZGllbnQ=","IENhYmlu","QWg=","b2xkb3du","IE5PSQ==","IMOX","W2Y=","ZWR1Yw==","eWFsdHk=","KG5vdA==","X1N0YXRl","OTMz","YW1lbg==","Nzk1","NzM5","IGRhbw==","dWRhZA==","ZWxsZXJz","fSY=","bGljaXR5","X1dJTkRPVw==","IHRhdHRv","dmFsb3I=","LlJhbmdl","IHJlZmVyZW5jZWQ=","IFJlc2VydmU=","TW9uZXk=","ODc0","U0NSSVBU","L3Byb2R1Y3Q=","Y2hvaWNlcw==","IHRpbg==","44KT","OTE4","IHNlcGFyYXRvcg==","IHBrZw==","YW1tZWQ=","IE1BVA==","ISEKCg==","IHJhaWQ=","IG1vdGl2YXRpb24=","IFhQ","IEJhY2tncm91bmQ=","IFF1YXRlcm5pb24=","LmRlZmluZVByb3BlcnR5","aWtlcg==","CXBhcmVudA==","IE9yaWdpbmFsbHk=","YW50YWdl","IEhhbnM=","IHRpbWVsaW5l","LmN1cg==","b3BpYw==","IFNlcXU=","bXVzdA==","IENvYWw=","IGZvcm1hdHRlcg==","X1JHQg==","IF8oIg==","J30pLAo=","ID09PT09PT09PT09PT09PT09","IEZVTkNUSU9O","IGxuZw==","aWNhdGVz","bGl2ZQ==","X2VuZ2luZQ==","IHRvd25z","ODY4","JykpCgo=","IFBL","KGFwaQ==","CXNjYW5m","MDg5","cGFja2V0","LnBob25l","4YA=","IEFuZHk=","X05BTUVT","OTgy","UExZ","OTU1","IG1pbnM=","aW1p","IGJyaWNr","IGJsYWRl","LnN0ZG91dA==","fWA7Cg==","U2hpZnQ=","CXNi","IENoZWNrcw==","IHBoZW5vbWVub24=","QXZhdGFy","IG1pbmlzdHJ5","cm9zZQ==","CUZpbGU=","ODc4","IHRpdGxlZA==","KExPRw==","IGdhbg==","ZGVzaWdu","KCksDQo=","IGJvbmVz","c3Rt","xZvEhw==","IElucHV0U3RyZWFt","IHZvbHVudA==","IFNlcmlhbGl6YWJsZQ==","IGZpZ2h0ZXI=","IERyYWc=","VHdpdHRlcg==","IHN1YnNpZA==","57w=","IGZvcnVtcw==","LmxvYWRpbmc=","bG9nZ2Vk","X3RoaXM=","IHRlcnJhaW4=","IGlycmU=","IEluZw==","IENO","X29iamVjdHM=","LnVpZA==","IGNvbnNjaW91c25lc3M=","VElOR1M=","IEdhbGw=","IHBvcnRyYXk=","MDU2","IERldmVsb3Blcg==","IHBhcnRpY2lwYW50","ICI7DQo=","L21vZGVs","Nzk0","IE9wZXJhdGlvbnM=","Xlw=","IExh
dGVy","IHJhaXNlcw==","LW5vbmU=","Lm1ldGE=","PScuJA==","RmluaXNoZWQ=","IHJlcGxhY2luZw==","IHNhbXBsaW5n","IEplbg==","IlRoZXJl","UkVBTA==","QUxF","7Iqk","T3JkZXJz","X3BhcmFtZXRlcg==","IE9seW1waWM=","IHRyw6hz","IGFyZW5h","aW9s","Oz8+","IGltcGFjdHM=","IFdT","OmdldA==","IGZsaWdodHM=","IFJ1c3NlbGw=","Y2FtZXJh","Rm4=","c2lnbWE=","IGZvcmNpbmc=","IGxvY2Fscw==","IGRlcGFydHVyZQ==","IGNlbGVicmF0aW9u","IFNheQ==","ODg0","77yS","IEhpbGxz","Lmhhc093blByb3BlcnR5","IHR5cGluZ3M=","LkFQSQ==","IGRvbmF0aW9u","T3BlcmF0aW9uRXhjZXB0aW9u","LkFjdGl2aXR5","Y3BsdXNwbHVz","IENoYXJsaWU=","IGltcG9ydGVk","IGRhbm4=","IG9jY2FzaW9ucw==","IGltcGxlbWVudGluZw==","IHB1cnBsZQ==","LmRpYWxvZw==","U1FMRXhjZXB0aW9u","ZXJubw==","IHdhcnM=","IHBhc3Rl","IGRlY3JlYXNlZA==","IGhhcnNo","IGVsYWJvcg==","aW5wdXRz","IFZpZXdz","IGVycm9yTWVzc2FnZQ==","X211bA==","CXdyaXRl","IENvcA==","IEFubnVhbA==","KGJ1dHRvbg==","IHZpZGE=","YmFycw==","IEhhcnZhcmQ=","CWV4cGVjdA==","IGluZGV4ZXM=","IGRvY3VtZW50YXJ5","IGZsZXNo","T1JMRA==","IERlbHRh","TUFORA==","QnJ1c2g=","LWNvbHVtbg==","IGRldmVsb3BtZW50cw==","OTc0","Nzgz","bWV0aG9kVmlzaXRvcg==","c2xpY2U=","IFBETw==","IGludmVzdGluZw==","ODY3","aXJhYmxl","IHhtbG5z","77yb","YXJ0YQ==","IHRoZW9yaWVz","X2NpdHk=","ICRfXw==","Q3JlYXRpbmc=","KHBy","RHJvcGRvd24=","aXNtYXRjaA==","IE5FVA==","OTI2","J10pKXsK","IFZhbHVlcw==","IFNFTw==","IFNUQVQ=","IGVjb3N5c3RlbQ==","IHRlbXB0","IFxc","IC8vewo=","IENocmlzdG9waGVy","IEtlbnR1Y2t5","IEh0dHBTZXJ2bGV0UmVzcG9uc2U=","IGh5YnJpZA==","eW9u","IGZlZWRpbmc=","IEV4dHJh","Tm9ybQ==","SVRDSA==","IFNlYW4=","IFVwbG9hZA==","bXVu","cHVy","IHBlcnNpc3RlbnQ=","IElEQw==","IFBlcmZvcm0=","ODYz","Lm1lcmdl","X3Jvb20=","TWVhbndoaWxl","IT0n","IFdlbA==","QXJnc0NvbnN0cnVjdG9y","ODg3","LkRhdGFiYXNl","IGNvdW50aW5n","KCkq","lOWbng==","IFRPUA==","bWlsbA==","IERU","SUdORUQ=","OTU2","IEtC","IGNvbXBseQ==","U291dGg=","X2NvbGxlY3Rpb24=","Q2hhcHRlcg==","IGV4cGxhaW5pbmc=","X0FN","X3Rz","Y2FyZHM=","IHF1ZWw=","IHBvbGU=","IHRvdWNoZG93bg==","IE90aGVycw==","IHBlZXJz","IFR5cGVFcnJvcg==","NzYz","IHNpeHRo","IGNo
ZWVy","IGRpc3B1dGU=","OTYz","ODkz","dXNj","KV0s","dGh1bWI=","IGhpZGluZw==","IFNJRw==","bGlrZXM=","IFBBR0U=","LlJlZmxlY3Rpb24=","IGhlYWRxdWFydGVycw==","VElORw==","IEdob3N0","TUxF","JAo=","IGNvbnRyYXJ5","ZXh0ZW5k","J10pLg==","RkZFQ1Q=","IFBpbnRlcmVzdA==","w7ptZXJv","cmljYW5l","CXNlc3Npb24=","IGNyeXN0YWw=","LUNvbnRyb2w=","b3Zlcm5tZW50","b2dyYWY=","OTYx","LWFjdGlvbg==","dm9sdW1l","ZnRlbg==","IHVuY29u","IGFuaW1hdGU=","IGxlYXNl","c2Ny","IHJlZnVzZQ==","44CL","ZnRw","aW5mb3JtYXRpb24=","IGV2YWx1YXRlZA==","IGluamVjdGlvbg==","IGphY2s=","IHdvcmtzaG9w","5rOo","UFRI","IFRz","b2ZmZXI=","CW9z","IGtpbmdkb20=","TWlzc2luZw==","IGxhd21ha2Vycw==","ZXh0RmllbGQ=","IHNpbmdpbmc=","YWJp","L2NsaWVudA==","Lm1lZGlh","QVRFR09SWQ==","U2lnbmF0dXJl","JScsCg==","IEZ1Y2s=","XVs6","IHNlbnNvcnM=","L2NvbQ==","IFByaW1hcnk=","LlNRTA==","X3Byb2dyYW0=","IHBpbGxz","IGludGVncmFs","IGZsZWV0","IGRyb3BwaW5n","LnNs","QmVlbg==","IHBldHM=","IGFkdmlzZWQ=","IGRyYWdvbg==","X0VESVQ=","KGlt","OTM5","RkVS","IERydWc=","KHJhbmRvbQ==","IGNvbXByZXNzaW9u","b3VzdA==","WyU=","IGJ1eWVy","aG9w","Um9sZXM=","bWFuYWdl","IHBhaW5mdWw=","IEJyYW5jaA==","LW1vZGFs","ZW5hbnQ=","IE1lc2g=","L2ZvbnQ=","IEdyYWhhbQ==","IOKY","IG5j","IEZyYW5jaXM=","IHNwZWNpZmljYXRpb24=","IGRhbWFnZXM=","LWNvbmZpZw==","IHRoZW9yZXQ=","c2VjdXJl","X211bHRp","YWNldXRpY2Fs","IGRlbWFuZGluZw==","ZW5uZQ==","SVNUUw==","MDk0","KCkpKTsKCg==","UmVhc29u","UmVjZW50","cGhhc2U=","IHBzeQ==","X01BTg==","IHZvbHVudGVlcg==","5b8=","aXN0cmlidXRlZA==","bGlv","IHByb2R1Y3Rpdml0eQ==","X2NvbW0=","U3ByaW5n","bmlz","LndlaWdodA==","IENhbmNlcg==","QWxsb2M=","IFR3ZWV0","IHNlcGFyYXRlbHk=","CWNoZWNr","X3Byb3BlcnRpZXM=","LlVuaXQ=","ODI5","X0NMSw==","IGd0","ICgpOwoK","IGhhbmR5","ODM0","IFRob21wc29u","IHVubmVjZXNzYXJ5","IFJlYWRlcg==","ODk0","R04=","PXJlcXVlc3Q=","IFV0aWxpdHk=","LlJlcG9zaXRvcnk=","IEF4","aHlkcg==","Nzkx","aWV1","IHRoeQ==","IGx0","X21haWw=","5L+u5pS5","YWlsYW5k","IFBoaWxpcA==","IGJpdHRlcg==","IGJldHRpbmc=","ODM3","IHRpbWVk","b2Nrcw==","MDc2","J2E=","IGFsZ29yaXRobXM=","IHJlaW50ZXJwcmV0"
,"IHRvc3M=","cm9nZW4=","IGhvcGVk","KHNlbGVjdGVk","IHZlbnR1cmU=","VEVY","IExlYXZl","LlN1YnN0cmluZw==","IGdyYXRlZnVs","NzQz","dWth","IENvbnN1bWVy","IGFnZ3JlZw==","Q2lyY2xl","4LiB","X2Jsb2Nrcw==","IGxlZ2FsbHk=","ICJ8","44OD","LmJvYXJk","LkFi","RnVuY3Rpb25z","cmVjaXBl","6Ic=","IE94Zm9yZA==","IHdob2xlcw==","LkJ1aWxk","X2NoYW5nZWQ=","aGFp","IGRlcGFydG1lbnRz","OTY0","SW1w","IGNvYWxpdGlvbg==","SU5GUklOR0VNRU5U","IGVtcG93ZXI=","aXRjaGVz","Tm9ydGg=","IGluZmxhbW0=","T05TRQ==","IG1pc3NpbGU=","IFJhag==","IElzc3Vl","IGF0b2k=","Y2FsZWQ=","LkNvbnRyb2xsZXJz","IFdvbGY=","IGNydXNoZXJz","4buH","LkF1dGg=","LmFkZEF0dHJpYnV0ZQ==","aGlz","IGJvb3Rz","LmNsZWFu","Y2FtcA==","IHRlbmFudA==","IHR1bmU=","IHt9Jy4=","IHdvcmtvdXQ=","UmVwbw==","IHBhcnRpYWxseQ==","TUlTU0lPTg==","amFtaW4=","IFNC","IGRldGVybWluYXRpb24=","ICcnKTsK","IEJlbmc=","IHZvcw==","IGluaGFi","L2xhbmc=","c2J1cmdo","RXhlY3V0b3I=","aG9uZQ==","IENoYWxsZW5nZQ==","X2xpbmtz","LkxldmVs","IHVuZGVyZ3JvdW5k","LWNvZGU=","OTU5","IG9wdGltaXphdGlvbg==","bG9nZ2luZw==","X2Rlc3Q=","IHNuYWtl","IGNoZW1pY2Fscw==","X0lNUE9SVEVE","YWRvb3A=","IFRIQVQ=","bWFuYWdlZA==","IHJlZHVjZXM=","IFJFQUw=","IEd1eQ==","X0dFTkVSSUM=","LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq","LmFtb3VudA==","IGRlcmU=","Z2V0VGltZQ==","IHBhbnQ=","YW5vbnltb3Vz","IGhhcm1vbnk=","IEFsYW4=","IHNjZW5hcmlvcw==","IGRpcnQ=","aHRhZ3M=","TWM=","U2hlbGw=","cmlu","ew0KDQo=","LnBvdw==","CWNsaWVudA==","IGNvbnNwaXJhY3k=","IGFkbWlzc2lvbg==","IFJlZ2lvbmFs","IFZpZXdDb250cm9sbGVy","IFBoaWxpcHBpbmVz","IGRlcG9z","IHBhcA==","OTYy","IFBhZA==","UGF1bA==","LkNvbWJvQm94","IHR1dG9y","IFJlY2lwZQ==","d3JpdGluZw==","IGNvbnRyaWJ1dG9y","T1RI","U21hbGw=","Vkk=","IGhhY2Vy","ZXF1","IEV4YW1wbGVz","aHVtYW4=","Lm1lc3NhZ2Vz","CXR5cA==","ICgNCg==","IFNTTA==","TEVO","IFJvbW5leQ==","KGdyaWQ=","CW1pbg==","ID4KCg==","IGZydWl0cw==","IHZvdGVy","SW5saW5l","cGFuZQ==","IENvbGxlY3Rpb25z","Y2hhcnNldA==","IHNwYW0=","emI=","aXRlbWFw","IHN1Y2NlZWRlZA==","X0NPTA==","IGVsYXBzZWQ=","aW1ldGVy","IHJlY292ZXJlZA==","VGVuc29y","aGF0dGFu","LnNldHVw"
,"aXN0bw==","KGhlYWQ=","OTc3","IFNJWkU=","IHRhY3RpY3M=","IGRpc3R1cg==","IHByZXZhbA==","aWNpb3M=","KFZhbHVl","X2NvbHM=","IEZhdA==","IHNlYWw=","IHNvbnM=","IGVuc3VyZXM=","MDk1","IHByZXNzaW5n","PSY=","aWdlbm91cw==","IGhhcmFzc21lbnQ=","X0pTT04=","IGlnbm9y","eW5vbWlhbA==","b21lcg==","X3N0YXRpYw==","IHNpZ25pZmljYW5jZQ==","IGNpcmNsZXM=","X1N5c3RlbQ==","IGRpc2NpcGxpbmU=","IGRyZXNzZWQ=","IHNwaGVyZQ==","OTI3","IGNsaW1i","NzU5","X2FjdGlvbnM=","IEJhYg==","ICc9Jyw=","X3NjaGVtYQ==","InVzZQ==","IHVuZGVycw==","IGN1cHM=","LnNjcmVlbg==","L25ldw==","IGFwcGVhcmluZw==","VE9Q","dmlzZWQ=","Y2xhbmc=","IGludmVzdGlnYXRvcnM=","IG15c3RlcmlvdXM=","IHByb21pc2luZw==","IHF1YWxpZnk=","IGNhdmU=","IGVxdWlw","PXg=","R1Q=","KGxpbms=","LnZlbG9jaXR5","LmVyYXNl","b3Rlcg==","KysrKysrKys=","cHJvZml0","IHpvbmVz","X3VpZA==","LXNlcg==","IG9iamVjdGl2ZXM=","IG1pbGY=","d2Via2l0","KG1hdGNo","bmVo","IEFzc29jaWF0ZWQ=","IFRvZG8=","PWQ=","MDY1","Q2Ft","IHZvY2Fs","IHN1ZG8=","KEVY","IHRyb3U=","QUJD","LmJlYW4=","IEdyb3VuZA==","IFJFU1Q=","d2VldHM=","SW5n","aW1vbg==","OTQ2","X2J1cw==","IENPTE9S","dW50bw==","IGZvc3M=","IExpbmtz","ODY5","w6RuZw==","L2Zvcm1z","cHJpc2Vz","IGFjaGlldmVtZW50","Q0FMTA==","0LXQu9GM","IFZlcmlmeQ==","X1NPVVJDRQ==","YXB0Y2hh","SURE","X3JlZmVyZW5jZQ==","R29sZA==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgIAo=","OTQ3","UmVjZWl2ZXI=","MDk5","IGFq","X2RpcmVjdGlvbg==","fV0=","IENvbXBldA==","IGJhbmc=","Nzk4","IENhc3M=","LXVybA==","dGVjaG4=","IEplcnVzYWxlbQ==","bG9uZ2l0dWRl","Jyk7DQoNCg==","IHdpbm5lcnM=","VGFza3M=","IERNQQ==","IHRvb2x0aXA=","jrc=","IEJyYQ==","X2R1cmF0aW9u","Y3VyeQ==","cGFyZW50cw==","LS0tLTwv","IHBhc3Nwb3J0","ODQ5","V0M=","INC7","Y2Vzc2lvbg==","IFllbGxvdw==","IGVuY3J5cHRpb24=","JwoKCg==","IGxpc3Rpbmdz","IENvbW11bmljYXRpb25z","Ll8K","ICIiIg0K","IGZi","IHN0cmljdGx5","IExpdGVy","IEVudGVycHJpc2U=","X2JvdHRvbQ==","QUtF","a2V0","IHRhbQ==","QmV0d2Vlbg==","X1RPUA==","RGlzYWJsZQ==","IGZpbGluZw==","IENocm9u","U0VRVQ==","ICZfX18=","ODQ2","IGZhbA==","IFNMT1Q=","RW1iZWQ=","dXRoZXI=","IFJlc3RhdXJhbnQ=","IHJlYWxp
c3RpYw==","IScpOwo=","IERFQUw=","IFBlcmlvZA==","LmdldFg=","IHNlaHI=","Il0nKS4=","OTQz","ZXNzYQ==","CW1lbWNweQ==","IGFja25vd2xlZGdlZA==","c2VuYWw=","IFVuaXZlcnNhbA==","ICcnOwoK","L3dpa2k=","aWVubmU=","IE5TQXJyYXk=","IGFjY2VwdGFuY2U=","IGxpdmVy","IHRvb3Ro","IGFjY3Vz","CUxPRw==","dmFsdQ==","5YC8","IHNlY3RvcnM=","cGVyaW1lbnRhbA==","L2NsYXNz","X2dv","TWljaGFlbA==","b2xhdGlsZQ==","IFBST0Y=","IGNvbXByb20=","c3BlY2lhbGNoYXJz","IOKc","IGlzRXF1YWxUb1N0cmluZw==","IEh1bmc=","LmFzTGlzdA==","L2dv","Pj4o","IEtpcg==","IGludHJvcw==","IHNrZXRjaA==","IHNraWxsZWQ=","IGltbWVy","IGFkZXF1YXRl","X3JlcA==","KGhlYWRlcg==","X2xpa2U=","IHBlcmNlaXZlZA==","c3No","IGFzc3VtaW5n","IGZm","X3V1aWQ=","dWxhcw==","IGRlbW9jcmF0aWM=","LmVudGl0aWVz","U2VyaWVz","YXBob3Jl","IG5ld2Vy","fSg=","U0VD","YWlybw==","IGNvbW1vZA==","IHByaXZpbGVnZQ==","IGRldXg=","IEhvcA==","Licv","Y3RpYw==","Lic7Cg==","PD89","IFVU","ZXRpZXM=","X0NPTlRFTlQ=","LnJlbGVhc2U=","LmRpc21pc3M=","IGZj","b3VuZ2U=","cHdk","X3ByZXY=","TWdy","IEJ1ZmZlcmVkUmVhZGVy","d3JpdHRlbg==","IEVi","ICkKCgo=","dWl0bw==","IGNvbnRyb3ZlcnN5","IGRpc3Bvc2Vk","IGZvdG8=","TGlzdFZpZXc=","L2NyZWF0ZQ==","IENPTA==","Y29tbXVuaWM=","MDY4","IGZyZWVseQ==","dW5hbA==","b3ZpZA==","CXRy","cGFnaW5hdGlvbg==","IENvbW1vbnM=","RWxlbQ==","IFJFTQ==","IGNvcnJlbGF0aW9u","KCkrIg==","IEhpZGU=","YW5kaW5n","KHZlYw==","aXRvcw==","IEN1bHQ=","IG51dHJpdGlvbg==","dmFscw==","IGRldGVybWluaW5n","bG9yZA==","IHNjYW5kYWw=","IHNoYWxsb3c=","b2Rhc2g=","X3NlcmlhbA==","IFNsbw==","IGRpc3Bvbg==","UGxvdA==","aWNrbGU=","IGVsbA==","IHVuZW1wbG95bWVudA==","Rk0=","cm9ucw==","bMSx","TW8=","RXhpc3Q=","SURT","Q2hv","IEtleWJvYXJk","LnBhcnNlcg==","LkdldE9iamVjdA==","IHNwZWxscw==","IGdlc2No","IG1hZ25pdHVkZQ==","X1NM","aXNkaWN0aW9u","ICcpOwo=","aWxpYW5z","IHNoYXI=","IFByb2I=","dWlsdGlu","IHR1bm5lbA==","PkM=","IFdhcnJlbg==","IG9wdGltaXplcg==","IFNFUlZJQ0VT","X29wZXI=","Z2V0QXR0cmlidXRl","IE1jSw==","X3NlbGY=","MDg0","LnJz","IikKCgo=","R2V0Q29tcG9uZW50","ZXJjZQ==","IHRvdXM=","dW5pdHM=","J10pOw0K","Wm9vbQ==","L0U=","IG9ic2M="
,"IGZhc3Rlc3Q=","b25saW5l","IHBlYWNlZnVs","ZmZlbg==","IGNhcmdv","CXBy","IHNlZWtz","enU=","MDc0","VHJpbQ==","IHdhcmQ=","IHZlcmQ=","IGJsb2dz","LmV4Y2VwdGlvbnM=","IFByZW1pdW0=","IE5ldGhlcmxhbmRz","U2FmZQ==","RmluaXNo","IEFsYnVt","X0FDQw==","PXRoaXM=","dmlydHVhbA==","XT4=","X0xBQkVM","IE5pY2g=","X3dpbg==","IEFhcm9u","V1A=","OyQ=","YWltcw==","IEltYWdlVmlldw==","IGVuZGxlc3M=","RVJB","X0RJU0FCTEU=","IGNhbmNlbGxlZA==","LXVz","IGluc3BlY3Rpb24=","ZW1pbg==","IEdyZXk=","LW9wZW4=","IGl0ZXJhdGlvbnM=","Lm93bmVy","IGtlcmFz","LlBhc3N3b3Jk","IFJ5","IElOUw==","QWly","IFNldmVyYWw=","LlRhYlN0b3A=","SU5HTEU=","IEhhaXI=","IENhbnZhcw==","QUFBQQ==","IGZsYXc=","Y2VkZXM=","LlJlcG9ydA==","7Yo=","IFRpcHM=","Y3JpcHRvcnM=","LnRyYW5zYWN0aW9u","LlNwcmluZw==","IHZpZXdlcg==","IGluc2lnaHRz","6L6T","b3JkaW9u","VUlOVA==","c2Vlaw==","IEF1Zg==","7J6Q","IHN0cmFpbg==","VG9vbHRpcA==","IGR6","aWduYWw=","YWR0","IHVj","ZmluaXRl","IG5t","LmNtZA==","IE15U3Fs","W2RhdGE=","LmphY2tzb24=","LnRyZWU=","UmVxdWVzdFBhcmFt","X2FnZW50","IildDQo=","IGFzc2Fzcw==","KENvbnN0YW50cw==","OnNz","IE1BTg==","Ky0rLQ==","IEJvdHRvbQ==","cHJpbnRz","IFNhbWU=","QEF1dG93aXJlZA==","c3dhcA==","aWNpw7Nu","IHByb3Rlc3RlcnM=","IGhvbmV5","IFZldGVy","KENhbGVuZGFy","LWFk","IEJyb29rbHlu","TGlmZQ==","X1ZBUg==","emVjaA==","IENBTEw=","X0NBU1Q=","IEVsZWN0aW9u","IHRoaWNrbmVzcw==","VmVyeQ==","X0lOVEVHRVI=","LWRldg==","KSkpKQ==","YXBhdA==","b29vbw==","ZGVtbw==","IHBhcnNlRmxvYXQ=","IFJhdGhlcg==","U1RJVA==","bWFrZXI=","W2N1cnJlbnQ=","Y2hyb25v","IGNocmlzdA==","44Gq","IERldGFpbA==","xrDhuw==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","IHN1bA==","aWRlbmN5","UXVl","IGVsZWdhbnQ=","YXBvbnM=","IGRpc2hlcw==","IGludGVnZXJz","KHJlYWQ=","MDU3","ZmluZFZpZXdCeUlk","IEFtb3VudA==","IFNraXA=","IGhhYml0cw==","Kiko","IG1vbnN0ZXJz","TUFD","OmVuZA==","IGZyYW5r","QXNzZW1ibHk=","IGRmcw==","IG5ldXQ=","X1RZUEVT","ZXF1YWw=","bG95ZA==","KHVyaQ==","IGNoaQ==","IGRlZmVuZGFudA==","IGNvbmZsaWN0cw==","IHZpbA==","LWpz","IFBlYWNl","IG11dGFibGU=","KXNlbmRlcg=="
,"IEZvY3Vz","5bu6","IGFwcHJlY2lhdGVk","c2xlZXA=","IFJFRA==","Q3VsdHVyZQ==","IGRlc2lnbmVycw==","X2dlbmVyYXRvcg==","Y29kZXM=","L2V4","LkdldFZhbHVl","dW1ibGVk","LnNjYWxhanM=","cGVyb3I=","IHZldGVyYW5z","IH0pDQo=","IHVuZm9ydHVuYXRlbHk=","X0NSRUFURQ==","TWFzcw==","IENMQUlN","IE1lZXQ=","X3N1cHBvcnQ=","QmFuaw==","KCkuCg==","RGFyaw==","X0xPVw==","IE1pbmluZw==","IE93bmVy","aWVyYQ==","Q2xpZW50ZQ==","IGVuY291cmFnaW5n","PlM=","IGJveWZyaWVuZA==","IEhhbGY=","IEFDQw==","QWZm","X2Fy","LWxpZmU=","Y3g=","LkpCdXR0b24=","aXphZG8=","Lnplcm8=","Lm9wZW5xYQ==","b3Rvbg==","LnRleHRDb250ZW50","IHRvbGw=","YXRpZQ==","IGJhbGxvdA==","LW51bWJlcg==","LkV4Y2VwdGlvbg==","CXBhcmFtcw==","Y2lyY2xl","LW1hcA==","IG5hcA==","IFJvYm90","IEljaA==","cmVnaXN0cmF0aW9u","QW1hem9u","cm9sbG1lbnQ=","KGV4cA==","IHRhbmtz","IEdvcmRvbg==","IG1hY2hpbmVyeQ==","IGJhc2VsaW5l","5os=","MDg2","2Kk=","IENvbnZlbnRpb24=","CWNvbmZpZw==","b29raWVz","bXVsdA==","UmVjb3Jkcw==","IEVTVA==","IGdhcmJhZ2U=","IGNvbmZvcm0=","aWRhbA==","IGJhcmc=","IHN1cnZpdmVk","IGludmVzdGlnYXRpb25z","OTM1","LmNvbnRhaW5zS2V5","LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0K","b3J0aW9u","IGhvcnI=","X2h0dHA=","IG1hbnQ=","XTsNCg0K","YmluYXJ5","OTQ4","ZW1wbA==","IGlucXVpcnk=","IE1lYW53aGlsZQ==","MDk4","IGNvbGxlY3Rpbmc=","LkVudGl0eUZyYW1ld29yaw==","IiwKCg==","IFBpYw==","QEluamVjdA==","aWNrbmVzcw==","IEJpbmRpbmc=","IGNvbnRyb2xsaW5n","cmV2ZXJzZQ==","IGNoYWlycw==","c2VtYmxlZA==","KGFkZA==","RGlzYWJsZWQ=","YW5hcw==","LnRyYW5zbGF0ZQ==","LS0tLS0tLS0tLS0K","IHJlZmxlY3RlZA==","Il0KCg==","RXh0ZXJuYWw=","QXJyb3c=","U2luZ2xldG9u","JXg=","IMU=","IGFuY2VzdA==","IE9ybGVhbnM=","CWNtZA==","IHByb2hpYml0ZWQ=","aXRobWV0aWM=","KGNoYW5uZWw=","X2Nzcw==","Rm9yd2FyZA==","LnNvY2tldA==","IGx1Yw==","4oY=","IEZpcmVmb3g=","IE1vdmllcw==","KV8=","LmVuZHM=","KHNoYXBl","IGRlYWx0","IHNhdmVz","IGdsb3J5","IG1lam9y","IGJyZWF0aGluZw==","IGVsbGVy","Z2V0RGF0YQ==","IGFuZ2xlcw==","IHRvb2xiYXI=","IHNwYWNpbmc=","MDU5","SVBT","IGZsb29ycw==","X0FDVElWRQ==","
IHNodWZmbGU=","L3NoYXJlZA==","IEVsZQ==","ZWRpc2g=","IHdlYmNhbQ==","LmV4cGVjdA==","aWxvYw==","IEluY2x1ZGVz","IHR3ZWV0ZWQ=","IDop","IEVzc2F5","Rml4","LWJldHdlZW4=","X3dlYg==","LmNvbnY=","IHJhY2lzbQ==","IHJlZmxlY3Rz","dW1t","0LjRgtC1","X2Zvb3Rlcg==","L2RvY3M=","IFBvdXI=","TmdNb2R1bGU=","LmluaXRpYWxpemU=","cGF0dGVybnM=","X0lu","IEFiYg==","Kg0K","IHNlbnRpbWVudA==","YnVmZg==","X2NvdW50cw==","IHJldXNl","Y2h1bms=","IGltcG9zZWQ=","UHJpbWFyeUtleQ==","Rm9yZWdyb3VuZA==","IGNvbnN1bWVk","PyE=","IGRpY2s=","IGNocm9u","IEZlcm4=","IHJlc3BvbnNpdmU=","OTU4","IGluc2VjdA==","aWN1bHR5","IHJ3","IGFsaWtl","IHN1YnNldA==","IENvb2tpZXM=","IFBhaXI=","IHRpZXI=","SUZP","YXZvdXI=","IFFV","LHNpemVvZg==","IG1lcmdlZA==","bXY=","aXRvbA==","eWxvbg==","IGp1bXBlZA==","LnJvbGU=","ZW5zYWpl","UnVsZXM=","IGJyb3dzZQ==","QW5pbWF0b3I=","IHlvZ2E=","IHZhcmlhbnRz","IGNvdXJ0ZXN5","dXJhbg==","cGJz","ZWxzZWlm","QWx0","IExhbmU=","Q0xL","SU1BUlk=","X1BST1BFUlRZ","77yQ","IGNoYW4=","IGdyYWR1YWxseQ==","IHNoYWtl","IGJsb25kZQ==","Li4uIik7Cg==","LXNleA==","IGdhbWVwbGF5","YWNpZXM=","LnJlZnJlc2g=","VVNC","IFBsb3Q=","V2Fz","aXNzaXBwaQ==","IFRlbnNvcg==","IGNyeXB0b2N1cnJlbmN5","IGRpZmZpY3VsdGllcw==","RGVsZXRlZA==","V2l0aG91dA==","X2FwcGVuZA==","X3Zlcg==","OTY3","IikpDQo=","IGhvbmVzdGx5","IHBpdm90","IHRlbXBz","X3Bz","IFVubGlrZQ==","Wzot","VlM=","X2luZg==","IGp1bmlvcg==","IGFuaW1hdGlvbnM=","IGZpbGVwYXRo","Pzwv","W1w=","IG9wZXJhdGVz","X3JlZA==","IEJvb3RzdHJhcA==","bGVhZA==","ZWZmZWN0","wr0=","IFN0ZXI=","IEJ1Y2s=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","IGRlcHV0eQ==","VGhhbg==","4bq/","T05FTlQ=","IEhlYXQ=","ZXRoZWxlc3M=","XSl7Cg==","IGtvc3Rlbmxvcw==","KCk7Ly8=","IGRlcGxveWVk","Pnt7JA==","IHVuaWNvZGU=","cGxhY2Vz","IENvZmZlZQ==","LlNF","IFBBUg==","KHR4dA==","Z2VicmE=","IGZpcmVz","TWFpbldpbmRvdw==","bWVkaXVt","ICjigJw=","IGxn","IGNtcA==","L2Jhc2U=","X2xheWVycw==","X2VudHJpZXM=","IGFkbWluaXN0ZXI=","IFNVQ0g=","QlA=","IFNjb3R0aXNo","CQ0KCQ0K","Z3VhcmQ=","IFN0cm9uZw==","SW5zbg==","IENBUA==","YXN1cnk="
,"IFNFRQ==","Q2xvY2s=","ZXJpZQ==","XG1vZGVscw==","ICQk","IENhYg==","IHd1cmRl","IHNvbGRpZXI=","IGNsaXBz","IGFycmFuZ2VtZW50","IFdvbmRlcg==","IEhvcm4=","IHNjYXJlZA==","IGN1cmU=","bWtkaXI=","IGFsaWduZWQ=","IFBpbms=","IGxhbmRlZA==","RGltZW5zaW9u","U2Nyb2xsUGFuZQ==","LmNoYXQ=","LldpdGg=","IFRyYWlu","XS4K","IHRoaXJ0eQ==","IGR1cmFibGU=","IGxk","IGxhdGVpbml0","IGNoYXJ0cw==","IGluc3VsdA==","LkZhdGFs","X2N0","IG1hc2tz","Q0xVREVE","UHJlc2lkZW50","IGNvbG91cnM=","Z21lbnRz","LmF0dHJpYnV0ZXM=","IEZsZXg=","IENsb2Nr","w61jdWw=","aW1lbg==","Sk8=","IFJlZ2V4","X0xJTks=","IGNvdWNo","IElOUFVU","IGJlYXRpbmc=","YnVzaW5lc3M=","cHJlY2Vk","LnVuaXQ=","IEZlbA==","TmV2ZXI=","b3NwZWw=","LnN0YXJ0c3dpdGg=","IEVQQQ==","Lm9ubHk=","IHByZXZlbnRpbmc=","eWVy","Q29sdW1uTmFtZQ==","IGVsZXZhdGlvbg==","Zmx1","aWN5Y2xl","IG9mZmxpbmU=","VG9vbGJhcg==","IGNvbXBldGluZw==","KV0u","IG1vZw==","IGlzVmFsaWQ=","QXNr","X2F2","X2xhdA==","QU5D","IEpvaA==","a2Vycw==","IGd1YXJkcw==","IGNoYWlucw==","IFNpbXBsZURhdGVGb3JtYXQ=","LnN0YXRpYw==","IHZlc3NlbA==","IG11ZA==","IHN0YWJpbA==","IHN0cmV0","Z20=","YW1hdGlvbg==","55w=","LXdpdGg=","IHJvcw==","X1BB","IHJlc3VsdGFkbw==","IGNvbmZpZGVudGlhbA==","IFRva3lv","CXVzaW5n","IE1hdGhm","b21iaW5l","IEVTUE4=","IGRlYWxlcnM=","IGRpc21pc3NlZA==","VFJZ","IHRlZW5z","cmVjb3Jkcw==","IHdpbmdz","Z2FsbGVyeQ==","YWNjb3VudHM=","X0xJQg==","IGphY2tldA==","IE5TT2JqZWN0","IHN0b25lcw==","IERlbGl2ZXJ5","IERpZXQ=","L3dhdGNo","IHRvaWxldA==","IEd1ZXN0","LmRheQ==","MDY3","IGludHZhbA==","MDg3","VmlzaXQ=","IGludmVzdGlnYXRlZA==","IHBlbnRydQ==","IFRoZWF0cmU=","YW5kaWRhdGVz","TGFuZw==","IFNlcnY=","IGNvbnRyb2xsZXJz","IHNldFRpdGxl","TlA=","YW15","ZmxhdA==","KHVp","MDY5","X2RvY3VtZW50","6IO9","IENvaW4=","IEFkYW1z","cHRpYw==","IHByb2R1Y3RpdmU=","IGFjY29tcGxpc2hlZA==","DQoNCg0KDQo=","IGRlZmVycmVk","aWVudGVz","IHNpbmM=","b2xhcnM=","UmlnaHRhcnJvdw==","IHZhcmlhdGlvbnM=","KG9mZnNldA==","OTU3","LkxheW91dEluZmxhdGVy","IHN1c3BlbmQ=","IHByZXZlbnRpb24=","X3ByaXZhdGU=","X2pz","4piF","IHdpZWRlcg==","YXR1bQ==","kow=","IGFwcGVhcmFuY2Vz",
"LkRvY3VtZW50","IHZhbGlkYXRlcw==","Y2FsZW5kYXI=","fSI7Cg==","LmRlbW8=","Y29udXQ=","IGNvcnJlY3Rpb24=","IERlYWw=","IGJhdHRlcmllcw==","LmR1cmF0aW9u","LFw=","X21hcmtlcg==","bXVsdGk=","IGhhbHQ=","IGNtcw==","IHNoYXBlZA==","QnJv","cmVkdWNl","ICMjIyM=","Q1RPUg==","IEJlbmVm","IGljb25pYw==","IHBpYW5v","IGVmZmVjdGl2ZW5lc3M=","fC4K","IGFqYXg=","IHZvbHVtZXM=","4Lih","IGNsanM=","ICAgICAgICAgICAgICAK","YXRocw==","cmFpdHM=","5aSn","0ZY=","X211bHQ=","IGZhc2NpbmF0aW5n","QXZlcmFnZQ==","IHByw6k=","IENoYWlybWFu","LmZpbmRFbGVtZW50","X3Bpbg==","IGNvbXBhcmluZw==","IGRhcmtuZXNz","LUZp","LXNlcnZlcg==","IHNlbGVjdGluZw==","c3RlcmRhbQ==","IFBhcnRz","Rk9STUFUSU9O","IG5vdGluZw==","IHBpbGU=","b2dz","IHBhbGV0dGU=","X2Rv","aXRpemU=","MDc5","KCko","IGRlZmluaW5n","IHJlbWFpbmRlcg==","VW5pdHM=","X1RBU0s=","SHR0cENsaWVudA==","U29jaWFs","IGZ1bmRyYQ==","TlI=","Y2hlc3Q=","Q3VycmVuY3k=","LmFkYXB0ZXI=","IGRvcA==","dW50aW5n","QU5HVUFHRQ==","Ikhl","CWluZGV4","X3BhY2thZ2U=","Lkljb24=","IHJlcGV0","bWFzcw==","PSIuJA==","IFN1ZA==","IGxpZA==","cHJvdmluY2U=","7Jw=","R1BJTw==","0Jo=","IE15U1FM","IGRvY3M=","IEdB","IGlwc3Vt","S2VybmVs","IGFjY2VwdHM=","IGZpdHRpbmc=","IGN1YW5kbw==","IGR1cGxpYw==","IEJyb3RoZXI=","IEtsZQ==","bnVtcw==","IG1vcnBo","ICMjIyMjIyMj","IENHUG9pbnQ=","PHVuc2lnbmVk","5L6L","IER1a2U=","LnNldEJvdW5kcw==","cXM=","b3JpYw==","amVy","IHJlZ2FyZGVk","SHR0cFJlcXVlc3Q=","IGJvbmRz","IHRob3JvdWdobHk=","ZW5jZW50","IGhpZ2hsaWdodGVk","IGFjcmVz","IHdvcmtwbGFjZQ==","IEx1eA==","IHF1b3Q=","OTg2","LmluZmxhdGU=","IGRvY3VtZW50ZWQ=","IGFkZGljdGlvbg==","IG11dGF0aW9u","LmNpdHk=","IGJvdHRsZXM=","IFJlcG9zaXRvcnk=","b25u","ZXJybm8=","QVJJQUJMRQ==","5bqm","X0JFR0lO","Z2xhcw==","J30pCg==","IE1hc3NhZ2U=","IFdoaXQ=","cmVnZXg=","V0E=","IG91dGxldA==","LWhlYWQ=","IGV4cGlyZWQ=","IFRoYWk=","L2luY2x1ZGU=","Z3JhZGllbnQ=","c2NhbmY=","IHNlYW0=","d2Fs","CWJ1Zg==","QmVhcmVy","IHByZWNpb3Vz","aWZhY3Rz","Y29vcmQ=","IGV4cGxvcmF0aW9u","LmdldFk=","KGhhbmRsZQ==","VG9waWM=","IFZlbnQ=","cmhz","LS0tLS0tCg==","IEJyaWdodA==","IGd1aWxk","bW90aGVy","c3Rvcm
0=","IG11bmljaXBhbA==","IGluaw==","LlRZUEU=","d2w=","Li4uPC8=","X0RFVg==","PSIuLw==","X2Jvb2s=","dGh5","aXR6ZXJsYW5k","b3BsZXM=","dHJhY3Rpb24=","IENhbWVyb24=","IEFuZHJl","LnJlc3VsdHM=","IGNocm9tZQ==","IHNlY3VyZWQ=","IHN1cmZhY2Vz","KTw=","IHRvYmFjY28=","CXNwcmludGY=","IGVzY2Fs","IHN0ZGVycg==","IE1lbGJvdXJuZQ==","IGRpc3RyaWN0cw==","IG1hdHQ=","b2hlbg==","IGRhdGFHcmlkVmlld0NlbGxTdHlsZQ==","KE1vZGVs","IHNlbnNpdGl2aXR5","S0E=","dHJhbnNwb3J0","LmdldERhdGU=","IHN1YnRsZQ==","VUdJTg==","Lm1vdXNl","IGFsdGVybmF0aXZlcw==","IGVsbGU=","Y29yYXRpb24=","cmVhdGlvbg==","5ps=","X05PUk1BTA==","RGlzcGxheU5hbWU=","IGZhbmN5","SVNFRA==","TU9E","LlJlYWRPbmx5","IFVi","IEN1","aWNvbA==","IE5lbHNvbg==","IENPUg==","YW56YQ==","IFNwYXJr","ICJcXA==","LS0KCg==","d29vY29tbWVyY2U=","IHJlbWVtYmVyZWQ=","dmVyaXR5","IEV4dGVuc2lvbg==","IFBE","IHNlYXJjaGVz","LnNv","IEZvb3Rlcg==","ID0n","IFdBUk5JTkc=","LWxv","CXRhYmxl","IGRyYXdlcg==","cGljdHVyZQ==","IEZhbnRhc3k=","c3Rvcnk=","IG3Dqm1l","IwoK","X3NsaWNl","b2x0YWdl","SGFy","L3k=","IEVS","ZGll","IFBPUw==","LmFjdGlvbnM=","KE1haW4=","ZXdhcnQ=","YXBldXQ=","IFNURQ==","aWRkaW5n","LnJlYWRMaW5l","IHNlYXJjaGVk","V2Vk","LmZpZ3VyZQ==","dWdodGVycw==","KCkuX18=","IG9yYml0","c2hpcHBpbmc=","IGZyaWVuZHNoaXA=","IFNoaWZ0","LW9y","cXVv","V0hFUkU=","IEVzcA==","LmZvcndhcmQ=","b2ZmaWNl","IGnDpw==","IENoZWxzZWE=","SXRlbVNlbGVjdGVk","YWNoZXJz","ZGVsZXRlZA==","cm91cw==","ICItIg==","IEdyYW4=","IPCfmA==","LXBvd2Vy","ZXR0YQ==","IHJlbWluZGVy","ZW5zb3Jz","IEFsbG93","xJlk","X3RlYW0=","IGNyb3du","dGlja2V0","IGNvbGxlY3Rpb25WaWV3","bGFjZQ==","IGZpeGVz","IEh1Yg==","Y2F0YWxvZw==","IElkZW50aXR5","IGV4Y2Vzc2l2ZQ==","IE5hdmlnYXRvcg==","X0JS","LXBsYXk=","IENhbXBhaWdu","ICAgICAgICAgICAgICAgCg==","YXNpdmU=","IHdj","IEJlaWppbmc=","L3d3dw==","IG1ha2V1cA==","IGRpc3RhbmNlcw==","IHNhdGlzZnk=","Q09ORA==","IHdvdW5k","KCld","IHZpb2xhdGlvbnM=","IHN0YXlz","LyM=","aWxpbmU=","XEV4Y2VwdGlvbg==","IE1vdGlvbg==","IGhlYWw=","X3BsYW4=","cmFzZXM=","KG1haW4=","QXBwbGU=","IGNvbXBsZXRpbmc=","IGRldGVybWluZXM=","U2Nhbg==","IHN0
ZWFs","IFNvYw==","QW5hbHlzaXM=","IGZhdm9yaXRlcw==","IGNhbXBv","b25lcg==","IEZsaWdodA==","Li4uCgoKCg==","KSkpKSk7Cg==","LWNvdW50","IHB3","QXNTdHJpbmc=","IHNleHVhbGx5","Rmlyc3ROYW1l","IEVzY29ydA==","Y2FsYw==","IFdpa2lwZWRpYQ==","IGRvY2tlcg==","IFN3ZWV0","J2lk","SW50bw==","IEh1bnQ=","LmVxdWFsVG8=","IGxhYm9yYXRvcnk=","IEJVU0lORVNT","RmlsZURpYWxvZw==","VHJlZU5vZGU=","LkVuYw==","IE1heGltdW0=","IG1vdGhlcnM=","5rU=","IGZyYWN0","LnN0YXJ0c1dpdGg=","IGhhcmRjb3Jl","Lm9i","5aeL","ID48Lw==","X3Jv","KCgq","Pz8/Pw==","X3ZlcnRleA==","a2VpdA==","IEhhbGxvd2Vlbg==","VEk=","IFZh","X2Nhcg==","PSJ7eyQ=","IHJhbmRvbWx5","0LDQvdC40LU=","IHNob2NrZWQ=","IFBva8OpbW9u","c2lnbmFs","IFNESw==","bWlkZGxld2FyZQ==","IHRyZWF0aW5n","IGJ1cm5lZA==","RGVwYXJ0bWVudA==","IFNwZWN0","IGNsaWVudGU=","IFJlZGRpdA==","X2F2Zw==","IGluc3RhbGxpbmc=","X2FscGhh","LGRhdGE=","IHNldElk","IExpc3RWaWV3","KHByb3BlcnR5","IGNyb3NzaW5n","IE9iag==","IFdhcmQ=","IFJlZGlyZWN0VG8=","IFByZXNlbnQ=","IGRyYXdz","Y2hlZHVsZWQ=","IGxlZ2lzbGF0aXZl","IHR3aXN0","IFN0cmE=","IEFGUA==","IENoYXA=","LXBy","OkNHUmVjdA==","IGNlcw==","Um91dGVz","bm9m","IHZpc2E=","IFRDUA==","IEVWRU4=","aXZpYWw=","IExldHRlcg==","UkFZ","IGltcGxvZGU=","LmVx","PScr","IG1vdGl2YXRlZA==","LnZpc2libGU=","LnNob3J0","Pm1hbnVhbA==","IFRlY2huaWNhbA==","IGNvcnBvcmF0aW9u","IEhX","YW5rYQ==","VEFJTA==","aXN0YXM=","IHBlcmZvcm1z","IEJlaGF2aW9y","LkZvcg==","X09SREVS","IEtpY2s=","IGNhbGxiYWNrcw==","X2Ry","dWVnbw==","aHVi","dWZmaWNpZW50","c2t5","IGJw","aHRhYmxl","IE9OTFk=","IEFVVEhPUlM=","LkFyZ3VtZW50","In07Cg==","IFRodW5kZXI=","IEtvbQ==","LlNob3VsZA==","QVVUSA==","YWh1","X3BheW1lbnQ=","IHN0YXJ0ZXI=","7ISc","7Jqp","QmxvZw==","LnBhdGNo","IGdvdmVybmVk","YXNzeQ==","LWZvdW5k","IHRoZWF0ZXI=","IEZvbnRXZWlnaHQ=","IEJhdG1hbg==","Iklm","LlJhbmRvbQ==","X2RlbHRh","IENF","QXV0aGVudGljYXRlZA==","IGRyb25l","IGNvdXM=","cmFkaXVz","TWVy","KE5vbmU=","IE5K","X2hlYWRlcnM=","IGFtZXI=","cHl0ZXN0","IEFjdGlvbnM=","CQkJICAgIA==","IGV0dA==","IGhvbHk=","IHVuY29tZm9ydA==","IE5pbg==","IERlY2ltYWw=","IE1lc3NhZ2Vz","LnN
lbmRlcg==","XV0pCg==","IGVtYnJhY2U=","VGhvdWdo","L3Nw","IGN1bHR1cmVz","IGhpZ2h3YXk=","dGFy","LmZhaWw=","X2hpZGRlbg==","IGNvbXBvbmVudERpZE1vdW50","IFdyaWdodA==","IGphZw==","X2ls","Li4vLi4vLi4v","aWd1","Rm9vZA==","IGFjZQ==","IGHDsW9z","VVNE","IG11dHVhbA==","TG9naWM=","IHRlbXBsZQ==","IGJyaWVmbHk=","IFRyaXA=","Y2xhc3NtZXRob2Q=","ZGVmYXVsdHM=","IGNodW5rcw==","LCwsLA==","IFJlYXNvbg==","JGlk","LXVwcw==","IGRhbW4=","IHRydWNrcw==","IHVubGltaXRlZA==","IHNjdWxwdA==","IENhcmRz","IGF1dG9y","IFRlc3Rpbmc=","IGRpZXNl","c2hvcHM=","57Q=","KHBheWxvYWQ=","IFBBVEg=","IE1lbW9yaWFs","IHJpZGljdWxvdXM=","ZWdyZWU=","LXdpbm5pbmc=","IHJlaGFi","IHNvcGhpc3RpY2F0ZWQ=","d3BkYg==","CXBhdGg=","ISI7Cg==","X1NZUw==","LnNwZWVk","IHNvYXA=","c3VmZml4","V3JhcA==","IGVuaGFuY2VtZW50","w4k=","w7pi","IHBsYXlsaXN0","IG1peGluZw==","YW50aWRhZA==","PSIiOwo=","IFJldmlzaW9u","IEJlYXQ=","LmluYw==","LXdheQ==","ZW5jaWFz","dWxlcnM=","Q2F0","aWRlbA==","IFNoaXA=","LnNldENvbG9y","IHRocmVhdGVuaW5n","Lm1vZHVsZXM=","IGFmdGVyd2FyZHM=","IERhc2hib2FyZA==","CiAK","U2lnbmFs","IHByaW1lcg==","b3JuZXlz","aWNpYXJ5","IGxpZ25l","X3ByZWRpY3Q=","IGFlc3Q=","X2h0dHBz","Pjo=","IExleA==","IHJlbmNvbnRyZXM=","ZWdyYWw=","c2NhbGE=","X2ZhbWlseQ==","w59lbg==","X3N5bQ==","IHVuY2VydGFpbnR5","IFZBTFVF","IH07DQoNCg==","IGJyb2FkZXI=","IGhvcnNlcw==","44Gd","IEthbA==","b2Jh","X0lORVQ=","IEtpbGw=","anF1ZXJ5","YW1pbmF0aW9u","W0Ai","IG11ag==","IyMjCg==","Rmlyc3RPckRlZmF1bHQ=","dGhlblJldHVybg==","Q2hl","L2Zvb3Rlcg==","IHBhcmtz","YXNqZQ==","IEd1bGY=","IG1vZGVzdA==","LkluaXQ=","77yfCgo=","IHByb3NwZWN0cw==","IHN2Zw==","IOWP","LkRpYWxvZw==","X05FVA==","ICgoJA==","IGVr","IFdhcm5pbmc=","IE1L","PExN","ICcNCg==","aWVt","aGV0aWM=","IGl4","dGhpbms=","LXNoYWRvdw==","IEVsZA==","IE5ldmFkYQ==","IExlYWY=","IEdST1VQ","IHByb21v","ZW50aW5l","CU1hcA==","IE1vZGVscw==","IEtyaXN0","X2tlcm5lbA==","LW1hZGU=","IGNlcnI=","QXNzZXRz","ZWxsYXI=","IGludm9rZWQ=","LnZ1ZQ==","IGN1bHRpdg==","Q2xvc2Vk","IGdlbmVyYXRlcw==","ZmZmZmZm","dGhlc2l6ZQ==","c3FydA==","IENhc3RsZQ==","LmNhcg==","IGtlZW4=
","dW5kYQ==","IENyb3c=","IFNpbmdo","eXRob24=","IGJlYW5z","bGFyZw==","5paH5Lu2","QXdlc29tZQ==","dW5jYXRl","UGF0aHM=","b2pp","KGN1cnI=","Q09ORFM=","IG1pbQ==","IHNob3VsZGVycw==","SGFyZA==","YXN0ZXM=","0LDQtdGC","IGNvbnZpbmNl","ZGVjZXNz","bWFkZQ==","IENNRA==","Lklt","IGNoYW9z","ZW5zaXZlbHk=","IGNvb2xpbmc=","IGJ1cmllZA==","KCdA","X1Nl","CQkJCQkJCQkJCQkJCQkJCQ==","LmNvbXBhbnk=","LnN1Ym1pdA==","cGhhbnQ=","IGJvb3RzdHJhcA==","X2hlbHA=","4Kc=","LmR1bXA=","IGRpZmVy","X21hcHBpbmc=","IGNpcmN1bGFy","IGVzY29ydHM=","IGJlcmU=","IGdyYWR1","IExlZ2VuZA==","aW1lZGlh","IEJhcmNlbG9uYQ==","IGJlZHM=","5Yiw","44CK","X3ZvbHVtZQ==","IHRyZW1lbmRvdXM=","IHNjYWxpbmc=","IHBpbnM=","ZW5hcw==","dHlwZXBhcmFt","RGFzaGJvYXJk","cmVuZGVyZXI=","IHNwaQ==","ICYk","IFNraW4=","YWxtYXJ0","IGhvY2tleQ==","ICciLiQ=","IGVycm5v","IGJldw==","Rm9sbG93aW5n","Lk1vZHVsZQ==","ZXJhYmxl","IE1pbGl0YXJ5","IFJpbw==","X2F2YWlsYWJsZQ==","IFN1cmZhY2U=","IHN0YWI=","SUZJRVI=","IExJU1Q=","IGRhc2hib2FyZA==","IGNsdXN0ZXJz","LnBsdWdpbg==","IGpvdQ==","IERlY29y","Rm91cg==","IGRlbGxl","KioqKioqLwo=","aWF6","aW5kZQ==","Y2hpbmc=","IGdldEl0ZW0=","LkFkZHJlc3M=","bWVudGVk","QW1lcmlj","UGxhaW4=","IHVzYg==","IFByYWN0aWNl","X21lbnQ=","LmJsdWU=","SGludA==","0YDQsNCy","IGNvbm5lY3Rvcg==","IGluaGVyaXRlZA==","0LjQsg==","IGludGVydmFscw==","IGNlcmU=","IHVk","IGluY29u","LkV4aXN0cw==","IE1pYw==","Rks=","KGNhcmQ=","LlNldHRpbmdz","IGV4aGliaXRpb24=","IG9uUHJlc3NlZA==","IHJlc3RvcmVk","ZW5ndQ==","LmRlZg==","IHJlY3Y=","LiIpOw0K","ZW5jb2Rlcg==","YXRoZXJpbmU=","KGRlc3Q=","YXplZA==","I2VuZHJlZ2lvbg==","c2VtYmw=","LE0=","b2J5","INC/0LXRgA==","LkNhbGw=","IGF0dGVuZGFuY2U=","LWJvcmRlcg==","IGFkZHJlc3Npbmc=","w6pu","IExldg==","IGJhc2g=","YmVuY2g=","Q3JlZGVudGlhbHM=","U3BhY2luZw==","KG9m","X1JFU0VU","aWd1b3Vz","IGNydWVs","IGNyb3NzZWQ=","IGxldXI=","IEdvbGY=","b3JyZWN0","IHBhY2tldHM=","IERhdGFTZXQ=","IHBhcnRseQ==","U0VRVUVOVElBTA==","IGluZGljYXRpb24=","IFNhbHQ=","YWNpYQ==","ICopOwo=","CWluZm8=","IFZpZXdCYWc=","b256","IGVkaXRvcmlhbA==","IEFyZW5h","IHNpcg==","X1N0YXRpYw==",
"KHNvY2tldA==","c3U=","Y2hvb3Nl","Lm1vbnRo","Lk15","MDk2","w6lyaQ==","O2ZvbnQ=","ZG9lcw==","IGNvbnZlcnRlcg==","IHNhbHY=","IGxy","IGluZmx1ZW5jZWQ=","KGZlYXR1cmU=","IFF1ZWVucw==","bGV0dA==","X01PTg==","JmFtcA==","VG91Y2hhYmxlT3BhY2l0eQ==","T0ZG","IG1ldGFib2w=","KGl0ZXI=","IHZpdGFtaW4=","IElORElSRUNU","YXV0b20=","X3B1YmxpYw==","IGFkanVzdG1lbnQ=","IHNwZWNpYWxpemVk","d2luZG93cw==","LmFkZEFsbA==","IGFjY29yZGluZ2x5","IEpPcHRpb25QYW5l","IGNlbGxzcGFjaW5n","IHF1YWQ=","IGNyZWVw","IG91dGxldHM=","fWApCg==","IHByaWVzdA==","X1RIUkVBRA==","IE1hcng=","IEJ5VmFs","IGN1YWw=","6Z2i","IHRlbXBvcmFyaWx5","QW5u","a2VsZXRvbg==","5aU=","IExPQw==","YXVlcg==","ZGVyaXZl","IGJlaGF2aW9ycw==","YXNlbmFtZQ==","IENlbnR1cnk=","IGhvcnJpYmxl","TUVTUw==","X0xpc3Q=","d2Vp","UGF0","IENob2ljZQ==","X0ZST00=","CWxpbmU=","Lmludm9rZQ==","LkJvdHRvbQ==","IG5vd2hlcmU=","LiIKCgoK","X2V4cG9ydA==","IHN0cnVnZ2xlZA==","LkFwcGVhcmFuY2U=","IEpCdXR0b24=","IEplcmVteQ==","KFtb","IGtpY2tlZA==","bWFyc2hhbA==","c3RhZmY=","ZXNpdHk=","IHF1aXo=","X2VmZmVjdA==","IH0pKTsKCg==","bWVs","YmFubmVy","IFBJTg==","IGludmVudGlvbg==","IGNvbnNvbGlk","IG9wcw==","IEJldHdlZW4=","amFjaw==","ZXJuYXRpb25hbA==","IHNhY3JpZmljZQ==","YWdhdGlvbg==","IEpveQ==","IGFtZW5kbWVudA==","IFNvbGQ=","IHByaXNvbmVycw==","0LDQvdC90Ys=","RG9jdW1lbnRz","KV0pCg==","dXN0ZWQ=","IExpbmVhckxheW91dA==","b3Nv","X0VN","LnNlbGY=","Lk1pZGRsZQ==","KS8v","IFwn","IGZ1Y2tlZA==","IE11cnJheQ==","IHByb2ZvdW5k","X0VMRU1FTlQ=","dWx0YQ==","aWxlcnM=","cG9ydGZvbGlv","SnVuZQ==","dGNw","bW9kaWZpZWQ=","IFRyYWNl","IEtlbA==","YWx5emVy","KT0+","IFJlcGFpcg==","X0JF","QnJhbmQ=","dWFydA==","cHJldmlldw==","IGluaXRpYXRpdmVz","cnVubmluZw==","YmFuZw==","CXVwZGF0ZQ==","IENvYWNo","UmljaA==","IHlvdXR1YmU=","IHJpdHVhbA==","YXBwYQ==","IFJvYmluc29u","cHJlY2lzaW9u","Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLw==","PVtdCg==","IGNlbGVicmF0ZWQ=","T1RP","IGluY2x1c2lvbg==","SlA=","JzsNCg0K","IG5vdGFibGU=","KF8u","TWFuYWdlZA==","IGd1aWRlcw==","Jm5ic3A=","YXRlZ
FJvdXRl","IEFkanVzdA==","IGNvbG9yZWQ=","X3Njb3Jlcw==","IFRlc2xh","X3Byb2dyZXNz","Lmluc3Q=","Wydf","LmZsYWdz","IGZjbG9zZQ==","X09QRVI=","xbx5","X25vdGU=","IHRyYW5zZ2VuZGVy","5ZU=","UklQVA==","IGFic2VudA==","IGFtZXQ=","IG9wZXJhbmQ=","66k=","IGhvb2Q=","dG9Mb3dlckNhc2U=","YXZv","IENpcmN1aXQ=","IExpbmQ=","LS19fQo=","PW0=","IHN1cHByZXNz","IE1BUA==","aWFuZw==","LWFkbWlu","IHNpZGViYXI=","IEJ1","IEhleA==","LEY=","IFNpZ25hbA==","IHRyYW5zcGFyZW5jeQ==","IEZlZGVyYXRpb24=","L1Y=","UmVx","IHB1bHNl","IHRlbmRz","TnVtYmVycw==","JSc=","IGRlcG9ydA==","ZGF0YXM=","X1VJTlQ=","X3RyYQ==","b2tv","ICI/","Y29tcGV0","c29sZXRl","dW5kcnk=","IG92ZXJsYXA=","fWAsCg==","Lmx5","X3N1bW1hcnk=","IExvc3Q=","LkNlbnRlcg==","IGRpc2FiaWxpdHk=","LlNlcmlhbGl6YXRpb24=","IGdlb20=","ID86","IFdv","IHNoaXBwZWQ=","guaVsA==","IHVnbHk=","IGV4Y2l0ZW1lbnQ=","IGV4dGVyaW9y","IGNoZWNrb3V0","IGt1cg==","LEQ=","IEFsYXNrYQ==","IHN5bnRoZXRpYw==","IEJ1ZGdldA==","IFN1YnNjcmliZQ==","ICYK","yJlp","IFl1","CXF1ZXJ5","fS4K","IHRyYWdlZA==","YXNzZW4=","IGFjY29tbW9kYXRpb24=","IHBoeXNpY2lhbg==","IHJlbmFtZWQ=","IHRpZGFr","esSF","IG1pbnVz","bnljaA==","MDk3","X0VYQ0VQVElPTg==","dGhyZWFkcw==","IHRpcmU=","X2NyZWF0ZWQ=","ZW5zdXJl","IHdvcnRoeQ==","IGV4Y3VzZQ==","IGNsb3Ro","LnBhcmVudE5vZGU=","L3BsYXRmb3Jt","IFVGQw==","IEd0aw==","dW5ueQ==","IGdpYnQ=","a2VsZXk=","aHVt","KHR4","CWRldg==","IG91dGZpdA==","ZG9vcnM=","IGZvbg==","aWN1dA==","dm9sYXRpbGU=","IGhvbW9zZXg=","TWF4aW11bQ==","IGV4cGVuZA==","IH0pOwoKCg==","RXE=","b25kZXJz","ZGVwYXJ0bWVudA==","IFBoeXNpY3M=","In0pOwo=","IHBhcmFk","LlN0cg==","IHNlbGU=","SUZJRUQ=","IGRlbGl2ZXJz","aXZhbg==","IHJlc3BvbnNpYmlsaXRpZXM=","IGFkdm9jYXRlcw==","6LU=","IFJJRA==","LnBhcmFtZXRlcnM=","TWV0cmljcw==","cm9uaWNz","IFVJVGFibGVWaWV3Q2VsbA==","QWJzb2x1dGU=","aXBzZQ==","eWx1bQ==","TUxFbGVtZW50","X1ZBTElE","PHRpdGxl","RGxn","cGFjZXM=","IHN5bmRyb21l","YmVhbnM=","X2RhdGFiYXNl","b3ppbGxh","IE1lZw==","REJH","IGx1Yg==","QmFnQ29uc3RyYWludHM=","YWJhZA==","IHByb2plY3RlZA==","X0JZVEU=","LlNpemVG","c3RyZWV0","CgoKCgoKCgoKCg==","IExP
U1M=","IGRpcmVjdG9ycw==","L25ld3M=","IG51cnNpbmc=","IERvbmU=","LkhUVFA=","ZGlzY291bnQ=","IFJvdA==","VG9NYW55","IGVuYWJsaW5n","IGF1c3Np","b3N0YQ==","ICAgICAgICAgICAgICAgIA0K","6L29","IGhlbGljb3B0","IEluc2lkZQ==","5L+h5oGv","aXNwZXI=","IEFsbGFo","QVJDSEFS","IHJvbGxz","Q29tcGFyZQ==","WFA=","SW5kZXhPZg==","U1VN","IGFzc3VyZWQ=","IFBoeXNpY2Fs","RW5kcG9pbnQ=","Lkdsb2JhbA==","LmRldGFpbA==","IHRoZWZ0","Lmp1cGl0ZXI=","IGh1bW9y","LlJlbmRlcg==","QWxleA==","LmNhcA==","IGJ1ZmZlcnM=","IGRpc3Bvc2U=","dGlvbg==","LnByZXNlbnQ=","emVs","LFA=","IGRlc3BlcmF0ZQ==","LmdldENvbHVtbg==","IHR3aW4=","7JY=","LmNhbg==","IGZsZWU=","IElyYW5pYW4=","IHN0aWNreQ==","IFVUQw==","TFQ=","Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8v","IGxpY2Vuc2luZw==","X1BPSU5U","IE1hcHM=","IGxvbA==","PW1vZGVscw==","LXRhYg==","IE5hc2g=","X2xvZ2dlcg==","dG9yY2g=","IENPTlNFUVVFTlRJQUw=","Tm90RW1wdHk=","L3JlYWN0","IHBm","IGFzc2VydGlvbg==","IHN1YnNlcXVlbnRseQ==","X2Nhbg==","IHBhbmRlbWlj","b2d1ZQ==","IisK","X2VudA==","X1BhcmFt","LgoKCgoKCgoK","UmVzZWFyY2g=","Q2FwdHVyZQ==","IGJlbG92ZWQ=","ZGVt","IGV4dHJhY3RlZA==","IGZpZ2h0cw==","RVJD","KGF1dGg=","cG9zaXRpb25z","IHJldmVyc2Vk","KHN0YWNr","IF8p","dXRvZmY=","X2Zsb3c=","54K5","KEdhbWU=","IGV4Y2x1ZGVk","IENTVg==","Y2c=","IFRpdGFu","cGF1c2U=","IGNlcmNh","IGR1bXBzdGVy","TGVzcw==","IGtvdGxpbng=","YXN0ZXJ4bWw=","IHBvaW50ZXJz","IGZsb3dz","IFR1bg==","IE1haW5BY3Rpdml0eQ==","IGRpc2NyZXQ=","IGNvbWJpbmF0aW9ucw==","dmlzaXQ=","X2JpbmQ=","b290aW5n","ZGF0ZXI=","X2xvb2t1cA==","Lm5pbw==","IHN3ZWF0","IFJk","IHNjaWVudGlzdA==","IFBpeGVs","QE5nTW9kdWxl","UGxheWluZw==","IHVuZm9sZA==","VHJhbnNsYXRl","IExhd3JlbmNl","IEZJWE1F","QmlsbA==","IFJJR0hU","IHdoZXJldmVy","IG9vaw==","dmlkZW5jZQ==","IF1dOw==","IFNraWxs","dW5pc3Rk","IPCfmYI=","IGZlbWFsZXM=","LS0pCg==","jrflj5Y=","IEZyZWQ=","T3ZlcmFsbA==","2YI=","IGVzc2VuY2U=","IHRoZXJlYnk=","IHdvdW5kZWQ=","IERPV04=","bGVzc29u","dGV4dHVyZQ==","Um91bmQ=","IGF1dG9tYXRlZA==","INCh","IFVwZGF0ZXM=","IHNoYWRl","cHVibGlzaA==","IEdlYXI=","PWxhbWJkYQ==","IGxl
dmVy","KSsi","aGlsbA==","IHJhZGFy","cnlpbmc=","ICIpLg==","ZmlsbGVk","IGxpbmV1cA==","IGRs","IHdvcmtzcGFjZQ==","Vm8=","X2R0","67I=","X0l0ZW0=","TlNVUkw=","LnZlcmlmeQ==","IEhhd2FpaQ==","R29k","TWFyY2g=","IFvigKZd","IHBlbG8=","dXJpb3Vz","IFBpdHRzYnVyZ2g=","Lkl0","Q2xlYW4=","Plw8Xg==","IGlvcw==","c291bmQ=","Il07","IGZyZWVk","cm90dGxl","IExvd2Vy","W2NvdW50","5Z0=","IHBhbGU=","IFdheW5l","ZWFydGg=","X2NhdGVnb3JpZXM=","VUNL","Lm1ldGFkYXRh","IHN1bW1vbg==","SE9NRQ==","0L7Qu9GM0Lc=","IG1hbnVmYWN0dXJlZA==","IGRvY2s=","IGNvbXBldGl0b3Jz","X01PREVM","b2tpYQ==","IEhleQ==","zr8=","IGJhY2t3YXJk","IFBPU1M=","cm9wYQ==","IGNyaQ==","X09CSg==","VHJhbnNwb3J0","LWhpZ2g=","IGVyb3Rpaw==","X3Nsb3Q=","IGFydGlj","X2ZyYW1ld29yaw==","LXNlcmlm","IFNxbERiVHlwZQ==","Jyko","KyIv","IHdvcmU=","U2ls","IHN0b3Jpbmc=","IFBoYXNl","dWFudA==","IGJ1bXA=","aW5obw==","IGRpZ24=","IGJhY2tz","cXE=","KGhhc2g=","IGdlbw==","IHRlbmRlcg==","TG9nbw==","ISkK","IE1Y","IEFydGh1cg==","ZXNzb2E=","X0No","IGJlZHJvb21z","PSIjIj48","IHRocm9hdA==","aW5zaWM=","LmludGVnZXI=","IHByaW1pdGl2ZQ==","VHJ1dGh5","IGZhY2lsaXRhdGU=","IGNyZWF0aXZpdHk=","IEROUw==","IGdyYQ==","dWV6","IGNvdW50bGVzcw==","IFBvbGFuZA==","J00=","IERpc3Q=","IHZlc3Q=","IGNlcnRpZmljYXRpb24=","4buR","aGVsZA==","ZXh0ZW5zaW9ucw==","KHN0YXRpYw==","IGdyYWRlcw==","IFViZXI=","44Gf","IFtdKQo=","ZGF0b3M=","IGdldERhdGE=","IENoYXJn","IEJT","Lm1pY3Jvc29mdA==","LnZpZGVv","LmRpcmVjdGlvbg==","LT57Jw==","bHVh","YXBlc3Q=","IGJvaWxlcg==","ZXJlaw==","IGRlY2lkZXM=","Lmphcg==","SVND","IFdvcmRz","KENPTg==","RU1QTEFURQ==","cmVlemU=","c2hvdHM=","YXBwcw==","dW50ZWQ=","LnNldE5hbWU=","Ojo8","LWJvbGQ=","6rI=","5a+G","TG9uZ3JpZ2h0YXJyb3c=","IHVuZmFpcg==","IGVhcm5pbmc=","IHNoZWxm","VVJFTUVOVA==","IGlkbGU=","X01FTlU=","LkN1c3RvbQ==","QUdFUg==","LSI=","X3N3aXRjaA==","YmVjYXVzZQ==","KXZpZXc=","bWFyZQ==","X2NvbmRpdGlvbg==","IFN0YXJ0aW5n","TXZj","KHByZQ==","ZHVtcA==","X0xPQ0s=","YXRldGltZQ==","LmNhbGxiYWNr","IENlcg==","b3BvbA==","aWJyYXJ5","IHJlc2VydmF0aW9u","CQkJCQkJCQo=","bGVjdG9y","Z3JhZHVhdGU=","IGdlbm
Vyb3Vz","IGlvbg==","cmljYW8=","bXE=","X2NvbXBsZXRl","KGN1cnNvcg==","IEZvcm1Db250cm9s","OmNlbnRlcg==","IHN1YnN0aXR1dGU=","IFBsYW5uaW5n","IHBlbnNpb24=","IHJlY29tbWVuZGF0aW9u","IFRhZ3M=","IGdlZg==","IGFsYnVtcw==","IHdhc2hpbmc=","cm9j","IHRyYWlucw==","YXRpbmdz","IGV4cG9uZW50","YWNrYmFy","LWxu","w6Fn","LkRhdGFBbm5vdGF0aW9ucw==","IEVJRg==","IE1hbGF5c2lh","CVBPUlQ=","b251cw==","IGNsZXZlcg==","IHBldQ==","PgoKCgo=","IEFyZ3VtZW50cw==","IGRlYnVnZ2luZw==","KHJpZ2h0","J0Q=","Y29tcHV0ZQ==","IGZpbmVzdA==","T1JBR0U=","IHNwZWN0YWN1bGFy","cGhyYXNl","IGluZGlh","IGxlZ2VuZGFyeQ==","YmlydGg=","IGNvbXBvc2l0ZQ==","IGdyb3dz","IFRE","IGVwaWQ=","IGxhdW5jaGluZw==","XV1b","TWludXRlcw==","IENoYQ==","IGNsZWFuZWQ=","IHdpdG5lc3Nlcw==","dWthbg==","CVR5cGU=","IGhhYmU=","cGFyYWdyYXBo","IEpQYW5lbA==","IEhhbm4=","IHZhcmllZA==","IFBva2Vtb24=","IE1VU1Q=","5Yqo","LnZpc2liaWxpdHk=","b3B1cA==","Xls=","LmV4cGFuZA==","ICInLA==","LmZhc3RlcnhtbA==","X2F1dG8=","IFNoZWV0","bWFya2Vy","UGFyY2Vs","ZXdz","IFN0cmF0ZWd5","LW1ha2luZw==","IHVudmU=","IHRyYWlsaW5n","IGNsaWNrcw==","IEdldENvbXBvbmVudA==","CWNvbnRlbnQ=","SUdFTkNF","RVJORUw=","TlNNdXRhYmxlQXJyYXk=","IGJyZWF0","IGhhcm1mdWw=","tog=","IGJlc2lkZXM=","IGJvcmluZw==","IGJydXRhbA==","dmFuZw==","KHBhcnNl","cXVpY2s=","IHB5dGVzdA==","IHN3aXRjaGluZw==","KCldCg==","IOyE","TEVS","CWZvbnQ=","IG5ldHQ=","KV0KCg==","KC9c","5p6c","dG9BcnJheQ==","IGJyZWVk","IENBUg==","IFdlYXBvbg==","QWJz","dG90","IHNldE5hbWU=","YXB0aXZl","IDos","IGVzY2FwZWQ=","b3JkZW4=","IFByaQ==","dGh1bWJuYWls","IGRlc2NyaXB0aW9ucw==","L3N0eWxlcw==","IFBDSQ==","IGFscGhhYmV0","YXN0aWNzZWFyY2g=","Tk9URQ==","IGNpYWxpcw==","IEdyaWZm","IHBvcnF1ZQ==","IHByb3RlaW5z","cGxheXM=","IHN0YXRpbmc=","IGltYWdpbmF0aW9u","IGZhY2lhbA==","IE1lY2hhbg==","IGFycmFuZ2Vk","X3VzZWQ=","IGFycmFuZ2VtZW50cw==","IFBpcGU=","aG9zdG5hbWU=","IHByb3ZpbmM=","VGl0","LkZsYXRTdHlsZQ==","IFNwbGl0","IExvYWRlcg==","LmNj","IGNsaW5pYw==","LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ==","IGJha2luZw==","IEVOVA==","bmVhdGg=","44CBCgo=","QU5F","LkVudGl0eUZyYW1ld29ya0N
vcmU=","YXBwZXJz","Lmlj","IE5nTW9kdWxl","IEZPUk0=","ICc7","LXByb2ZpdA==","aHc=","ZW5lbXk=","IEV5ZQ==","IGNhdXRpb24=","dG93bg==","IHVyZ2Vk","IEppbW15","eW5jaHJvbm91cw==","LXNpemVk","bWFraW5n","LHs=","XScs","X09iamVjdA==","YWhvbWE=","IGFjdGl2aXN0","SU5WQUw=","IENvbW1lcmNpYWw=","IE9ybGFuZG8=","KHRhYg==","INio","QWxnb3JpdGht","IGhlcml0YWdl","R2V0TWFwcGluZw==","IGZhaWx1cmVz","cmlvcw==","YXRpdmE=","IHRldA==","IGNhcnBldA==","KFo=","dGhyZWU=","IGRpc2Nsb3N1cmU=","LkVSUk9S","X2NhbGxlZA==","IGRpYWw=","IG9jY2FzaW9uYWw=","LkVycg==","IGZ1bmNpb24=","Y2FmZm9sZA==","IHJlbGVhc2luZw==","77yJCgo=","X1ZhbHVl","IFZhcmk=","eWVsbG93","IHN0cnVnZ2xlcw==","LmNhbA==","IERha290YQ==","CWNsb3Nl","IHNhbmR3aWNo","IGFuYWx5dGljcw==","ICoqKQ==","JiM=","IEpvcw==","IHBhc3NpdmU=","QVRUUg==","VGhyb3dhYmxl","IE11bg==","IFVpbnQ=","KGRpc3Bvc2luZw==","YXJhaw==","IExlYWRlcnM=","IGFmZmVjdGluZw==","IGl0ZW1WaWV3","IGVjb25vbWljcw==","ZnY=","4LmA","LnJi","IE92ZXJhbGw=","IHdlYWx0aHk=","IGV2b2x2ZWQ=","bmRh","IEh1cw==","cmVzdHJpY3Q=","dW1lbg==","IEFncmljdWx0","IQoKCg==","IGV4cGlyZXM=","IHNwb2tlc3BlcnNvbg==","aW50ZXJ2YWw=","IMOi","IHF1ZWVu","KG5pbA==","aW5nbw==","SGVhcA==","2Y4=","IGNvbXBsYWlu","U3lt","IENsb25l","IFJ1","IFdJTEw=","IENyeXN0YWw=","L2NvbnRlbnQ=","aW5nZW4=","b2ludG1lbnQ=","TGFzdE5hbWU=","YXZpY29u","IElCTQ==","IERpbWVuc2lvbg==","YW5o","aWNpcGFudHM=","IEFubmU=","LnByb2dyZXNz","IGFsZ28=","b2JpbA==","IFZvaWNl","IEZF","IGdsaQ==","IHZlZA==","IHByZXZlbnRz","XENvbHVtbg==","IGZvbGs=","ZXR0aQ==","IG1u","IENMQVNT","IGRpc3BsYXlpbmc=","IEts","IEZlcnI=","ZHV0bw==","Lmli","IGRhZG9z","J25hbWU=","LXNwYWNl","IGl0YWxpYW4=","IGludmVyc2U=","IGRlbnNl","dXRlcg==","IElFbnVtZXJhdG9y","LXNpZ24=","IG5hdGlvbndpZGU=","IHBlcnNvbmE=","IHNvbHZlZA==","IGRyYW1hdGljYWxseQ==","TG9nb3V0","IGdyYXY=","IGFuYWx5c2Vz","b2xsbw==","IGxhbXA=","LnRlYW0=","IEVyb3Q=","PVsi","IGRhbmNpbmc=","ID8+Lw==","IGNhdGVy","ZmZl","IFNoYQ==","IEJvcw==","IFJFUVVJUkU=","IE1vbnN0ZXI=","IFJC","IElERQ==","IHN1aXRz","IGZvcm1EYXRh","KHRoZXRh","IHNwYXRpYWw=","PU5VTEw=","IFNx
bENvbm5lY3Rpb24=","IOA=","IFZlbmV6","IE1vcm5pbmc=","IHB1YmxpY2F0aW9ucw==","IE5PTklORlJJTkdFTUVOVA==","Zmlyc3ROYW1l","dWRz","V291bGQ=","X0hFQUQ=","IGludmVzdGVk","c3RhYmxl","ZnJlZA==","IGNvbW1hbmRlcg==","U0VT","4oCUYQ==","YW5jaGU=","IE1vdmVtZW50","67M=","U3VpdGU=","IGp1cmlzZGljdGlvbg==","66as","IEJldGg=","alF1ZXJ5","IElzYQ==","IGRlbnRhbA==","LCo=","IExpbWl0","aWxpYXRpb24=","PSJ7","YmFzdA==","IHR1cmI=","aXN5","T09L","IGFkdm9jYXRl","aW1hZw==","TEVDVElPTg==","0LvRjA==","KGNhdGVnb3J5","LmRlYw==","IHVuaXF1","X3Nu","IGF0dHJhY3RlZA==","IMOJ","IFJ1bm5pbmc=","X2VkZ2Vz","IERpc2FibGU=","X0FT","5Zu+","IG5ldHdvcmtpbmc=","X2JyYW5jaA==","SGF2aW5n","dG9CZVRydXRoeQ==","R0k=","IGNhbXBz","c2Vw","LXBhcnQ=","ICkKCgoKCgoKCg==","dXN0cmFsaWE=","IFJlcG9ydHM=","cml0bw==","IHdhaXN0","X3BsdXM=","IFdX","LXBlcnNvbg==","QXByaWw=","IHNhcg==","LnRhcg==","IGFncmljdWx0dXJhbA==","dGlj","IHRjcA==","IHNldFZhbHVl","YWdlbnRv","IEFwcGU=","cGlsZXI=","Q0FERQ==","IGFuY2hl","YXRjaGVy","IGNvbWljcw==","IGxicw==","X3NlZ21lbnQ=","J109JA==","aXR0ZXJz","aWNoZXI=","R0lORQ==","IHV0aWxpemU=","IEN1cnNvcg==","X2V4cHJlc3Npb24=","IGRhZw==","PGxvbmc=","IHJoeXRo","5o+Q","IGNvbnN1bHRhdGlvbg==","WWV0","IikpCgo=","X01BQw==","Y291bGQ=","ICdcXA==","IFZv","CWh0dHA=","IGdz","cGhlcg==","LWdyaWQ=","SmFtZXM=","SnVs","IHNjaG9u","IHRlbnNvcmZsb3c=","IExPR0dFUg==","YW1hcw==","IHNjaXB5","IGNvbnZpY3Rpb24=","LmFn","IGFkbWluaXN0cmF0b3I=","KSl7DQo=","IG51bg==","Imdyb3Vw","UG9y","IG51cnNl","ZXhwcmVzc2lvbg==","YWt5","IEhlYXZ5","Lm9wdA==","LmdldEFsbA==","IG92ZXJs","LyIs","X2NvdW50cnk=","544=","IEdFTkVS","X3JvdXRl","IERhbA==","wrQ=","b2xvYWQ=","IHVuY29tZm9ydGFibGU=","KG1lbnU=","IGhvc3RuYW1l","JyIpOwo=","IGNhbGN1bGF0aW9ucw==","LWNsaWNr","IHByb3RlY3RpdmU=","44Kv","X0Zvcm0=","dW5ncw==","QWN0dWFs","bWY=","IFByb2Nlc3Npbmc=","IEludmVudG9yeQ==","KG1hdHJpeA==","YXBwcm9wcmlhdGU=","d2Vn","aWph","IGNocg==","IHJpZmxl","LXdzag==","a2Fy","IGluZGVwZW5kZW50bHk=","SU9T","IGNvbnNpc3RlbmN5","dm4=","L3N5c3RlbQ==","IENoYW5nZXM=","IGV4cG9zZQ==","aWNpZW50cw==","IHJlbGF0ZQ
==","CW5leHQ=","6Kg=","dWRlcw==","IGdsYXNzZXM=","RlhNTA==","Li4uLi4u","IFBkZg==","IGFwcHJvdmU=","IHtc","IGV4aXN0ZQ==","KSko","QVJFTlQ=","0L7Qvw==","IExhdGVzdA==","IE5pZ2VyaWE=","LkludGVyZmFjZXM=","IHJlbW92ZXM=","RW5lbXk=","IGVuZm9yY2U=","dmVydHM=","CXBvcw==","X3RleHR1cmU=","V0FSRA==","IElOQ0lERU5U","KGNvbnRhaW5lcg==","IGRlZmVuZGluZw==","IFJY","IEhvb2s=","YnJpcw==","IEZsYXNr","R3JheQ==","LikK","dmlzaWJpbGl0eQ==","IFJlZGlyZWN0VG9BY3Rpb24=","ZXJyYWw=","X2VsZW0=","IHJlc29u","ZnJvbnRlbmQ=","X3ZhcmlhYmxlcw==","YXRlcmlh","ICsi","YXZlbGVk","UklY","IGRlZmljaXQ=","X0NoZWNr","WVlZWQ==","VG9PbmU=","c3B5","IHVuaXRlZA==","ZW5kZW50","IHBvZGU=","44GM","Q0FU","KGZtdA==","IEJvbnVz","IHJlY2s=","wro=","TW9kdWxlcw==","IHZhY3V1bQ==","UmFkaW8=","IERBTUFHRQ==","UGVu","IFBhcmtlcg==","OzsK","IFJlYWxseQ==","X25lZw==","cGVuZGluZw==","IG5vbWluZWU=","IENhdGVnb3JpZXM=","IFVsdHJh","V2VhcG9u","IGRlZmVuZGVy","SXNz","IEdlbmRlcg==","IERyZXNz","IGltcHJpc29u","IGJhbmtydXB0","aW1lbnNpb25hbA==","UEhB","IFN0cmF0ZWc=","IFBST0ZJVFM=","IHBhdHJp","Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8=","ZGVsZWdhdGU=","IGZvclN0YXRl","IGRldm90ZWQ=","X21ha2U=","IHRlcnJvcmlzdHM=","IFNuYXA=","X25hdg==","IEFB","IElhbg==","CWFwcA==","UGxhY2VtZW50","X2hkcg==","PEs=","IHNhbmc=","c3Ryb2tl","LVE=","Pjw/PQ==","LW1vZGVs","YXZhbmE=","IFdhbmc=","ICAgICAgICAgICAgIAo=","CWluaXQ=","IGVudHJlcHJlbmV1cg==","YXRpdm8=","TG92ZQ==","LW92ZXI=","V2F0ZXI=","IG1vZHM=","Z2VuY2U=","VGVjaG4=","Png=","LlRhc2s=","bW9uZXk=","aWJhYmE=","J30pOwo=","IFNwZWNpZmlj","IExpbmVhcg==","X09QVA==","SGFzaENvZGU=","KFBsYXllcg==","LkNvbnRhaW5zS2V5","IGNvbGxhcHNlZA==","dHJhbnNwYXJlbnQ=","X1JBTkdF","Vmlld2Vy","KGNmZw==","IHNvcnRpbmc=","IGluZmVjdGVk","IE5hY2g=","IGFjY29tbW9kYXRl","LmVsZW1lbnRz","X1BBUlQ=","IFNleHk=","PWdldA==","KHllYXI=","IHhocg==","Ol0=","b3dza2k=","IHN1bW1hcg==","IMK/","IGludGU=","IHdvcmtmbG93","IFRhaXdhbg==","dmVyc2lvbnM=","5Y+R","IHN1cnByaXNpbmdseQ==","IG9wdGljYWw=","IHByb2Nlcw==","IGRpc2
FncmVl","IG51ZXZv","IENBTQ==","c29ydGVk","bGVhc2Vz","aXN0bGU=","SWRlbnQ=","CWV2ZW50","amVjdGVk","Q2h1bms=","VmFycw==","LnByb3ZpZGVy","IHByb2NlZWRpbmdz","IGluY2x1c2l2ZQ==","IGFydHdvcms=","ZW5kYW50cw==","77yaCg==","c2Vlbg==","IGxpZw==","IG1ha2Vycw==","X2Z1bg==","IGxlbmd0aHM=","UGF0aFZhcmlhYmxl","W2l0ZW0=","4Li1","RGVhZA==","RkZGRkZG","IFVyYmFu","dXBsZXM=","aWNoZW4=","KG51bGxwdHI=","LnNwZWM=","LFN5c3RlbQ==","VVJBVElPTg==","KGpvYg==","5byP","IHRyYWNrZXI=","xZk=","IE1S","IFNRTGl0ZQ==","IGR0bw==","IDs7Cg==","IG1pbnQ=","IEludHJvZHVjdGlvbg==","Y2Fv","IHF1ZXN0aW9uZWQ=","IGZpdHRlZA==","cmV2aXNpb24=","c3E=","IG1pZw==","X3VuaXRz","X2FzeW5j","IGZsaWNr","fSk7CgoK","IG5vdHJl","fWAs","RmlsdGVycw==","IG11bmRv","X2RheXM=","IGZybQ==","dXRj","IHZhbHM=","ZXdpZHRo","IEdlbmVyYXRvcg==","IEFydGlzdA==","IElEcw==","IEFydGljbGVz","cmVhdGVy","IENvbXBvbmVudEZpeHR1cmU=","Lj0=","IHJvdQ==","LW5v","LmJ1a2tpdA==","ZWdn","IERpZmY=","YXRpY3M=","0YPRhw==","4oCUCgo=","IENoYXJsb3R0ZQ==","Ynll","IH0pOw0KDQo=","IFZpaw==","IEJyb3c=","IGx2","IEdpYg==","LXdpbmc=","R0xJR0VOQ0U=","KEls","IEVuZ2luZWVy","LldhaXQ=","IFBpY3R1cmVz","IHJoZXQ=","IHRoZXJtYWw=","IHByYWlzZQ==","PD4oKTsKCg==","IFNwaWRlcg==","UGF1c2U=","IEJha2Vy","IHNsb3dlcg==","IH1dCg==","X2VucXVldWU=","IGRpc2FwcGVhcmVk","IFRpY2tldA==","SU5VWA==","X0xPQ0FM","0LDRgdGB","QEluamVjdGFibGU=","Y29tbXVuaXR5","R2VzdHVyZVJlY29nbml6ZXI=","5Zu9","IHNjYWxlcw==","IC0o","Lycr","IFNpdA==","IGV4ZWN1dGl2ZXM=","YXJkaW5n","IGFkdmVycw==","IGJhY2t3YXJkcw==","CWNvbnRleHQ=","IEhhbXA=","IFBG","IERlY2s=","IENyYWln","QW1lcmljYW4=","IGJlbGw=","IHByb2w=","dWZlbg==","IHJuZw==","YXJzaGFs","IFNpbXBseQ==","Zmlyc3RuYW1l","c2hvcmU=","SnVseQ==","IG1vcnRhbGl0eQ==","IOKGkgoK","SGVscGVycw==","IGJlbmNobWFyaw==","ZW1hZGU=","IG9yZ2FuaXNhdGlvbnM=","Lmdzb24=","IFRleHRGaWVsZA==","IGNpdmlsaWFucw==","LkFycmF5cw==","IE1pc3Npc3NpcHBp","IGludGVybWVkaWF0ZQ==","Z2V0VXNlcg==","X2NsdXN0ZXI=","UmVsYXRpdmU=","Zm9yZWlnbg==","LnF1ZXJ5U2VsZWN0b3JBbGw=","Rm9yZWlnbktleQ==","IHJlYXNvbmFibHk=","LS0tLS0tLS0tCg==","Q2F
yZHM=","IEthbQ==","IFRob3I=","IHJvbGxlcg==","LWVsZW1lbnQ=","IEN1cnJlbmN5","ZGRpZQ==","QUxMWQ==","IFJB","IHBlcm1ldA==","YWFhYQ==","IGhvbWV3b3Jr","IFZpdA==","IG1vbGQ=","IEZlcg==","W3N0YXJ0","IHN0YXRpc3RpY2Fs","IHNjYXJ5","X0hPTUU=","LkJlZ2lu","Q29uc3RydWN0","b2dlbmlj","IERFQUxJTkdT","IHRhbWJpw6lu","aXhvbg==","LmluZA==","YWNyZQ==","IHRyYW5zZm9ybXM=","IE5hcA==","LkJsb2Nr","dXNzaWE=","cGlyYXRpb24=","dWxlbnQ=","IGNlaWw=","Q2xhdXNl","bmFpcmU=","VEVT","IG5lYXQ=","U1RE","IFJlZ0V4cA==","cGVyZm9ybQ==","Oik=","IHVuaW9ucw==","IHN1YmxpYw==","IHdpbmRz","bG9hdGluZw==","Z2xpY2g=","IHBhZ2luYXRpb24=","U2tpbGw=","QXBwbHk=","IE9wZXJhdG9y","aXN0b2dyYW0=","IHF1YWxpdGllcw==","Q3Jvc3M=","IGRlY29t","XSwi","IEp1YW4=","Lm1vZGFs","LkNoaWxk","IFJvZ2Vy","U1RJVFVURQ==","OkNHUmVjdE1ha2U=","YWxldHRl","IHN0YQ==","YXNpZGU=","IGJsdXI=","IFdh","aWZldGltZQ==","cmVlZA==","Y29udHJvbHM=","IGJpbnM=","INC/0L7Quw==","Ki8sCg==","VUlT","IFJvdQ==","IERlbW8=","LWF3ZXNvbWU=","IENoYWlu","IGhhc3Rh","IEJhcnQ=","LktFWQ==","IHZlbmRvcnM=","bm9mb2xsb3c=","IERlc3Q=","X2J1aWxkZXI=","IGFyZ3Vlcw==","X2Fuc3dlcg==","Z290bw==","IFJFU1VMVA==","IE1PTg==","IHBvZGVy","b29ucw==","X0NBU0U=","IHJlcGxpYw==","IGZpbmFuY2luZw==","IERBVEU=","Y2Vybg==","X3RyYWNr","dGllcw==","L2xvZ28=","IE5FR0xJR0VOQ0U=","Z2V0VHlwZQ==","PlQ=","YmV0","Z2lybA==","IElOQ0lERU5UQUw=","LXNpdGU=","LnRyaWdnZXI=","IExpc2E=","X2lucHV0cw==","IHJlbGF0aXZlcw==","TG9nZ2VkSW4=","Q29uZmlndXJl","SUs=","LmFjY2VwdA==","UmVzdW1l","IERyYWZ0","ICo+KA==","IFdB","ZWRpYW4=","ZXJuZXNz","IExheW91dEluZmxhdGVy","Ki8NCg0K","b3RoeQ==","IG9ibGlnYXRpb24=","U3Vic2NyaWJl","IHRodW1ibmFpbA==","ZXhpc3Q=","IGluc2lzdGVk","IFVJQ29sbGVjdGlvblZpZXc=","IEFuZ3VsYXI=","IHRhYmxldHM=","IEltcGFjdA==","44CNCgo=","YWhv","IGNoYXJhY3RlcmlzdGlj","Z2Q=","ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0=","b3VydA==","YC4=","QXBwcm8=","Q29vcmRpbmF0ZQ==","UmVtZW1iZXI=","IG1hcmluZQ==","XT09Jw==","IEFkbWluaXN0cmF0b3I=","LmdldERlZmF1bHQ=","IGZvcmdvdA==","IFN0cnVjdHVyZQ==","VnVl","YXJzaW5n","bW9tZW50",
"a3c=","X2N1cnNvcg==","QXR0YWNr","IGF0aGxldGlj","IGRpYWdub3NlZA==","IGVuZGU=","5Yig6Zmk","SG91c2U=","IFBBUkFN","IHdpa2k=","IE9wcA==","IGNvbnNlcnZhdGlvbg==","IHNuZA==","X3RlbQ==","c3Vic3Ry","IENhcGU=","LnNpbQ==","VVRJT04=","YW5hbg==","4oCZdW4=","IGd5","LXdvcms=","IGNvbXBlbGxpbmc=","PScj","CXN1Yg==","IGRpcmVjdG9yaWVz","7Yq4","IHRvdWNoZXM=","b3V0aW5lcw==","LkNvbGxlY3Rpb24=","c2NoZWR1bGU=","LmxhdA==","IERvY3RyaW5l","Q0FB","IFJlZmVy","IHNoaWZ0cw==","IGxpa2VsaWhvb2Q=","cHJldGVy","IEZlbWFsZQ==","IGludGVyY2VwdA==","IGxvdQ==","55m7","IHJ1Zw==","IENyb3du","ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio=","LXByb2R1Y3Q=","IHByb21wdGVk","dW5nbGU=","ZG9ja2Vy","IFR1","IFVuaXF1ZQ==","X0Vycm9y","dWxvcw==","IOKE","IChg","R2V0dGluZw==","X3NjYWw=","IEVuaA==","w7x0","IHN1c3RhaW5lZA==","IHBhdGNoZXM=","IHByb3NwZXI=","IEdhemE=","X2xpZ2h0","IGluY29ucw==","LS0tLS0tLS0K","CQkgICAgICA=","U0Y=","Q04=","OiI7Cg==","IENvbGxpbnM=","KCop","IGNvbXBpbGF0aW9u","J10NCg==","IGNvbnNlcXVlbmNl","LC4uLg==","IGRt","IEJMT0NL","Q2x1c3Rlcg==","IHNraQ==","KGFyZ2M=","VHVwbGU=","IGpvaW5z","IFNoZXJpZmY=","V2Fy","aW5kaQ==","IGNvbW1lbnRlZA==","SE9TVA==","IGludml0YXRpb24=","YXBhbmVzZQ==","IHBlcm1pdHM=","cHJlY2VkZW50ZWQ=","X3pvbmU=","IEFteQ==","X1JE","TWluaW11bQ==","IGludm9jYXRpb24=","LmVuYWJsZQ==","aWNodGVu","LW93bmVk","Imlk","X1BPSU5URVI=","RmFj","IHNwZWNpZmljYXRpb25z","IG5vbWluYXRpb24=","IGdw","PCg=","IHJvYm90cw==","IEplcnJ5","IGhvbGRlcnM=","IHdhbmQ=","Y21z","IH0pKQo=","LlRvYXN0","IElMaXN0","QmFzZWQ=","em9vbQ==","L3N0eWxl","IEJlY2s=","TWVu","IGNvbnRyaWJ1dGluZw==","IHVuZG8=","IE9I","IGFkZE9iamVjdA==","IGVpZ2Vu","c2lnbnVw","6ZSZ","IGRpc3RhbnQ=","UEFSQVRPUg==","IE1hcmk=","IG3DoQ==","RW1w","w7Nz","IOyImA==","ZXZ0","K2o=","cGFyaw==","IFN0YXk=","IER1bg==","IHNveQ==","PiU=","YXppbmVz","IHRpZW1wbw==","KG1l","cHJlc2VudA==","LlRoaXM=","IGVkaXRvcnM=","RklFTEQ=","Lldvcms=","IFVuaXZlcnNl","IGRydW5r","LnRpbWVy","IGFsdGVyZWQ=","IE5hcg==","66Cl","LkFjdGl2ZQ==","aWRvcg==","560=
","LmRlbHRhVGltZQ==","IGF3a3dhcmQ=","JnF1b3Q=","IFNhZmFyaQ==","IHRyaWNrcw==","TUVOVFM=","ZGl2aXNpb24=","IHZhcnlpbmc=","IEhpZ2h3YXk=","IHBob3RvZ3JhcGhlcg==","IFN0ZXdhcnQ=","IGxhc3Rpbmc=","LlByZQ==","LmFtYXpvbmF3cw==","IEx1Y2s=","LkRlc2NyaXB0aW9u","IE5heg==","bmVn","IGPDsw==","PDwiXA==","IFN1cnY=","IFVuYw==","UmVjaXBl","LkJvcmRlclN0eWxl","IG1vZGlmaWNhdGlvbnM=","LWF0","QVRGT1JN","aGRy","YWtv","IHN1YmxpY2Vuc2U=","IEp1bXA=","IGJlaW0=","IE1hbmhhdHRhbg==","LmJvb2w=","X2h3","0YLRjA==","Qmlu","IGdhdGV3YXk=","IiI6","IFVJUw==","OiIr","LWRlZg==","IFJlZ3VsYXI=","L3Rlc3Rpbmc=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","c3RyaW5nc3RyZWFt","IGRpc3Bhcg==","IG1vYmls","LXJlYWQ=","IEFkYXB0ZXI=","IENoYW1waW9ucw==","IHNjaGVkdWxlcg==","IGtpbGxz","IE11bHRpcGxl","aXJyb3I=","IGdvZHM=","QURP","YWt0ZQ==","IFVzdWFyaW8=","LmNpcmN1bGFy","IHJlY2VwdA==","IEV4cHI=","IGVsZGVybHk=","IG5pY2VseQ==","IGJlc3Rl","V2FudA==","IGNsYXNzaWNhbA==","LnNwcml0ZQ==","b2JqYw==","IE1hc29u","IHNpc3RlbWE=","LkJsYWNr","ZXNv","IFplaXQ=","IGRpdmlk","IGVudGVycw==","X3N1YmplY3Q=","IFBsYW5ldA==","Lndhcm5pbmc=","IEdyYW0=","X3Rva2Vucw==","IGhvdXNlaG9sZHM=","X2N1c3RvbWVy","dXNlck5hbWU=","Y3Jvc3M=","IHBpb25l","IGFzc2lzdHM=","X1NN","aWJv","IGxveWFs","IHVzZWxlc3M=","I2VsaWY=","IFVsdGltYXRl","Q29tZQ==","Z2Vs","IGRpY2g=","eHl6","aWtlbA==","b2JyYQ==","X3NjYW4=","IEludGVyaW9y","IE5pY2U=","IHBsYWM=","CXRhcmdldA==","IHZpcmFs","YXNzbw==","KCkv","dW5kZQ==","IEFkb2Jl","T3M=","dmlzaXRlZA==","IE9X","IEZlZWQ=","IFNlcXVlbmNl","IG1hbmFnZXM=","aW5zb24=","IExvdWlzaWFuYQ==","e30p","IEhhYg==","IExE","IGJpcA==","cHJpdGVz","KGVsZW0=","LmhpYmVybmF0ZQ==","w6lsw6k=","IG9obmU=","X3RyYW5zYWN0aW9u","IGFubnVuY2k=","UHVibGlzaGVk","IEhvbmRh","IFRhbQ==","IFBhY2tldA==","X3NlbGVjdG9y","IGNoYWxsZW5nZWQ=","UHJvY2Vzc2luZw==","LWhvdmVy","IHRyYWluZXI=","X2NhbmNlbA==","IE5TRGljdGlvbmFyeQ==","YWJyaWM=","IE1MUw==","X3NlbnNvcg==","IHNocmluaw==","IEZY","dGhyZXNob2xk","CUhY","LW1hcms=","YC5g","U2NoZW1l","KGZ1bGw=","X3dyaXRlcg==","IFN5cw==
","IGZsZWQ=","IENpbg==","LXdpZGdldA==","IFByZXZpb3Vz","R2VuZGVy","X3F1ZXN0aW9u","RmVlZA==","IHNjcnV0","KHByZWZpeA==","44CC44CC","IGluZmVjdGlvbnM=","UGFydHM=","IGhpZXJhcmNoeQ==","X0RFTEVURQ==","IFBhdGllbnQ=","X3BheQ==","IHByb21vdGVk","IOyL","IGNpdmlsaWFu","IGFncmljdWx0dXJl","IFBpZWNl","IHN0YW5jZQ==","dXRzY2hl","QXNzaWdu","LkFDVElPTg==","Rmln","X3JhZGl1cw==","IFN5bmM=","ZHVjZXI=","ZmFpbHVyZQ==","ZW5zZWQ=","cHRpbWU=","Qk0=","X2RhdGV0aW1l","cXVpdm8=","UVVFVUU=","6ICF","QXBwZWFy","IHN1bW1pdA==","OnZvaWQ=","IHZpbmU=","6K6k","b25uZQ==","X1RSQU5T","LmdyZWVu","X2Nj","IGh1bmdyeQ==","ICI+","KCkpOw0KDQo=","RXh0cmFjdA==","aXplbnM=","IHNvbHZlcg==","Tm90aWZ5","IGVuZ2xpc2g=","IFNob3BwaW5n","aW50ZXJmYWNlcw==","UkVR","IGlsbGVn","IFVJSW1hZ2VWaWV3","IGRpc2Nvbm5lY3Q=","IFVudGls","IENvbnNlcnZhdGl2ZQ==","QENvbHVtbg==","IHNoaWZ0ZWQ=","IDoNCg==","IGZpY2g=","IGRsYQ==","IHNob2U=","IiksDQo=","dWxhcml0eQ==","X1JFU1A=","V2VhdGhlcg==","VUlBcHBsaWNhdGlvbg==","Lml0ZXJhdG9y","IGFnaW5n","LlBhcmVudA==","b3dpZQ==","KGVxdWFs","IENvbnY=","L2RlZmF1bHQ=","IG1lYXN1cmluZw==","LnByZXY=","LklzVmFsaWQ=","LkZhdA==","IHPEgw==","a2V5d29yZHM=","d2l0aG91dA==","IHNvdmVyZQ==","IGV4Y2hhbmdlcw==","IG1lbHQ=","IGlzbGFuZHM=","IEludGVncg==","IGp1bXBpbmc=","IGdsZQ==","IGpvdXJuYWxpc20=","IGRhdGVk","TG9jYWxpemVk","IFJlZnJlc2g=","UGFydGljbGU=","IGFh","IFNUUklDVA==","IGJvZA==","LlByb2Nlc3M=","X0FVVE8=","IFB1Ymxpc2hlZA==","ZXZlcnk=","IHRlY2hub2xvZ2ljYWw=","bHN4","IGlycml0","QWRkaXRpb25hbA==","IGRlbGltaXRlcg==","X2xhbmd1YWdl","LWFyZWE=","Ym95cw==","IFR1YmU=","IHdhdA==","IG1lY2hhbmljcw==","X293bmVy","U3BlbGw=","IFN0b3JpZXM=","LkFwcGVuZExpbmU=","VGFibGVWaWV3","aGVt","c3RpY2s=","b2xsb3dlcg==","SUZG","IFVW","b2xsaXNpb24=","U1VC","IGNvbXBhcmFibGU=","IGRvbmRl","c2FsZXM=","bGx2bQ==","IH1dLAo=","T1RUT00=","IFB1cnBvc2U=","TGFi","IGludGVydmlld2Vk","b2lz","YXNpbA==","LnNldElk","IEluc3RydWN0aW9u","LS0+","IE1vZGlmaWVk","YXRpb25hbGx5","IE1lZXRpbmc=","6K+v","I3JlZ2lvbg==","IHJvdXRpbmc=","LmZvY3Vz","IFlvdXRo","PEQ=","IE5hZw==","Y29udGFjdHM=","IGZ
vcm1pbmc=","IG1pZQ==","JyxbJy4uLw==","IEJQ","IGFwcGV0","IFRlYWNoZXI=","IFRQ","IGFubnVhbGx5","b3V0ZWRFdmVudEFyZ3M=","IFNwZWFrZXI=","IHJlbmFtZQ==","Q0ZH","KCIvLw==","5o6l","L3BhZ2Vz","IHByw6lz","IFNwZWxs","LkFsbG93","IElOVEVSUlU=","ICgj","4oCZCgo=","X0dlbmVyaWM=","Lmltc2hvdw==","X3RpbQ==","LWZhY2U=","KCYo","YXRpbnVt","IHJldm9sdXRpb25hcnk=","IEhvdXJz","cmFpbg==","IGFueXRpbWU=","IGFiYg==","LmpzcA==","U2Nyb2xsVmlldw==","IFRydXRo","IGFudGljaXBhdGVk","IGFjY2VudA==","LmNoZWNrZWQ=","IHNwZWNpZmllcw==","IGNhZg==","IGNlbGxwYWRkaW5n","IGNvb2tlZA==","IEh1Z2g=","cGVlaw==","X1JBVEU=","IGRvcm0=","Lw0K","SVZJVFk=","LkNvbnRyb2xsZXI=","KHBhcnQ=","LmNvbnN0cmFpbnQ=","IGludmFzaW9u","TU9WRQ==","IGdsdWM=","bGVuYW1l","IGFtZW4=","ZW5nbGlzaA==","IFN3aXR6ZXJsYW5k","IjsKCgo=","cGVzdA==","LmNvbGxlY3Q=","Tmli","IERpY3Q=","IEVtYg==","KHN1YmplY3Q=","IG91dHJhZ2U=","IGRlY2lkaW5n","IHNlbnRlbmNlZA==","RmVjaGE=","IkE=","IHF1ZXI=","IGZvbnRGYW1pbHk=","IHF1YWRy","LVk=","X0NBQ0hF","IGFuYWx5emVk","IGdhaW5pbmc=","IEFnYWluc3Q=","IFNvdWw=","dGF1","IGxpZ2h0d2VpZ2h0","IFRG","IEVmZmVjdHM=","LlR5cGVz","LmFkZENsYXNz","IHZlZ2Fu","6YE=","Lici","IEV4cGxvcmVy","LmRldGVjdA==","LnNoaWZ0","IG9ibGlnYXRpb25z","bGFzdE5hbWU=","IGFzc29jaWF0aW9ucw==","IFRpbWVTcGFu","dW50ZXI=","IEZyZXNo","Q29tcGF0aWJsZQ==","UHVi","aWRnZXM=","Lm9wdGlvbg==","dmFyaQ==","Lmhhc2hDb2Rl","IGdlYg==","LnNlY3Rpb24=","LW5vdA==","IFN1Ym1pdA==","VE4=","cmVnaXN0cnk=","X21lZGlh","IG5hag==","ZmZ0","IG1hdGU=","LXRoaXJk","IHBvY2tldHM=","ZXN0YQ==","IGJlbnQ=","IE5vcmQ=","IHJldGFpbGVycw==","IE1vcnJpcw==","LiIiIgoK","V3Jvbmc=","IMWb","UmF5","LmVj","IEJpbmQ=","X0hBTkQ=","KG5vbg==","aXNWYWxpZA==","IHNpbWlsYXJseQ==","X0xJTUlU","IGR5bmFtaWNz","IGRpc3RpbmN0aW9u","44GG","PE4=","IG9ydGg=","IFRveW90YQ==","IEthdGU=","IExT","b3JpZQ==","IFNwcmluZ3M=","IGZyZWFr","bGFzdG5hbWU=","X01VTFQ=","LXN0ZXA=","Iig=","QUREUg==","IGVudGVydGFpbmluZw==","X0NPTkY=","IGRlY29kZWQ=","IHN0cmVhaw==","IHdhaXRlZA==","IG5vdGlmaWVk","cm9kdWNlZA==","dmlzdWFs","LkxheW91dFBhcmFtcw==","5rA=","ZXNpYW4=","Zml0c
w==","c3ByaW5n","IEJlcm5pZQ==","VXNlckRlZmF1bHRz","IHBlZGVzdA==","QXBwZWFyYW5jZQ==","IFdpa2k=","IE5PVElDRQ==","IHNzaA==","IGR1cmFudGU=","IFppcA==","xLFy","IE5BVE8=","IHR3ZWx2ZQ==","IHJveWFs","77g=","IG1lcmNoYW50","IEZ1cm5pdHVyZQ==","J10pLAo=","LFg=","IGZvbGRlcnM=","IEdhdGU=","CWZ1bmM=","cGljaw==","X3VzdWFyaW8=","IFZlcm0=","bWVudGlvbg==","dXJwb3Nl","IGFsZXJ0cw==","eGlvdXM=","X3NpZw==","IEZ1","ICg6","IGR1bWI=","5YWz","IGFjY3VyYXRlbHk=","6YeN","UkI=","LXNjcmVlbg==","IFZFUg==","am91cg==","IHJvbWFuY2U=","dWNjZWVk","LmNob2ljZQ==","IGFkaXA=","X2RpbXM=","U2VyaWFsaXphYmxl","44KL","LmpvYg==","IHByb2c=","dWNoYXI=","IGdlbnRseQ==","IFJTUw==","aWN0dXJlZA==","X0VOQUJMRUQ=","CWxhYmVs","YXdrcw==","IEVuc3VyZQ==","cmVtZW1iZXI=","7KCV","IHRyYW5zbWl0","e3sk","LlRyYW5zYWN0aW9u","dXJzZQ==","X3JlbGF0aXZl","IHNpemVk","IFhY","IFByaW5jZXNz","IExhcnJ5","IHByw7M=","INGB0YLRgA==","IHNpc3RlcnM=","ZXN0cnVjdA==","IGNoZWNrcG9pbnQ=","Omxlbmd0aA==","IENhcmxvcw==","L2ljb24=","X1RBUkdFVA==","VG9rZW5z","IHBhdGllbmNl","IFNlbGVjdGVk","cXR5","LnNob3dNZXNzYWdl","IHdpbGRsaWZl","IFByb3Bz","Ym0=","LWFycm93","IHBhcmNlbA==","ZmlyZWJhc2U=","IEJlbmphbWlu","Y2Vzc28=","LnRpbQ==","IEdhcmM=","LmFueQ==","IEhPV0VWRVI=","IEtv","IGdyYWJiZWQ=","X2ZyYW1lcw==","IG9iamVjdEF0SW5kZXg=","IEFEVklTRUQ=","IHN1YnVy","CUdM","IH0pfQo=","LWxlbmd0aA==","7Iuc","IFBvdHRlcg==","X2J1ZmY=","Lmd1aQ==","IEVuY29kaW5n","RWxlY3Q=","LW1lc3NhZ2U=","IO+/vQ==","IMiZaQ==","IEFyZ3VtZW50TnVsbEV4Y2VwdGlvbg==","0LDRhtC4","IG1pbmltaXpl","IHJlc3BvbmRpbmc=","JF9bJw==","IEluZGl2aWR1YWw=","w6Fj","IElOVEVS","IG1hc3R1cmI=","IEJpbg==","KCck","65Oc","IG9wZW5seQ==","ID48","IHVudG8=","b2xvZ2ljYWxseQ==","IE11bA==","VklESUE=","IHNsaW0=","IENvbW1pc3Npb25lcg==","KG9u","IHVuZGVybmVhdGg=","L2Ri","dm90ZQ==","KE1lc3NhZ2U=","IFBvcGU=","RGVmaW5lZA==","IHN3aWZ0","dXJm","IGFkYXB0ZWQ=","U0VM","IHJldmVudWVz","IGRpdmluZQ==","PXk=","R3JhZGllbnQ=","X2FjdA==","IC8qITw=","IHBvbHlnb24=","IEZEQQ==","IENhcnI=","YXRhYmxlcw==","KHN0ZG91dA==","IHJlZnJpZ2Vy","IGNvb3JkaW4=","YXZvcml0ZXM=","0Yj
QuA==","IGNvbXBhc3Npb24=","IFBPU1NJQklMSVRZ","LXNlY29uZGFyeQ==","dXJhY3k=","IGNvbXByb21pc2U=","X0FW","X29z","IGJlc2lkZQ==","g50=","IGxu","LnBsdWdpbnM=","Q2FwYWNpdHk=","YWxhaA==","LmJpbg==","IENSQw==","X2JhbGFuY2U=","IGZsZXhEaXJlY3Rpb24=","IGFtYml0","IG5pY2tuYW1l","IEZvcmNlcw==","Q0xF","IFNoZWxs","IHNhaWw=","IFdyaXRlcg==","IEFsaWNl","ZHc=","IEluZGlhbnM=","IE1hcnNoYWxs","X1NSQw==","IG5vcm1hbGl6ZWQ=","IEphZw==","44KS","emVpdA==","cnBj","w61j","LmlubGluZQ==","IHRyYXZlcnM=","X251bWVyaWM=","IHV0aWxpdGllcw==","IGV2YWM=","SU5QVVQ=","CXJlZ2lzdGVy","TVg=","IENhbXBiZWxs","IGRhdGFzZXRz","IGRlbWFuZGVk","IGluaXRpYWxTdGF0ZQ==","Z2Fu","IGVp","VW5leHBlY3RlZA==","LXdlYg==","dHJhaXQ=","LFk=","IFRvZGQ=","IHNrZWxldG9u","IG9wdGltaXpl","56ys","IFVwb24=","IFN0T2JqZWN0","IGFwbGlj","Lic8Lw==","QUND","YWxvdXM=","IGhhc2hDb2Rl","IEJpYg==","SU5BTA==","IGludmlzaWJsZQ==","IGhldGVy","IHNhZmVy","fS8v","LnRoZW1l","Lm5hdmlnYXRpb25Db250cm9sbGVy","X21lc2g=","c2tpbGw=","IFZpb2w=","wrI=","IEVPRg==","IEtp","eW1tZXRyaWM=","IG1heGxlbmd0aA==","xaM=","ZnJpZW5kcw==","IEV2YW5z","IGxlbW9u","ICgu","U2xpZGU=","IFRoYWlsYW5k","IENhbm4=","IGFtZW5k","IGNpcg==","IHNpbGx5","ZXNpbWFs","X3BpYw==","cHJvY2Vzc29y","SmF2YVNjcmlwdA==","IGV2aWRlbnQ=","X2Rp","PlA=","dnJvbg==","LlVO","IHBhaW50ZXI=","aXphcnJl","IGxhdg==","IHBvbQ==","cHJlZw==","PWZ1bmN0aW9u","KHNlcmlhbA==","aWZpY2E=","dW1pbmc=","5Zyw","44GC","LW9w","VUNI","IEhlbmQ=","LnByb3BUeXBlcw==","IHlv","IHJvdXRpbmVz","IGNhcmluZw==","U2Vt","IHJlc2VydmVz","IHByaW9yaXRpZXM=","cmVkaXRz","SVNUUg==","Q29udGVudFR5cGU=","IFNjaHc=","L21lZGlh","IGVzdHI=","IGNsaW1iaW5n","LXdlZWs=","Y2hlcmNoZQ==","c2Vuc29y","VG9BcnJheQ==","IE1vbnRyZWFs","IGNsb3Vkcw==","IEluamVjdGFibGU=","IFJpY2U=","IHByb3BhZ2FuZGE=","X3Byb3ZpZGVy","IGluZG9vcg==","IGluYXVn","IGRpcGxvbQ==","IG1lc3NhZ2luZw==","X211dA==","5aaC","IGt3","T05T","YXJpYW5z","UlBD","KV0NCg==","LXJheQ==","IFNvcg==","bWFsbA==","IG1hcmtldHBsYWNl","IHZ0aw==","TWE=","b2dhbg==","aWdp","IHNwb25zb3JlZA==","IERhbmk=","LlNFVkVS","PicuJA==","bXVsdGlwYXJ0","IF
dvbA==","IHRhYmxlTmFtZQ==","IFVzZXJuYW1l","QmFja2dyb3VuZENvbG9y","IGZyaWdodA==","X0VNQUlM","U2VwdGVtYmVy","X3ZhbHM=","b3BpYQ==","IHNwb3R0ZWQ=","LUNo","IGRhdGFTb3VyY2U=","LyIK","0LXQutGC","IFJlcXVlc3RNZXRob2Q=","IFJlcGxhY2U=","LWRv","YWhu","IFBoRA==","XS4KCg==","Tk9O","Z2VtZW50","IFRocg==","IHF1aWV0bHk=","IHRvcnR1cmU=","IHRlYXM=","IENZ","IGF0cg==","ZGV2ZWxvcG1lbnQ=","LWRldGFpbA==","IGxpZ2h0ZXI=","IGFyZ3Vpbmc=","IGRlc2VydmVz","IGN1cnJpY3VsdW0=","X0NPTlRFWFQ=","xYJ5","SElURQ==","CUlE","L3VwbG9hZHM=","IHRpdHM=","cmVv","X2Ryb3A=","LlVURg==","IHBpY2t1cA==","IGdyb2Nlcnk=","IFB1cmU=","IGVhc2llc3Q=","UGhpbA==","LmZlYXR1cmU=","KCIq","IGludmVzdG9y","dG9r","IGphcg==","TG9z","4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU","LnF1ZXVl","LXNwZWVk","TWFs","dW1ibHI=","IENPTlNU","IEhSRVNVTFQ=","IERhbmNl","KGZpbGVQYXRo","IGF0dHJpYnV0ZWQ=","4KWN","IEJ1bmQ=","Y29pbnM=","IHPDo28=","IHBpcg==","cGVyc29uYWw=","IHByZWxpbQ==","IHByb3Bvc2U=","IFRM","XV0p","IFN1YnNjcmlwdGlvbg==","IEtyZQ==","LGxlbg==","LkZpcnN0T3JEZWZhdWx0","KS0t","X3Byb2R1Y3Rz","LkdldEJ5dGVz","U2hpcA==","IGVuY3J5cHQ=","IFNH","IE15c3Q=","aGly","IGl0ZXJhdGU=","IGludGVuZA==","Lm1vY2tpdG8=","IGNoYXB0ZXJz","KGFuZ2xl","IFZsYWQ=","6K6+","Jy4KCg==","UmVzcG9uc2VCb2R5","IEFiZA==","ZGVhbA==","IGJhcnJpZXJz","LW91dGxpbmU=","YmlsbA==","IEZhbGxz","X3NlY29uZA==","LmluY2x1ZGU=","LmNlaWw=","IG9jY3VwYXRpb24=","cGhvbnk=","Lm1vdmVUbw==","IEplbm5pZmVy","QVNURVI=","OyI+PA==","IEVuYWJsZWQ=","IHRlcm1pbmF0ZQ==","IElv","bGF0aW9ucw==","IFRIRU9SWQ==","IGVhcmxpZXN0","IHJhY2s=","IFNjYXI=","c2hha2U=","Y2hpcA==","IHV2","IGFsbGlhbmNl","0L/QuNGB","IEdPT0RT","emlvbmU=","IFZJ","IHst","IGZpbHRlcmluZw==","IG1pc2Nvbg==","LkRvY2tTdHlsZQ==","IGJ1c2g=","IGp1bms=","5ow=","IFFVRQ==","IGhvb2tz","IGZpcm13YXJl","IG1pZGRsZXdhcmU=","ZGlj","IE9ha2xhbmQ=","IGFycml2ZXM=","UGF5bG9hZA==","cGl4ZWw=","XXw=","IHN0YXJ0RGF0ZQ==","LlBSTw==","X2F1ZGlv","IG1pZGZpZWxk","aWdpZGJvZHk=","IFN3aXNz","IENsaXA=","IER1bXA=","IFRleHRCb3g=","IGdlaA==","eWllbGQ=","b2Rz","IHJlZmVyZW5kdW0=","QmFja2VuZA==","IENyZWFt","
IGRvbWluYXRlZA==","IEFyY2hpdmU=","IHJpZGVycw==","LnByZXBhcmVTdGF0ZW1lbnQ=","IHF1YW5kbw==","IGNoZWY=","d2lraQ==","aW5lbA==","YW1wbGluZw==","KCJcXA==","IHNhZw==","X3Byb3h5","44GV","cGRv","LmdldEVsZW1lbnRzQnlUYWdOYW1l","IGRlbW9uc3RyYXRpb24=","IE5QQw==","IGFyY2hpdm8=","ZW5kYW5jZQ==","IGVmZmljaWVudGx5","KGFjdHVhbA==","LnRhYmxlVmlldw==","IG11c2g=","IGJlYXJz","X3RocmVhZHM=","amFz","YWh1bg==","IG5ldXJhbA==","IGRlc2lnbmluZw==","IEdEUA==","IGxpZnRlZA==","55uu","IEpvaW50","IEluY2x1ZGU=","IEdpYW50cw==","IHdpdGhkcmF3YWw=","IFJlbnQ=","bmF0aXZl","IFNlZWs=","Z3Jlc3Npb24=","X0NQVQ==","XFM=","IFNoaWVsZA==","IHNvbGlj","IGJvb20=","eWVjdG8=","IG1hbnVmYWN0dXJl","IOKAiw==","IGJib3g=","IGVhcnRocXU=","b2xsZWN0b3Jz","OkAiJQ==","IGxvb3Bz","SmU=","YWxraW5n","IFdoYXRz","IEJveXM=","LmJvb2s=","QVJHRQ==","X3BpeGVs","IHN1c3BlY3Rz","zrk=","dXNw","IEJNVw==","aWVjZXM=","KHBlcnNvbg==","5byA","6bs=","IFBvZGNhc3Q=","IGJvdQ==","KEl0ZW0=","w7s=","KElucHV0","SHR0cEdldA==","IGJ1cmc=","KV4=","Qk9BUkQ=","Ki8s","IGd1bHA=","IEJlbm4=","IGRlY2tz","LnN0YXR1c0NvZGU=","IGFjdXRl","IGh1Zw==","dWd1","IHBsZWQ=","LCIl","aGFwZQ==","INC30LDQvw==","IE1haW5l","LnJlYWw=","IGRhbGFt","IE1pbm9y","LkZsb2F0","ZGlzcA==","IHRs","IGVuY291bnQ=","PT4k","IGZn","dGVlcw==","IFJlY29tbQ==","w6Rs","IGNoZW1pc3RyeQ==","QmxvY2tz","T0lE","IGZvcmV4","IEFwcGVuZA==","IHsq","IFN1cHBseQ==","Q0dGbG9hdA==","KGJs","IGF0ZQ==","YWRvcmE=","IGd1c3Q=","QXNzb2Np","Pi4K","RkVUQ0g=","LnNlcmlhbA==","d2lkZ2V0cw==","YXJkbGVzcw==","aWVmcw==","X0ZVTEw=","ZXJuZXRlcw==","IFByZWQ=","2K0=","5LqL","dWJlcm5ldGVz","IExhdXJh","IGxhYmVsZWQ=","SGlnaGxpZ2h0","IGFubm95aW5n","L3VwZGF0ZQ==","KGRlc2NyaXB0aW9u","IGludGltaWQ=","JGM=","IikpKQo=","LkFQ","IFtdKg==","IEVYSVQ=","Lkhvc3Q=","IE9QRU4=","LnNlbmRNZXNzYWdl","X2NhbWVyYQ==","X3RpbGU=","IHRoZXJt","b25vbW91cw==","IGRpc2Fkdg==","IG5hYXI=","aW5kZXhPZg==","IFBQ","LnByb3RvY29s","QUZF","IHRleHR1cmVz","IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMj","dW1iYWk=","LnN0YXRz","IEdF","IGll","IFNURA==","IE1hbm4=","LnJlZmxlY3Q=
","S0I=","IGRpdmU=","Lndhdg==","LyotLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t","L3NldHRpbmdz","LmxpZmVjeWNsZQ==","IGRhdWdodGVycw==","b3J1cw==","dWJlcg==","TklORw==","c3RyaQ==","IFRpcA==","IHpu","IHN3aXRjaGVk","aW5ldA==","dWZmeQ==","IFRyYW5zcG9ydGF0aW9u","KGNvbmY=","ZnJpY2E=","IFhM","IExlYWQ=","X3BlcmNlbnQ=","PE1hcA==","IHRocnVzdA==","b3Ji","aWtr","IHRyYXVtYQ==","QWNjZXNzb3I=","IEZpdA==","IFN0cmluZ0J1ZmZlcg==","ZXhwbA==","KHNjcmVlbg==","IGF1ZGllbmNlcw==","IE9QVElPTg==","X3JvdW5k","W25vZGU=","YmVo","LT5fXw==","cGVybWlzc2lvbnM=","IERldGVybWluZQ==","Lk1hbg==","IGFkdmFuY2Vz","LklucHV0U3RyZWFt","IHN0cm9uZ2VzdA==","IGVCYXk=","ICMt","IGRpcm5hbWU=","IFNNUw==","IG1lZGljYXRpb25z","IGFtZW5kZWQ=","IGNodXJjaGVz","IEltcGVyaWFs","JHJvdw==","IE1hZGlzb24=","IEluc3A=","IGFmZmFpcg==","IHBzeWNob2xvZ3k=","dmg=","IHNldmVyaXR5","4oCQ","IHN0cmlwcw==","QUg=","dmVydGlzaW5n","IGNvbnNl","SU1BR0U=","IFN0YXRz","CXNj","LkN1cnNvcg==","IGZyZWV6ZQ==","c3Nvbg==","KHhtbA==","IFN1c2Fu","LnRpbGU=","ZWRlZA==","ICAgIAkJCQ==","dWVsbGU=","IE1pdGNoZWxs","YmFzZWQ=","T3BlcmFuZA==","veaVsA==","IEZG","CXN0cmNweQ==","b3VuY2Vz","aWxkbw==","LmV4ZWN1dGVRdWVyeQ==","IGFwcHJvYWNoaW5n","IFNldmVu","IG51dHM=","IHJpYw==","YXNzaWdubWVudA==","IGNhbGN1bGF0b3I=","IE11cnBoeQ==","IEJvdQ==","7YQ=","IGJ1dHQ=","IHRpY2tz","UHJvamVjdHM=","aWxpYg==","LnRleHRDb2xvcg==","bW92","X2xvZ28=","KHRlbXBsYXRl","IElOSVQ=","IGltYWdlVmlldw==","c2NyaXB0aW9ucw==","T1JJVFk=","Q29uc3VtZXI=","IHVucHJlY2VkZW50ZWQ=","IHRvdXJpc3Q=","IGJyb24=","IGNvbnRyYWN0b3I=","IGxpY2VuY2U=","IE5hbQ==","5q8=","KHRyYW5zZm9ybQ==","X0FUVA==","UHJlZg==","IEdhbQ==","IHZlc3NlbHM=","IGhhdg==","TGF0ZXI=","LlRvTG93ZXI=","IHVybHM=","IGJyZWFrZG93bg==","IHBlbmFsdGllcw==","IGZvc3Rlcg==","IFVF","IGNsdWU=","Y29tZWQ=","5ZCN56ew","LW1haW4=","IHB0cw==","IGNvdW50ZWQ=","aWN0cw==","L3Bvc3Q=","IGdldGF0dHI=","IHBpbmc=","QU5DRUw=","IHBlYw==","0YXQvtC0","YW50b20=","IEJsdWVwcmludA==","IEV2ZW50RW1pdHRlcg==","IGzDpA==","5rI=","IHN0cmF3","KGNvbXA=","J3VuZQ==
","Pk4=","LWNsaWVudA==","ZXNNb2R1bGU=","LWJhc2U=","IHJldHJlYXQ=","X3NpbXBsZQ==","CQkJCQkJIA==","ZmVl","JykNCg0K","Q29udHJvbEl0ZW0=","IHN1YnNjcmliZXJz","cGxlYXNl","IEVmZg==","IHBvdW5k","IEJ5dGVz","IFRlYQ==","X2FjdGl2aXR5","IG1heGlt","IG9wY29kZQ==","QlNE","LmNvbnN0YW50","O30=","b21icmVz","IGNhcmVlcnM=","KS4KCgoK","IHNwcmVhZGluZw==","LWV4cGFuZGVk","IE9yZA==","YW1hcmlu","IG1vYmlsaXR5","VW5mb3J0dW5hdGVseQ==","YWtr","Tkw=","X3JlZGlyZWN0","IFBH","IFNlbnNvcg==","Ym9s","dGFw","X01FTU9SWQ==","IFVJQWxlcnQ=","cGxpdHVkZQ==","V2Vic2l0ZQ==","IExvZ28=","bG92ZQ==","W2luZA==","IGFsdG9nZXRoZXI=","IHdvbmRlcmVk","IGVzcGVy","IExpYmVyYWw=","IG9zcw==","IGVsaXQ=","IHN0aWZm","b2RveA==","X21lbnRpb25z","IERvdWdsYXM=","X3BpZA==","IENL","IGluaXRXaXRoRnJhbWU=","LmJsb2c=","cGtn","YW5naGFp","UVVJUkVE","dXU=","IG1rZGly","QVRBTA==","IHVuaA==","aW5jZXM=","c3Ro","IGh5cG90aGVzaXM=","IGNhdGE=","IFRC","IENsYXI=","IHByZWRlY2Vzcw==","IHNpdHVhdGVk","LXdvcmxk","KSkv","IGhlYWRsaW5lcw==","LnN0YXQ=","IG91dGJyZWFr","c3BhdGg=","X0ZMQUdT","IFNlcnZsZXRFeGNlcHRpb24=","U3Vu","RlJPTQ==","IERpcg==","44O744O744O7","X2Nvb3Jk","IE9wdGlt","TW9uaXRvcg==","LmJpdA==","WFhY","IHRvZGFz","ZmVsZA==","0YDQuA==","aW1pcg==","IHBvbGl0aWNhbGx5","IG1vbGVjdWxhcg==","IHRyYWRlZA==","IHt7JA==","IFN3ZWRpc2g=","ICdALw==","X1JFQUw=","IHdhcmVob3VzZQ==","dG9kYXk=","LEw=","b3Jw","PHNlY3Rpb24=","LWJy","eW1l","IFVzZXJTZXJ2aWNl","IGxpYmVydHk=","IG1vbWVudG8=","KEltYWdl","PHNpemU=","U2No","IGpvZw==","aW9sb2d5","YXJlbnRseQ==","IHF1YW50dW0=","IEFidQ==","IHJpbQ==","IG1hbmE=","Rm9udFNpemU=","QnVpbGRpbmc=","c3RhaXJz","QUlMQUJMRQ==","ICYn","IHNlY3Q=","IHNpZ2g=","KGJhdGNo","LklDb250YWluZXI=","cG9sbA==","IENvcnBz","zrU=","YXJ1","IEtheQ==","LnJhbmdl","X2NsaWNrZWQ=","IFJvYmVydHM=","Lk5ldHdvcms=","ZmluaXNo","LU1hbg==","IGNvbGxlZ2Vz","IEZpbmU=","IikpLAo=","ZmlsbQ==","IHJlbWluZGVk","IGdlc3R1cmU=","b3V0aWw=","IHRocmVhZGluZw==","IG9iamV0","IHRvdXJz","YWN0aXZhdGVk","Lm1rZGly","PXVzZXI=","IHJlZGU=","ZsO8","X1NZU1RFTQ==","cHY=","IGNvbmdy","IG1hc3Nhc2pl","IHByYWN0aXRpb2
4=","VW5pdmVyc2l0eQ==","IHRhYmluZGV4","0Jg=","U2V0cw==","IGNvdW50aWVz","Z3Vlc3Q=","ZmFu","IHdvcmRlbg==","LmRp","0L3QsNGH","wr8=","aWdEZWNpbWFs","IHNob3Jl","IGfDtg==","IHJlcGFpcnM=","IGhlbHBlcnM=","IGNlbnRlcmVk","T0xMT1c=","IG1hcFN0YXRlVG9Qcm9wcw==","IGNlbnRz","PEE=","IGV4cGVjdGF0aW9u","T2N0b2Jlcg==","IGJnY29sb3I=","Y2FsZXM=","LkNPTg==","IFZlbA==","IGNyeWluZw==","LXNlYXNvbg==","IGZ1bmN0aW9uaW5n","X0xPQ0FUSU9O","w7xzcw==","YmVyeQ==","UGFyYQ==","b21pbmF0b3I=","LWxl","IGV0aGljYWw=","aGFzaHRhZ3M=","ZW1wbG8=","IG7Dum1lcm8=","KGFjdGl2aXR5","LlN0b3A=","LnN0cmZ0aW1l","SUxE","IHRvZQ==","CU5vZGU=","IikNCg0K","IFB1ZXJ0bw==","IGV4ZWN1dGluZw==","IEdVSUQ=","IG9wcG9zaW5n","YWxwaA==","IGV4aGliaXQ=","X2ZsYXNo","IG1laWxsZQ==","IGpzb25PYmplY3Q=","SGVybw==","YWludGVk","X0RPTQ==","IHdpbA==","IHNsb3Bl","IG3DpQ==","IElyYXFp","IG9yZ2FuaXpl","CWpRdWVyeQ==","SFVE","c2hpbmU=","Lndl","IFNraWxscw==","cG9uc29y","IGNvbmNsdXNpb25z","IHJlZm9ybXM=","IHJlbHVjdA==","bmFtZWQ=","IE9saXZlcg==","IC8vfQo=","LWxvb2tpbmc=","IGZvZw==","IEhP","IEZyaWVk","IGluZXZpdGFibGU=","IERhdGFHcmlkVmlldw==","SG91cg==","aWxsZXM=","bG9naWNhbA==","IGNvbm5lY3Rpdml0eQ==","LnR3aWc=","IEt5bGU=","KGRzdA==","LVNo","IFN0dWRpb3M=","KExldmVs","LmpldA==","X1BST1RP","LWRlY29yYXRpb24=","T1RIRVI=","IHJlYWRpbHk=","LlBhcmFtZXRlcg==","IG11bHRpcGx5","IExJQg==","YXJtZWQ=","IHNvb25lcg==","5oQ=","X0VT","IGZvc3NpbA==","IEFuYw==","4oCcVGhpcw==","bG9kYXNo","UHl0aG9u","IGhpc3RvZ3JhbQ==","d2VzdGVybg==","IGluZmFudA==","IGNvb3JkaW5hdG9y","IG5pYg==","Om0=","IHJlc3BlY3RlZA==","IGRlZmluaXQ=","JlQ=","X3BhZA==","IFRyaWdnZXI=","dGhhbA==","IGltYWdlTmFtZWQ=","IGJlYXRlbg==","CXJj","IFBhbGFjZQ==","IGhhemFyZA==","IGlzb2xhdGlvbg==","X3Jj","Y29udHJl","T1VUUFVU","IHJlaWdu","IFBsYXRl","QVRFUw==","IGZsdXg=","IHBhY2tz","LmdldFNlbGVjdGVk","IHBhcnRpY2lwYXRlZA==","IG5lZWRsZQ==","LWRlcHRo","Ojo6Ojo6","LWxhdw==","aW5zcGFjZQ==","b25pdG9y","PW5v","IEF0b21pYw==","IEJyYWlu","RWRpdGFibGU=","LXNj","cmVkZW50aWFs","IFBlcnJ5","a2ll","IC0tLS0tLS0tLS0K","LnN0cm9rZQ==","KEludGVudA==","I
HVuaXR5","dW1sYWg=","RnVydGhlcg==","IHByemU=","IHPDuA==","44KK","IFBST0NVUkVNRU5U","IEhvdXNpbmc=","IGF0dG9ybmV5cw==","IGNvbXBvc2U=","YXR0ZXJpbmc=","IldoYXQ=","ZHJhdWw=","IHN0cmFpZ2h0Zm9yd2FyZA==","SW5zdGFudA==","LkpUZXh0RmllbGQ=","IHRyYWRlcw==","0LvQsA==","IHsh","IGxhdGVseQ==","SU1H","IEFsZA==","IElOTkVS","IGNhcnRvb24=","LlNvdXJjZQ==","RkFMU0U=","IGRvdWdo","ZmVu","KHJlY3Q=","RGF0YVRhYmxl","Tmljaw==","IEJ1dHRlcg==","cmVhZHM=","X2NvbW1lbnRz","RU5W","IENvbm5lY3RpY3V0","LUZJUlNU","CQkJICAgICA=","YWNoaQ==","Lk1zZw==","cmVjdGlvbg==","IHJlbGF4ZWQ=","IHNoYWZ0","IGVm","IEFkZGluZw==","IGJyZWFjaA==","IO+8mg==","cmFtYQ==","IGNvbmR1Y3Rpbmc=","ICg7","KGds","IENBVVNFRA==","YXNoaQ==","IEZMQUc=","IENvbW1lcmNl","IElOVEVHRVI=","aG91cnM=","IFNjaG9vbHM=","IG51Y2xl","QWdhaW4=","cHJvag==","IHNldmVudGg=","RU1QTEFSWQ==","KG1vY2s=","J10sDQo=","X1NQRUVE","PmZhbHNl","IHNwYQ==","IE5lYXI=","7JU=","IGludHJpZw==","X21lbWJlcnM=","d2F2ZQ==","IGFuYWx5c3Rz","X09T","ZWRpbg==","IEZyaQ==","IHJldHJpZXZlZA==","UmVndWxhcg==","X29icw==","RVhQT1JU","Jyl9fSI=","ImNsYXNz","X18oKA==","YnVja2V0","IHN0cm8=","IFBhdGNo","eXN0aWNr","ZnVsbmVzcw==","YXBvcw==","RGE=","CQkJCQkgICA=","IGVucmljaA==","dW5vcmRlcmVk","aG9sZQ==","Q29uZw==","PFByb2R1Y3Q=","IEN1cnQ=","KHRoZQ==","X2xvd2Vy","IGF2b2lkaW5n","IGJ1eno=","IHZpYWJsZQ==","dWJh","LWlz","YXJlbA==","IGFjdGVk","LWRldGFpbHM=","4LiH","IFRoZW9yeQ==","IFB1bg==","IEFub255bW91cw==","Li4uIgo=","w6hyZXM=","5Y+v","IFZpc2lvbg==","X3NlbQ==","YXNoYQ==","IGNlbGVicml0eQ==","IGVuZERhdGU=","IHBvcHVsYXRl","IGN1aXM=","cXVhbnQ=","Zmxvb3I=","IGdsb2JhbGx5","IGNydWlzZQ==","IFN0YW5sZXk=","IGJpa2Vz","LmdldENvbm5lY3Rpb24=","IHBvb3JseQ==","X290aGVy","YW1waW5n","LiIpOwoK","b2Rp","X0FETUlO","LmNvbG9ycw==","IEdhbWluZw==","Pic7Cgo=","U1RSVUNU","UVI=","SURz","KGFyZ3VtZW50cw==","X2F1eA==","KEV2ZW50","X1BSSVZBVEU=","IFRyZWs=","IGRvd25sb2Fkcw==","bXV0YWJsZQ==","X1NUUlVDVA==","KHd4","IGRvbWFpbnM=","anNweA==","IFZpYWdyYQ==","Q29tbWFuZHM=","SnM=","LmNmZw==","Q29udGVudFBhbmU=","IEVkaXRUZXh0","4KWN4KQ=","QXR0YWNo
","IEFSTQ==","cG9zaXRpdmU=","IEdlbmVyYXRlZA==","IHNlaXplZA==","PTo=","IGVsZWN0cm9uaWNz","IEFwcENvbXBvbmVudA==","LycsCg==","LmVxdWFsc0lnbm9yZUNhc2U=","RG9jdHJpbmU=","ZGlzaw==","IFBvbGl0aWNhbA==","Q0hP","PEY=","CWhlaWdodA==","IEJ1Zw==","Lmxl","aWto","IG1pbGxpc2Vjb25kcw==","IGNvbnN0aXR1","bWFn","Lm5s","LXJhbmdl","YW5nZ2Fs","Jyxb","cm9wb2xpdGFu","IMOc","IFVD","LmRlc2M=","LUxBU1Q=","ZnN0cmVhbQ==","aWJpbA==","IGZpZXI=","VkVSWQ==","IOuz","SVJU","X1VJ","KGFicw==","IGtuZWVz","IHJvb2tpZQ==","IFZhYw==","YXJlbmE=","Y29tbWVuZA==","LVw=","IFNVQlNUSVRVVEU=","U29mdA==","IHBhcnRpcg==","d2VhbHRo","6KaB","KGRhdGFzZXQ=","IENsaW1hdGU=","LXNob3c=","IHJlbGlhYmlsaXR5","X2NodW5r","5Luj","X3N0b2Nr","IEVYRU1QTEFSWQ==","77iP","IHbDrQ==","IHNtaWxlZA==","IGRyaWxs","LkZ1bmN0aW9u","IFNJ","IHJlZ3Jlc3Npb24=","LVg=","IEphcg==","cHJlZg==","CXN1Y2Nlc3M=","IEhpdGxlcg==","IGluc3RpbmN0","IGZlbW1lcw==","IGxvdmVy","PAo=","IG11bHRpcGxpZXI=","cmls","UmVzaXpl","IEF1dGhvcml6YXRpb24=","IEthbg==","RGlzcGF0Y2hUb1Byb3Bz","IGNyb3Bz","dG9rZW5z","ZWNu","ZW50aWFsbHk=","IElOVEVSUlVQVElPTg==","ZmFrZQ==","VW5kZWZpbmVk","IEFL","IFRlc3RDYXNl","IHJhYg==","IHRvcnJlbnQ=","IE90","QmFycw==","IGxlY3R1cmU=","IGVuam8=","IHJlc3BvbmRz","IGluZGV4ZWQ=","T2ZXb3Jr","X2NoYWlu","KSktPg==","IEJlYXV0eQ==","IGA8","IHRvdWNoaW5n","IHwtLQ==","CWZsYWc=","bm9ybWFsaXpl","IHRyYXBwZWQ=","IGVzdGFibGlzaGluZw==","L2J1aWxk","QUo=","Znk=","LXJlYWN0","YXZu","UklQVElPTg==","IGt1dA==","IEZhc2hpb24=","IEluZm9ybQ==","Y3VyaXRpZXM=","PGJ5dGU=","IFVrcmFpbg==","IHN1Zw==","IGNvbnNpc3Rpbmc=","b29kbGU=","LmN0eA==","LlRvTGlzdA==","IGNvbW1lbnRhcnk=","IHRyYW5zZmVycw==","IG5vc3Q=","aWhhZA==","IFVwcGVy","IGNvbmZ1c2luZw==","bWlzc2luZw==","LWNs","IGJvdW5kaW5n","IGNvbmdyZXNzaW9uYWw=","IHJldmVhbGluZw==","ZGg=","cnVw","IHRyZXM=","cmVwZWF0","LAoKCgo=","X3RhYw==","IGV4cGVk","R2lybA==","aG9yaXpvbnRhbA==","ICIuLi8uLi8uLi8=","KG9wdGlvbg==","IHdlaXRlcg==","CXNxbA==","ID0+ewo=","IGdhcmxpYw==","IHJlcHI=","IHJlcGxpZXM=","KHByb3A=","IHNwaXJpdHM=","IGluc3BpcmU=","IGJhc2VtZW50","LnJlamVjd
A==","IGhpbnRz","IHBvbGxpbmc=","CSAK","X3JhdGluZw==","IGNhdGg=","YXZpZXI=","IGNvbXByZXNzZWQ=","IFZT","XSc=","IGp1ZGljaWFs","IFRyZW5k","dHJhaW5pbmc=","RVNUQU1Q","b2duaXRpb24=","xIE=","U0VOVA==","dmVudGlvbnM=","IGNvbnN1bHRhbnQ=","dW1waA==","IHVzZXJTZXJ2aWNl","LE5VTEw=","a2g=","RGVhcg==","X0JBRA==","aXRhdGlvbnM=","IG1ldGFwaA==","J8Op","YW5kaXNl","LWZvbnQ=","LmNoYXJ0","IHNn","X0NvbnRyb2xsZXI=","LmpwZWc=","IFVMT05H","CWdhbWU=","KHNz","IE1hag==","CWdv","IFNhZA==","IEJlcmc=","IE1pbmU=","UGFjaw==","IHJlc2lzdGFudA==","IFJPTQ==","IHBlZw==","IFN0YW5mb3Jk","IFlhaG9v","IHNjYWxlZA==","IGxhbg==","PVtd","Ii8+PC8=","IHBsb3Rz","LioK","IHRyYXZlbGVk","IE9zY2Fy","Vkw=","IGxpbmtpbmc=","IHRpcmVz","ICcqJw==","IEJ1ZmZlcmVk","ZXJp","ICoqKio=","IG92ZXJsb29r","Lk5vbg==","IHLDqXM=","IGVneQ==","5bCP","IGF0dGFja2Vy","CQkJCQkJCQkJCQkJCQkJ","LnN5bmM=","QVNDQURF","R3JvdW5k","IGRlY2F5","IFRvbg==","IGpld2Vscnk=","IGJ5cGFzcw==","IG1lbWJy","Uk5B","PFN5c3RlbQ==","IE1lZGljYXJl","KG5ldA==","b3Np","SEI=","REVD","e0VJRg==","X2ZpbGw=","IHRyYXZlbGxpbmc=","b2JzZXJ2ZXI=","IGNvbnN1bHRpbmc=","UkVBVA==","UGhhc2U=","KGlp","IFNVTQ==","Pg0NCg==","IHN1ZA==","CWJhY2tncm91bmQ=","IHNjaG9sYXJz","LW11dGVk","YXLDoQ==","ID09PT09","IF9fX18=","Q3JlYXQ=","ZW5ldmVy","L3dw","IFZQTg==","RXJyb3JDb2Rl","KV0sCg==","KGJ1aWxkZXI=","IEVuZW15","U2Vuc29y","dXNh","IHRyaWdnZXJz","IHBsYXlvZmZz","X1JFUQ==","ICh+","IEJhcnJ5","IHBlcm1hbmVudGx5","IFJVTg==","IGJ1cmU=","LkZhdGFsZg==","IGNoaWNr","CXBhbmlj","cHNp","b2th","6YCJ","Pls=","IHVuZGVyc3RhbmRz","IEp1bmlvcg==","IElORk8=","PW15c3FsaQ==","dXN0YWlu","LXNvdXJjZQ==","c2Vydg==","IENSRUFURQ==","LmF1","IHNlbGxz","ICAKICAK","RXVyb3Bl","enc=","cHJlaA==","IE5TQQ==","IHh5","4Li0","IEJleW9uZA==","SW5zdGVhZA==","Tm9uUXVlcnk=","IGFyaXNl","IGF2b2lkZWQ=","LmVtcGxhY2U=","X21vZGVscw==","fSksCg==","IGhpZA==","ICZf","LnBvaW50cw==","LmdldFdpZHRo","LkV4ZWM=","IC8vLy8=","IFNlc3Npb25z","Li4uXA==","IENvbG9tYg==","IGFjY2VsZXJhdGlvbg==","cmVzdG9yZQ==","IGlsZQ==","b2JpYw==","PE5vZGU=","IERY","IEJlc2lkZXM=","LmFnZQ==","IE
NvbnRhaW5z","TmF0aW9uYWw=","IEltcGxlbWVudGF0aW9u","IGVmZmlj","IFJN","SHk=","IFdlZGRpbmc=","b2tpZXM=","IHJlY3Vyc2l2ZQ==","IHByb3NlY3V0b3Jz","LlNlbGVjdGlvbg==","IEZvcm11bGE=","QmVlbkNhbGxlZA==","W2lp","IEZyYW4=","IHRyYWdlZHk=","X0ZFQVRVUkU=","mag=","Y29tcGFzcw==","IEJo","PwoKCg==","LndyaXRlcg==","IEhvdXI=","RGJDb250ZXh0","aW92","YW1vbg==","cmVwcg==","6YM=","CWZp","J11d","IERyeQ==","LnJv","IE9ic2Vydg==","5qCH","Rm9ybWVy","IEJhbGFuY2U=","CWpzb24=","IHByenk=","SVNT","KHNvY2s=","IExJTkU=","IGRlY2U=","IGFsbHk=","IHRlbmRlbmN5","RnVu","IHNjaGVtZXM=","IGludGVydmVu","5piO","IGFkdmVyc2U=","cXVvdGVsZXY=","IHNhY3JpZmlj","X3NpZGU=","IG11dGV4","QUdJQw==","IG9jY3VycmluZw==","IENvbW11bmljYXRpb24=","dW1hcg==","57yW","IFRyZWF0bWVudA==","LnBlcnNvbg==","IExD","IGVjaA==","KCgi","IERpc2Vhc2U=","w6Rk","IEFa","LkFjY291bnQ=","IGNvbnRpbnVvdXNseQ==","RU5ESU5H","IFJFVFVSTg==","LXN0cmluZw==","LmZpbGVuYW1l","c3ludGhlc2l6ZQ==","UmVzcG9uZGVy","KG9wdHM=","cmVncw==","IG51ZXN0","UGVlcg==","Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0=","IGdhdWdl","IEtpbg==","LnNjaGVtYQ==","IGFycmFuZ2U=","IEJsYWtl","X1R5cGVJbmZv","Q292ZXI=","IEhhbXBzaGlyZQ==","UGFwZXI=","LWlubmVy","dXRpbGl0eQ==","IGNyb3Nzb3JpZ2lu","Rk9S","IGlnbm9yaW5n","IERE","YXZhbg==","IHRyYWRpdGlvbnM=","IGdldFN0cmluZw==","IGV0aGljcw==","IE1hdGVyaWFscw==","REVTQw==","IGVuenlt","aW9sZXQ=","IENoaXA=","IE1jRG9uYWxk","IG5lcnZl","54Q=","Iild","5rGC","IFN1Z2Fy","X1NJTQ==","anBlZw==","IGRpc2NyZXRpb24=","IFRO","Ym92ZQ==","IE1pbmltdW0=","IEZvcm1Hcm91cA==","IHdvcmtmb3JjZQ==","IEV4ZWN1dGlvbg==","ZXJyZXI=","CSAgICAJ","IHByZXNjcmliZWQ=","LlRleHRBbGlnbg==","T1BFTg==","IFBC","aW1pdHk=","IEV4dGVybmFs","wrBD","IEFwcGxpY2F0aW9uQ29udHJvbGxlcg==","IGJhcnI=","aW1wbGljaXQ=","X2RvdA==","IENvbG9u","Q09MT1I=","LlByb2plY3Q=","Kjwv","LXhs","IG9zYw==","KHBhdHRlcm4=","Jyl9Cg==","c3VjY2Vzc2Z1bA==","YWxvZw==","U3R1ZGVudHM=","XXN0cmluZw==","YW50b24=","YXR0aQ==","Y2hlbWljYWw=","LmluZg==","KGRy","OlVJQ29udHJvbFN0YXRl","dG9JbnQ=","XTwv","0LDQtdC8","IMW+","LkFjdGl
vbkxpc3RlbmVy","LlNFVkVSRQ==","IFNhbHY=","X1RSQU4=","L2ludGVybmFs","IHdlbGNvbWVk","LmNvbW1lbnQ=","bXV0YXRpb24=","IEZBUQ==","Lm9uZQ==","IExBQg==","In19","IFJvbA==","aWV2ZWQ=","IGFkdmVudHVyZXM=","IGZ1bmVyYWw=","IHNwb3VzZQ==","KG9wZW4=","IFJlYWR5","IHRvdXJpc20=","YWRpbg==","X2ZhY2U=","4oKB","IG1pZ3JhbnRz","IFB1cmNoYXNl","Y29yZA==","IE9VVFBVVA==","KSkNCg0K","U2VndWU=","dGFicw==","IGRvdHM=","IG5haWw=","Ym9ybmU=","IGRlc2lyZXM=","IHByZXZlbnRlZA==","J109PQ==","IHRpbWVseQ==","SUNB","U2Nhbm5lcg==","IEx1Y2Fz","IGdpdGh1Yg==","J11bXQ==","ZGlh","Y29ub21pYw==","IGRpZXNlcg==","dW5kZXJz","LkhhbmRsZXI=","PyIs","LmRhdGFi","IGFkdmlzZQ==","LmFuaW1hdGlvbg==","IG92ZXJoZWFk","IG9ic3RhY2xlcw==","X2pvaW4=","IG3DqQ==","RmxhdA==","LmRpc3Bvc2U=","IEV4cGVjdGVk","IGZsZXc=","IGVtYm9k","X3NsdWc=","IG5hbWVseQ==","IHdpdG5lc3NlZA==","c29saWQ=","LmxlZ2VuZA==","UXVhbA==","X3N1cmZhY2U=","44Op","QW1lcmljYQ==","IGFmZmlsaWF0ZXM=","IFByb3M=","X2V4dGVuc2lvbg==","YmluZGluZw==","U1RBTEw=","LnJlYWR5","IGNvcHlpbmc=","IEhlbmNl","IGRpc2NvcmQ=","X3NoaXA=","UHJvcGVydHlOYW1l","CQkgICAgICAgICAgIA==","IGFjaGlldmluZw==","IEJlYw==","Wmlw","U29tZXRpbWVz","44GL","IGNvbnRyYQ==","IHB1bmlzaA==","IGluc3VsaW4=","IGRpc2FwcGVhcg==","X2VudW0=","LmF1dA==","IGhhc2F0dHI=","YWZmZWN0ZWQ=","c2hl","JHRhYmxl","a3Np","IGxhY2tpbmc=","IGRpc2NvdW50cw==","U3RtdA==","IEFyZ2VudGluYQ==","IHVucGFjaw==","IFJvdXRlZEV2ZW50QXJncw==","ICc/","aW50ZXJvcA==","IHNvZmE=","IGR5bg==","IEdyYWNl","IGludGVncmF0ZQ==","2YM=","IGRlbGF5cw==","IEltcGxlbWVudA==","UHJvb2Y=","IGFwcGxpY2FudHM=","IExlYXRoZXI=","7Ja0","IGVuam95YWJsZQ==","U3Bpbm5lcg==","L3o=","IGZvYW0=","IExhYm9yYXRvcnk=","IHJlc2VhcmNoZXI=","IENocmlzdGlhbml0eQ==","IGN1c3RvbWl6ZQ==","IGNpcGhlcg==","IGRvZA==","IHPDsw==","QEVudGl0eQ==","T05MWQ==","aW52ZW50b3J5","IGNvbmNsdWRl","IGN1ZW50YQ==","IENvaGVu","LWluY29tZQ==","bWJI","bWVudGF0aW9u","IHZlcnc=","dWRw","QU1M","LmNvbWJvQm94","Zmg=","am9icw==","RmlsZVN5bmM=","IEJhcmJhcmE=","IFNjYW4=","Y3JlZW5zaG90","IE9ydGg=","LnZpZXdEaWRMb2Fk","IEFSUkFZ","LEA=","L2ludA==","R2
VuZXJhdGU=","IGRlbW9uc3RyYXRlcw==","IFplbmQ=","5YiX","CXZvbGF0aWxl","PXI=","IGZt","CWJ1ZmZlcg==","ZW5hdGU=","LkNvbWJpbmU=","IG1pc2M=","Y2hlbWFz","IHB1cmVseQ==","IGdsVmVydGV4","LlJlc3Q=","IHJlY2FsbGVk","IGZyZWVs","IHNxdWU=","VHJhY2tlcg==","IFBocA==","IERpc3RhbmNl","IGJlYXN0","Q29tcGxleA==","IGNvbnNpZGVycw==","572R","dHJpYnV0aW9u","IGNvbXBsaW1lbnQ=","X2xpbmVubw==","IE11dGFibGU=","IHVuZGVm","IEdlbQ==","IGNvbXBvdW5kcw==","LnV1aWQ=","IGFub255bQ==","IHN0YWlycw==","IERiU2V0","d29ydA==","IFNlbnM=","LkJlZm9yZQ==","IGVuZGZvcmVhY2g=","IFRvZ2V0aGVy","YXRpbGl0eQ==","IG1vaXN0dXJl","LSR7","KFRlc3Q=","VEI=","bXVzaWM=","IGluc2lzdA==","IGhlYWRsaW5l","LkFuZA==","UEFUQ0g=","IFByZXBhcmU=","IHN3aXRjaGVz","KnA=","IFll","X2Ficw==","LmhhbmRsZXI=","IGFzc2lnbm1lbnRz","UHJlZmVyZW5jZQ==","RU5USVRZ","IHBpcGVz","IEFsZXJ0RGlhbG9n","b2dyYXBoaWNhbA==","IHBhdGlv","IHdlYnBhY2s=","YnBz","TmF2TGluaw==","Lk51bWJlcg==","IEFybW9y","IFBldGVycw==","IERlc2M=","ZHVpbm8=","IEljb25z","LmdldEhlaWdodA==","IHRleHRWaWV3","CU5VTEw=","YWxsb2NhdGU=","fSR7","IFByaXpl","LW51bQ==","Lk1vdmU=","6L6T5YWl","LmNhbWVyYQ==","UHJvYmxlbQ==","CXR5cGVkZWY=","KHN0b3Jl","IERJU0NMQUlNRUQ=","IHN1YnN0YW50aWFsbHk=","RkZG","IGVwc2lsb24=","IGluZXF1YWxpdHk=","X2NoaWxkcmVu","5LiH","cmVsdQ==","UGllY2U=","YW50cnk=","YmFiZWw=","dmV0aWNh","IHN1cnZleXM=","IGRldGVjdG9y","CWFyZ3M=","LlNlbGVjdGVkVmFsdWU=","IGludGVyZmVyZW5jZQ==","Li4uKQo=","LlNUUklORw==","IFR5bGVy","IENhdGFsb2c=","VmVydGljZXM=","IFByb2plY3Rz","IExlYmFu","LiIpCgo=","Lmtlcm5lbA==","IHJpZGVz","IE11dA==","YW50aA==","0L7RgNC8","ZW5uaWFs","LnRhc2tz","LnNldFByb3BlcnR5","YXRlZ29yaQ==","5pyA","L2Nvbg==","YnJhY2U=","IE5TRXJyb3I=","J10pKTsK","bGlzdGVk","IFByZXZpZXc=","QWN0aXZhdGU=","IGN5Y2w=","LWFjdGl2ZQ==","aGFk","VG9v","IHJlZ2lzdA==","bGljYWw=","IHBvZXRyeQ==","SW1wb3J0cw==","77yB77yB","Ojw=","IGNoYXJt","IENvdW4=","b2xsaWRlcg==","IGh3","fWAK","PWFyZ3M=","IE5ldXJv","aXRpY2Fs","aWVuZW4=","IERvdA==","X09OTFk=","RE4=","IFBsYXlTdGF0aW9u","IHN0ZWVw","IHByYWN0aWNhbGx5","IGFwcGxpY2FudA==","IGFyb20=","Y
W5pYw==","CWRpc3BsYXk=","IHRlcm1pbmF0ZWQ=","IGNsYXJpdHk=","IE1lbnVJdGVt","IEt1cg==","aWpl","X3dlZWs=","KGRpY3Q=","X3JlY29yZHM=","IENvc3Rh","IGtldA==","RXh0ZW5zaW9ucw==","IG5ldWtlbg==","aW5zaQ==","X2luYw==","IOaW","IGVpbmY=","IFJpc2s=","IGVsZXZhdGVk","cGVycw==","VURB","IEtO","IGxpbmVk","IE1vcm0=","KTsKCgoK","Pn0K","cGxhaW50","Z2V0VGV4dA==","IGluZGl2aWR1YWxseQ==","IGNoZWNrYm94","VVk=","IExhbWI=","IGR5c2Z1bmN0aW9u","IExhcg==","4LA=","IENyZWF0aW5n","Jyk7CgoK","IlRoZXk=","bG9jYXRpb25z","X0NPUkU=","SW50ZXJhY3Rpb24=","dW1ibmFpbHM=","IFBhcnRuZXI=","YnJpdA==","IGxlc3Nlcg==","IFNsb3Q=","c2V0QXR0cmlidXRl","IFdhdmU=","LnBv","L3N0b3Jl","IGJyb3dzaW5n","X3Bk","c3VtZQ==","c2Vk","Q3VydmU=","IHBsYXNtYQ==","IHN1c3BpY2lvdXM=","7J24","IEJhaA==","IEV4cGxpY2l0","X0ND","LkNsaWVudFNpemU=","XFZpZXc=","IHN1YnN0aXQ=","bG9vbg==","IEdBTUU=","IEJyaWQ=","m+W7ug==","X1VzZXI=","IHNxdWFyZXM=","Zm9uZQ==","IHNhY3JlZA==","dWdocw==","XWludGVyZmFjZQ==","IFRocm93","IEtpcms=","IGVtcGlyZQ==","IGFzc2Vzc2Vk","VGF4","IEhlYXZlbg==","LWJ1ZmZlcg==","X1NUQVRJQw==","w6luw6k=","LWJvcmRlcmVk","IHB1bmN0","KG1vZGU=","IGtlaW5l","U2VudA==","IENhbGN1bA==","IEV2ZQ==","IHN0eWxpc2g=","IG9pbHM=","LlRlc3RDYXNl","IHRyYWRlbWFyaw==","IGxpdGVyYXJ5","IGNvbmNlbnRyYXRpb25z","IFJlbGF0aW9ucw==","KENsYXNz","IHN0ZGlu","IHbDpg==","YmFja3Vw","LlZFUlNJT04=","LkF1dG9TY2FsZURpbWVuc2lvbnM=","c3RhcnRlcg==","VHJhbnNhY3Rpb25hbA==","LXBhbmVs","U3R1ZGlv","a2M=","IENoYW1iZXI=","IFNwaWVs","IHJobw==","2KfZhA==","ISc=","LkF0dHJpYnV0ZXM=","IG11cmRlcmVk","YXBldXRpYw==","IGludGltYXRl","IHRleHRGaWVsZA==","IEJ1ZmZhbG8=","ZHVtbXk=","IiU=","IExpYmVydHk=","b2Jhcg==","IFRhbms=","IFBvcHVsYXI=","ZXJ2aXNvcg==","IEluaXRp","IE1hbGw=","IFByaW9y","Q0FQ","IENsYXk=","IENlcnRpZmljYXRl","LkxvY2s=","LXN0cmlw","LWRyaXZlbg==","L2FsbA==","IE1lc3NhZ2VCb3hCdXR0b25z","X1NFQ1JFVA==","X3Bi","IHJhdHM=","4KS+4KQ=","IG50","LlJvdXRlcg==","X3RvcGlj","IHRlbm5pcw==","IFBVQkxJQw==","IEFjdGl2YXRlZFJvdXRl","ICcsCg==","IGNvc3R1bWU=","IGpva2Vz","LkhhbmRsZQ==","CWJ5dGU=","IGZsYXZvcnM=","KGNj","
IHBlcnNvbmFz","CWltYWdl","IE5hemk=","IGdyYW1tYXI=","IMO6bHQ=","IHZhbHZl","IHZpYw==","IFJhY2hlbA==","X2ludmFsaWQ=","UHJlZnM=","c3RkaW50","KHJvdXRl","IGh0bWxzcGVjaWFsY2hhcnM=","IHBlb3BsZXM=","cGxpbmU=","IG52","IFF1YW50","b3BwZXJz","IGN1cnJlbnRVc2Vy","IENhdGFs","IHJlY29uYw==","IGNvbmp1bmN0aW9u","bHg=","YW1idXJn","IGluZmx1ZW50aWFs","ZGFuZ2Vy","aW5kZXJz","ICVAIiw=","LmNvbmZpZ3VyYXRpb24=","b3NvbWU=","LmlkZW50aXR5","IHBpY2tlcg==","bm9zdA==","IERJWQ==","QXVndXN0","YWJsbw==","TGVhZg==","IFJlY28=","Y2tv","RE9D","IEhlcm0=","OmFueQ==","IEludGVydmlldw==","IFRleA==","eGZl","KHdvcms=","IGxlYXA=","SGVhZGluZw==","IHF1YXJ0ZXJz","XEJ1bmRsZQ==","cmVi","UGVyaGFwcw==","IEdtYkg=","QmlydGg=","CXN1bQ==","IFdhdHNvbg==","Lm5pbA==","56E=","e30KCg==","aWNhaWQ=","R2V0dGVy","Im5hbWU=","ICINCg==","X25vbmU=","em0=","YWN1dGU=","dWVzdG8=","IHNvdXM=","IHJlYnVpbGQ=","IG5ld3NwYXBlcnM=","IEhheg==","IGtpdHM=","aWZv","Qmx1cg==","IHN1aXRlZA==","LUlu","4K8=","IEtlaXRo","IE5vcndheQ==","SU5JVA==","aXJlY2Npb24=","aWV0aWVz","X3VzYWdl","IERvdWc=","cmlzZQ==","IHRyaWxsaW9u","aW1pdGVk","IFJFTA==","YWxpYw==","IGNyaXRpY2l6ZWQ=","dGhlb3JlbQ==","IGNlYXNl","IHNpZGV3","IFRlcnJ5","IHN1YnNpZGk=","IGZpcm1seQ==","IGF3cw==","IGhvdHQ=","IGRyZXNzaW5n","YmFkZ2U=","IEFwcGxpY2F0aW9ucw==","6L+U5Zue","IGxhdWdoZWQ=","IGhvYmJ5","IG11c2ljaWFucw==","ICou","LnBsYWNlaG9sZGVy","IGNvdW50ZXJz","IENhcGl0b2w=","U0RL","IGhlbG1ldA==","YW5kYm94","cXVpdA==","IGNyaW1pbmFscw==","IHRlZW5hZ2Vy","KHVwZGF0ZQ==","R2w=","LnNlbGVjdGlvbg==","IGRpc2NoYXJnZQ==","IHByZXNlbnRpbmc=","dWZhY3R1cmVy","X1VOS05PV04=","IHN0cmVzc2Vk","5Zmo","UHJvdG8=","X2NvcnJlY3Q=","aGF1cw==","IHJlbm92","IGZpcmVhcm1z","IHRlY2huaWNhbGx5","LWJyb3dzZXI=","IGNhbmR5","U3Ryb2tl","IGV4ZWN1dG9y","IG9jY3VycmVuY2U=","IElQdg==","X0lOVEVSRkFDRQ==","IFJldHJpZXZl","LmJhZA==","RXhjaGFuZ2U=","TmF2YmFy","IEtpZA==","KGdldEFwcGxpY2F0aW9uQ29udGV4dA==","X1NUT1A=","IEJvc3M=","TGlzdGVuZXJz","IHNob290ZXI=","IEFsYg==","w6RjaA==","IHBpeA==","LmtleUNvZGU=","YWxvbmU=","IGFic3VyZA==","IEN1bQ==","IE5ld3RvbnNvZnQ=",
"aWt0","IGxhdWdoaW5n","IGNhcGl0YWxpc20=","cmVlTm9kZQ==","VHg=","X1FVRVJZ","LlNsZWVw","KGxvZ2lu","V2ViRWxlbWVudA==","IGNlbGVicmF0aW5n","IGRlcHJlY2F0ZWQ=","IG1hYXI=","IGFydGlzdGlj","X0FTU09D","IEJvcmRlclJhZGl1cw==","CXdw","IHN1cnZpdm9ycw==","SW5uZXI=","LXJlZA==","IHByb3NlY3V0aW9u","X3Bw","KCI8Lw==","IF49","IGxhbQ==","IFRyYWRpbmc=","ZmxhcmU=","RGV0ZWN0b3I=","TUY=","IEVtZXJnZW5jeQ==","IEVhZ2xlcw==","cXVhZA==","IEluY3Jl","cGxpYW5jZQ==","XE1pZ3JhdGlvbg==","IHVwZ3JhZGVz","Q1BV","YWdnaQ==","ZnByaW50Zg==","aWdpb24=","IGJlYXV0aWZ1bGx5","IGRyaWVk","X0hJR0g=","IGdwaW8=","TVND","IERlcHV0eQ==","IERlY2w=","IHRyZWFzdXJl","c2dpdmluZw==","X3NpZGViYXI=","IGFwYXJ0bWVudHM=","IFdy","IGJvYXRz","IGJvcg==","Lmxhbmd1YWdl","IFVp","bGl0","ZnJt","YW5jaWVz","IG1hc3Nlcw==","IEFzc2lnbg==","IFBPTA==","IG1hcERpc3BhdGNoVG9Qcm9wcw==","IGJyYWNrZXQ=","IFBhcA==","IENp","IEludG8=","IHRlYW1tYXRlcw==","IGZvcmFsbA==","dWx1aQ==","IENhcm4=","X0lOUw==","YXppb25p","Y2Vw","IHRvdXJpc3Rz","LWJsdWU=","IExlZA==","IHBlbmV0","IEZv","IGltYWdpbmc=","cHJh","IHNsYXZlcw==","b2xlcmFuY2U=","IGluY29ycG9yYXRlZA==","Jiw=","dWFibHk=","IEthcA==","WG1sRWxlbWVudA==","IE11ZWxsZXI=","Q2hhbmdlTGlzdGVuZXI=","IEhvbGlkYXk=","CSAgICAgICAgIA==","RmxleA==","CVVzZXI=","Il0pKQ==","X3N1Ym1pdA==","LmJvbGQ=","IGxvY2tz","IEN1YmE=","dWRzb24=","SG9vaw==","IFdhcm5lcg==","X3N0YXI=","Ij0+JA==","IGNvbW1h","dW5jaGVja2Vk","Z3JhcGhpY3M=","cm9ycw==","R1JPVU5E","KHB1YmxpYw==","IGN1c3RvbWl6ZWQ=","IEFya2Fuc2Fz","IFJldw==","IGV4cGlyYXRpb24=","15U=","IEN1bA==","IG5vbnM=","LkZpbHRlcg==","IHNlbmF0b3I=","X2RlZmluaXRpb24=","YXNoaW5ndG9u","eW1waA==","L0o=","IGZ1c2U=","cmFtaWQ=","IFN1cHBsaWVy","IGF1dG9jb21wbGV0ZQ==","IH0pLA==","LiIKCgo=","X2Z1bmN0aW9ucw==","CXRv","LmV2YWw=","IFRPYmplY3Q=","UmVmZXJlbmNlcw==","IGhlYXRlZA==","SEFM","ICkpfQo=","fSQ=","IEJhcnI=","X1VOSVQ=","KyQ=","IGdldFZhbHVl","aXBlZA==","Y2hpZWQ=","KHZt","Y3Vl","X2ludGVnZXI=","X2NvdXJzZQ==","dGhpcmQ=","IHJldmlzZWQ=","KiovCg==","X0RJUkVDVA==","T3V0T2Y=","KCIo","IEZlZWw=","IHJlYXNz","IHN1YnRpdGxl","cGVyaQ=
=","bmY=","IGVuam95cw==","IHRyZWF0cw==","KXRoaXM=","LXRhYnM=","YW5jZXJz","IGNvbnRpbmVudA==","IGNhcmRpbw==","U2Vy","LnF1ZXN0aW9u","IHBocmFzZXM=","VmFsaWRhdG9ycw==","IHBvcHVs","IGzDrQ==","c29uZw==","X0lOVEVSTkFM","IGFkdmlzZXI=","IHB1eno=","IGFtYml0aW91cw==","IFRvYg==","IERQ","IHByZXNpZGVuY3k=","IHN1cnJlbmRlcg==","IHdhdGNoZXM=","X2JpbmFyeQ==","IFNvb24=","IGNhbmFkYQ==","KCIiKQo=","XT0n","IEJyYW5kb24=","ZXBzaWxvbg==","cnc=","LmFkZENoaWxk","LkNvcHk=","UHJpbmNpcGFs","UGhvdG9z","IG1hcmdpbmFs","IGJhc2ljcw==","ZWluZw==","TXVzdA==","X1N0cmluZw==","IG9sZQ==","TWFnZW50bw==","LmN1c3RvbWVy","KHByZXY=","4Lil","IGxveWFsdHk=","Q29n","IHByb3RvY29scw==","IENvbXBhbmllcw==","IHRoZW9yZXRpY2Fs","IGFjY2Vzc2luZw==","IFplbg==","Lm9uZXM=","YXR0aWNl","X3dvcmxk","emVz","IHRhdHRvbw==","IG1lbm9z","IGludGVyc2VjdA==","Il07Cgo=","YmVsaWU=","IGluYWN0aXZl","LnJlYWRsaW5l","LWxhYmVsbGVk","LmRvbmU=","bGlja3I=","IFdPUks=","IGRlcml2YXRpdmU=","IGRhdGFiYXNlcw==","4oKC","IHN4","LmlzQXJyYXk=","IHlz","IHBhZGE=","IEJ1bGxldA==","KGAv","aXNBY3RpdmU=","IENHU2l6ZQ==","KGVxdWFsVG8=","IENvbHVtYnVz","IG1hcnJ5","REVW","X2xpbWl0cw==","cm9uZXM=","SUFT","IHRhdQ==","bWlubw==","X1dyaXRl","IFdpbmU=","IFtbJw==","IFB1bGw=","cml0ZXJz","cmllbnRz","IHNoaWZ0aW5n","dXBw","X1RJTUVS","IENvbmRpdGlvbnM=","4bql","IE9yZGVycw==","IFN0cmVuZ3Ro","5omA","IHZhbGlkaXR5","IGZvdA==","ZXR1cg==","IGJvbHQ=","5YaF","IEFsb25n","b3NoaQ==","IGFzc3VtcHRpb25z","IG1hZ2F6aW5lcw==","X1NQSQ==","IHB1bnQ=","X1BST0RVQ1Q=","IHJlbGF5","IEphdmFzY3JpcHQ=","LnRl","LWVz","IHdpZGdldHM=","KGZz","PEl0ZW0=","X2V4dHJh","IHJlY3J1aXRpbmc=","RXQ=","IG5lY2Vzc2l0eQ==","cHc=","IG5vdmVscw==","dXNzZWxz","Q3JlYXRvcg==","IE1WUA==","IE9D","dGhvb2Q=","Y2xpZW50cw==","KSkq","IGNoYXJhY3Rlcml6ZWQ=","X1NFTkQ=","dXRp","VHk=","LmZyb21Kc29u","QFNlcnZpY2U=","44KC","Q2hyaXM=","X0lz","IEpvaG5ueQ==","IGNsZWFuZXI=","IEluaXRpYWxpemVz","VU5L","KGF4aXM=","0LXQtw==","aWV2YWw=","IFdhcnJpb3Jz","fSko","RE1J","4pmA","IFRyZWFzdXJ5","IGZlYXM=","IHNsYQ==","X0VOVU0=","bGhz","IEluc3RpdA==","aXBwZXJz","TGluZWFy"
,"UmVhZGluZw==","cXVpcmllcw==","LWNlbGw=","Y2hyb21l","LlNlYXJjaA==","SU5B","57G75Z6L","IAogCg==","IFNhbXVlbA==","IG1pbGxz","IGRvbmF0ZQ==","IEdlbw==","KHJvd3M=","IHNoZWVw","IMOpbA==","5L2T","IGJlbQ==","X1VOVVNFRA==","IFJDQw==","IGludHJvZHVjaW5n","YXR0YQ==","IFByaW9yaXR5","IEZC","IFNlcmdl","PiI7","YXRjaGluZw==","IEtub3dsZWRnZQ==","CVRoZQ==","O21hcmdpbg==","bGVzc25lc3M=","b3BhcmQ=","dW1hdGlj","KCkpKTsNCg==","IGZhbHM=","KGNhY2hl","VHlwZUlk","6YCa","X2Nob2ljZQ==","IEdvdGg=","IFNpdGVz","TUc=","X2JvcmRlcg==","SW5kaWNlcw==","Q29tcGFyZXI=","IFJlZGlzdHJpYnV0aW9u","IGNsb3NldA==","IHZlcnNhdGlsZQ==","SW5wdXRz","KioqKioqKioqKioqKioqKioqKio=","IG9iZXNpdHk=","cXVpeg==","Z3Jh","KGdsb2JhbA==","5Yqh","IGNvbGxlY3Rvcg==","IGtvcg==","b3ZhYmxl","QURD","IEV2ZW50SGFuZGxlcg==","Lm5j","IHBsYXliYWNr","aWVudG9z","X3Blcm0=","X1dBUk5JTkc=","IE9seW1waWNz","Lm5vcm0=","IEJyb2FkY2FzdA==","X3NtYWxs","ZHJpdmU=","Lmlsb2M=","IHR5cGVk","TUVN","X2NvbnM=","RE1FVEhPRA==","IGx1bg==","LmRpc3RhbmNl","KHBhcg==","cG9vbg==","IGJhc3Q=","YWN0aXZpdGllcw==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","Og0KDQo=","U0VS","KSYm","X2xzdA==","IFBvbGlzaA==","IGtub2NrZWQ=","IGZydXN0cmF0aW9u","YXVrZWU=","IHBob3NwaA==","aXF1aWQ=","X2NvZWZm","5q2k","TGF0ZXN0","IER1c3Q=","VGlwbw==","IG1haW50YWlucw==","IG1hcnNo","aW5jaW5u","bGJs","Q2FyZQ==","IG5laWdoYm9yaG9vZHM=","X2dwaW8=","IEFyc2VuYWw=","RGVt","IFdoZQ==","X2hvb2s=","IGxkYw==","IEhhcnBlcg==","IEJlcmtlbGV5","IGdyYWR1YXRlZA==","UGVyY2VudA==","IGFycml2aW5n","IEFkdmVudHVyZQ==","KHNjb3Bl","KCcq","cXVhcnRlcg==","IE1hcmll","U3BlYWtpbmc=","X2NvZGVnZW4=","IGltbXVu","Y2FzdGVy","44KM","5ZWG","IERpbWVuc2lvbnM=","LnJlY29yZA==","IHRleHRv","IE1pY2hlbGxl","UGVuZGluZw==","KGJ5","X1BBUg==","dWNodA==","YmVl","LlRocmVhZA==","YW1waXJl","a25vdw==","IENsaW5pY2Fs","IG1hcmdpbkJvdHRvbQ==","IGRpc3Rpbmd1aXNo","LkZ1bGw=","LnVuZGVmaW5lZA==","IFNlcXVlbGl6ZQ==","IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIw==","I
GVkdWNhdGVk","X09WRVI=","5bqP","IMKgIMKg","X2VhY2g=","IHVyZ2U=","ZGVwYXJ0","IGRvbm9ycw==","IEF1","IGJpbGxpb25z","IGJlbG9uZ2luZw==","X2FnZQ==","X0ludA==","IHN1YnN0YW5jZXM=","bWFjaGluZQ==","ISEhCgo=","IGpzb25pZnk=","aWJiZWFu","IENhZA==","IGVuZFRpbWU=","IGN5Y2xpbmc=","IFVJVGV4dEZpZWxk","IGxldmVyYWdl","IHZhbmlsbGE=","ZWF0","TGF1bmNo","KHB0","c3RhdGVz","IENvbnRyb2xz","IFJlc3BvbnM=","IEpha2U=","IGFzbGVlcA==","Zm9ydHVuYXRl","Lm5leHRMaW5l","U2l6ZU1vZGU=","7J28","VGVzdGluZ01vZHVsZQ==","R2VybWFu","IEludmVzdGln","LnJldmVyc2U=","IEJBQ0s=","KERhdGVUaW1l","IG5vbnByb2ZpdA==","IEV4cGVjdA==","IHRhbnRv","J10pLA==","CXRoZQ==","TXVsdGlwbGU=","KGdldEFjdGl2aXR5","X1dBSVQ=","IGrDoQ==","ZGVjb3I=","bGV2YW5jZQ==","IEdpdEh1Yg==","bWluYXRpb24=","X3F1YW50aXR5","LlNjYW5uZXI=","IExpb24=","6ZSZ6K+v","IGRyZQ==","IHRhbnRyYQ==","IGNvbnRlbnRUeXBl","IGZpZA==","X2FsdA==","TlNJbmRleFBhdGg=","LXBs","5YyW","IGFudGliaW90","dGFibGVz","YWNpYWw=","IFJlZ2lzdHJ5","IG9saXZl","aWdlcnM=","IHN1YnNjcmliZXI=","X3ByZXM=","IFN5bnRheA==","IGxvdmVycw==","LkJ5dGU=","b2xkZXJz","X2ZvcndhcmQ=","YWx3YXlz","Q2FwdGlvbg==","UHJpdg==","IFRhbXBh","aXNhdGV1cg==","LWxhYmVsbGVkYnk=","IFRvU3RyaW5n","IOyCrA==","IGluaXRpYXRlZA==","V0Y=","IGluc3RpdHV0aW9uYWw=","aW5qZWN0","IFNjcg==","IGRvY3RyaW5l","IHNwYWNpb3Vz","aXN1cmU=","IEFuYQ==","InRpbWU=","ZXNzYWdpbmc=","IGNpZA==","IE5hbg==","IGluY29tcGxldGU=","VEFH","LWJ1aWxk","RGVjZW1iZXI=","IHJlc2lkdWFs","KFBETw==","IExpc3Rlbg==","IGdseXBo","IGdhcHM=","bmVh","LlJlY3Q=","IHNhdQ==","IFBob3RvZ3JhcGg=","IGV4ZWN1dGFibGU=","IEV4cGVydA==","Q29yb3V0aW5l","X3NpemVz","IE5M","LmlzVmFsaWQ=","KTt9Cg==","LXJlZw==","IGNpdGluZw==","Y3dk","IE90dGF3YQ==","IEJhdHQ=","IHJlbmV3YWJsZQ==","IHByZWxpbWluYXJ5","IGFzeWx1bQ==","IHdyaXN0","IHV0aWxpeg==","IGRldGVudGlvbg==","RmFzdA==","IGFuZ2U=","aW5jaW5uYXRp","IHN0ZWVyaW5n","IE5hTg==","aW9zaXR5","L3BhZ2U=","IOi/","c3Rlcm9s","IGRpc2c=","KERC","IERFU0NSSVBUSU9O","IF8k","IG9ic3RhY2xl","IGJpemFycmU=","IGV4dHJhY3Rpb24=","X2V4cGVjdGVk","IGxvc2Vz","IENlbGVicg==","IGh0bWxGb3I=","IGV4c
GxvaXQ=","0L7Qu9GM0LfQvtCy","WFla","IG1hZ25ldA==","YW1wZWQ=","IGF0b21z","U291cmNlcw==","cGVjdGl2ZXM=","0YHQu9C4","ID0NCg==","IGRhcmU=","IFdhbHRlcg==","IGJyaWdodG5lc3M=","IGFubm90YXRpb25z","648=","aXNrZQ==","U2NoZWR1bGU=","LmltYWdlcw==","cm9zc28=","ICIuLg==","Z2FtbWE=","IGluc3RydWN0b3I=","IG92ZXJ3cml0ZQ==","LWFt","IGRldmFzdGF0aW5n","IFNhaW50cw==","IGhz","IGJvbnVzZXM=","JG91dHB1dA==","aWpk","KEFjdGlvbkV2ZW50","bW9uaXRvcg==","IG1hdHRyZXNz","SmFudWFyeQ==","Lmpw","IGNhcmFjdGVy","IGltcG9zZQ==","X3Jlc3Q=","IFNpZ25hdHVyZQ==","IGNvcm9uYXZpcnVz","44GK","X2NvbXBhcmU=","TWVhc3VyZQ==","aXRhdGVk","ZWxpams=","aWdvcw==","ZXNhcg==","IHJ1c2hlZA==","bWV0cnk=","X1NFUEFSQVRPUg==","X1dF","X0FUVFJJQlVURQ==","IHlhbWw=","IHNwZWNz","IFJhaA==","cGhlcmlj","IEludmVzdG1lbnQ=","w6RsbA==","IGFwcGVhbGluZw==","IHZpZXdwb3J0","56k=","IG1hcmdpbkxlZnQ=","IHN1YnRyYWN0","IEVESVQ=","CUFycmF5TGlzdA==","Z3JhZGluZw==","IEZhaWx1cmU=","YXNwZXI=","RUVL","KG5vdw==","PG9iamVjdA==","IEFsaWdubWVudA==","cGxlYWRv","cXR0","KEVSUk9S","IElOVkFMSUQ=","IHVzZXJpZA==","cmFpc2Vz","SURJ","IHZhcmlhbmNl","IE5pbA==","L2RlbGV0ZQ==","X01BSU4=","LlRva2Vu","LkNhdGVnb3J5","PikK","Q29sbGlzaW9u","IEdyZWF0ZXI=","IFJhY2luZw==","YWxhbg==","IG1vbmV0YXJ5","LG5ldw==","IFNvcnJ5","LkVuYWJsZQ==","IEluc3RhbnRpYXRl","b2xsZW4=","66m0","IENhbGxpbmc=","X2hvdXI=","QURB","IHNoeQ==","KSoq","ID09Pg==","IGVzcGVjaWFs","IGludGVycHJldGVk","IT0i","IHBoYXJtYWN5","LnNpbmdsZQ==","IENpYWxpcw==","IHBhcmFz","LnRvVXBwZXJDYXNl","IERlbW9u","UHJpbWU=","IHJhbmtpbmdz","QWRkaW5n","X0hBU0g=","IEV4YW0=","2qk=","IFZpY3Rvcg==","T2theQ==","Il07DQo=","IGZvcnR1bmU=","IEZFVENI","ZXhwYW5k","LkludGVyb3A=","IGJhcm4=","5raI","dWV2bw==","IHNwZWN1bGF0aW9u","4pSA4pSA4pSA4pSA","IE51","IEJsdWVz","KGZuYW1l","IGluaGFiaXQ=","IFwiJQ==","Q0VT","dWxhcmlv","X2Ny","IHZhbGlkYXRlZA==","IG1pZG5pZ2h0","YW5raW5n","IGluY29ycG9yYXRl","IHB1cnN1aXQ=","RVhQ","cHJpbWU=","UGlk","LVVT","IE51cnM=","IFdoZWVs","6Zg=","IGlucA==","IHN1cHBvcnRpdmU=","Lm1lbWJlcg==","IFNob3Q=","LkNoZWNrQm94","IGFmZmlybQ==","VG9y","Rn
VsbFllYXI=","IGNvbnNpZGVyYWJseQ==","Y3JlZGVudGlhbHM=","X29wdHM=","Um9sbA==","KHJvdW5k","IGNvbWVudA==","X1VBUlQ=","IGV4dGVuZGluZw==","Ukc=","cmVzdWx0YWRv","aXR1","LmdldFNlc3Npb24=","IGF0dHJhY3Rpb24=","JkQ=","JGh0bWw=","IEplc3NpY2E=","IEFzc29jaWF0ZQ==","YcOx","X2Vk","IExhZw==","IG9yaWdpbnM=","KCkpLT4=","YWRkRXZlbnRMaXN0ZW5lcg==","SUFMT0c=","5ZCm","LkNvbXBhcmU=","QWxidW0=","IEt1","PFE=","YXJnZXN0","IHByb2xvbmc=","IGNvbmZpZ3VyYXRpb25z","IGFjY2lkZW50YWxseQ==","X3Bob3Rv","ICcnOw0K","IHZlcnNl","Qm9i","IGZhcm1pbmc=","ZGVsaXZlcnk=","IE1hY2s=","IHVzZVNlbGVjdG9y","LmJvb3RzdHJhcGNkbg==","a2VlcGluZw==","ZW55","LnVwbG9hZA==","IE1FVEhPRA==","Y3JlYXRvcg==","PF8=","IEVhc3Rlcg==","Li0t","VUlCdXR0b24=","44KJ","b21ldGVycw==","IHNoaW5l","IGhvZ3k=","XHM=","IGhhcm5lc3M=","LkNlbGw=","IGxpZnRpbmc=","IGNvbWJpbmVz","IE9jY3Vw","ZXhjbHVkZQ==","cGF0aWFs","IHJlc3Bpcg==","X2ZpdA==","IGZpZnR5","IE1vbA==","IHR1bmVk","LWRpbWVuc2lvbmFs","IHFz","IHRvcHM=","PiI7Cgo=","cXVpc2l0ZQ==","Y2hhbm5lbHM=","L3Jlcw==","IEFuYWx5dGljcw==","LmFwcGNvbXBhdA==","L3Rv","IG9uRXJyb3I=","KGF0dHI=","SVJN","IHJhZ2F6","LWFz","LlNlY29uZA==","b3JpZW50ZWQ=","IGRvbm4=","IGxpZ2h0bmluZw==","Zmlk","IFBsZQ==","44G+44GZ","dHJv","LlRydWU=","T2JzZXJ2YWJsZQ==","15k=","dW1iaW5n","IHByb3NwZWN0aXZl","LWZpbHRlcg==","IHB1cnN1YW50","KHBvaW50cw==","LkJpbmQ=","IHBhbG0=","Y2xlYXJmaXg=","w7Zz","IEdvbno=","IHdlYWtlbg==","RHJpdmU=","ZW5pZG8=","bGxk","b2JveA==","YW5lYW4=","R290","5L+d","UmVnZXg=","5oM=","IHNhbGFk","YXNzaXM=","Im5ldA==","aW5oZXJpdERvYw==","IFJW","cXVpZXI=","IGNsYXp6","xLHFnw==","b3N0ZXJvbmU=","IGFpcmxpbmU=","Lmxpc3RkaXI=","IGRvd25sb2FkaW5n","IFBhbG0=","d2F1a2Vl","Jmx0","LkJM","X0lOTElORQ==","b2Zmcw==","PDwo","X25ld3M=","IGNoYXNl","Lz48","IGV1cm9z","IEVneXB0aWFu","IFN0YWlubGVzcw==","X0JPT0w=","IEd1aWxk","IER5bmFt","W2luZGV4UGF0aA==","IO8=","IG1lbW9yYWJsZQ==","IENoYW1waW9u","UmVzb3VyY2VNYW5hZ2Vy","LkxvZ2lu","IEZvcm1lcg==","eXBlZA==","IGxsZWc=","OyIs","RFdPUkQ=","IHRheGk=","IGJvbWJz","cmFo","LnRhZ3M=","X3Rlc3Rz","c3RvbmVz","4oCdKQ==","W2c
=","cnR5cGU=","IHZ1","IGhvc3RpbGU=","Q2hhcnM=","IFBhdHJpb3Rz","L3N0YXR1cw==","PEI=","IEluY29tZQ==","IERhZA==","IHBhdHJvbA==","X0NIQU5HRQ==","IHVwZ3JhZGVk","IGNoaW5h","c2V0cQ==","U3RhcnRlZA==","LlVuZGVm","IGNoZWNrc3Vt","IGZydXN0cmF0ZWQ=","e28=","IGVuZg==","IHdvb2Rz","IEFueW9uZQ==","RW5jb2Rl","IFF0V2lkZ2V0cw==","YXJlYXM=","IHNoZWVy","c2tp","ZW5kcG9pbnQ=","X1Rlc3Q=","U291cA==","fn5+fn5+fn5+fn5+fn5+fg==","KGZpbGVz","CQkJCQkNCg==","LnNwYXJr","IHZhbHVlZA==","ICUK","LmNvbnRyb2xz","IFhDVEFzc2VydEVxdWFs","IGZhbWU=","IFJpYw==","RE9U","IEFsYmVydGE=","5L2/","b3NhbA==","LldlYkNvbnRyb2xz","IC0tLS0tLS0tLS0tLQ==","IE1pcw==","IFNZUw==","Tm9ubnVsbA==","PWl0ZW0=","IGV4cGlyZQ==","RGVjb2Rl","X29wZXJhdGlvbg==","IFZhbGlkYXRvcg==","LkNFTlRFUg==","dWZmcw==","Km0=","IGF2YW50","5qyh","4oCcWW91","LnBlcm1pc3Npb24=","Li4uKQ==","IExpYw==","X2Nvb3Jkcw==","Lm5vbWJyZQ==","Y2xv","LkludGVybmFs","IENobw==","X3N3","CUls","Y2xr","IGNhc3RsZQ==","KGxheWVy","cGl0","IGd1aWRlZA==","IOKWiA==","IHN1cGVyYg==","IHN1cHBsZW1lbnRz","X2NlbnQ=","IHBlZWs=","SU5BUlk=","LkNvbnRlbnRBbGlnbm1lbnQ=","ZmFsbHM=","IikpOw==","V2FsbA==","KS4NCg==","IERhbm55","aXJtaW5naGFt","SUFMSVo=","KGNyZWF0ZQ==","Iklu","U2VydmljZVByb3ZpZGVy","IHByaWNlZA==","bWFjcm8=","YW1hYw==","LmJveA==","LS0tLQo=","44Or","IFN1aXQ=","dXJzdA==","YnJ1","b3VybmFscw==","bnVtZXJv","X18oKQo=","RGFz","IE1pdHQ=","dWRlcg==","P1w=","ZnU=","W0I=","IDopCgo=","KGludGVy","YnJhaW5z","IGF0dGl0dWRlcw==","VmVyaWZ5","IHNpZ25hdHVyZXM=","YWNrQmFy","IGdk","SmFjaw==","LmNhdA==","IHp6","d2FyZg==","RlRFUg==","Iik7CgoK","QWxpdmU=","SUNMRQ==","IFdoYXRldmVy","IG91dGxpbmVk","c3ByaXRl","0LXQsg==","X0FC","X0RFUFRI","IGNydXNoZWQ=","YWFh","KGV2","5py6","QW50aQ==","SUNP","aXNFcXVhbFRv","LnN1bg==","aWN1bG8=","c2FsZQ==","X2hleA==","IFZr","YXB0b3I=","VW5pb24=","IERpc2NvdW50","bGlzdGE=","LlVuZGVmT3I=","IGF1dG9tYXRpb24=","Tm9y","5a+5","5Y+C5pWw","IHJlZmxleA==","IExhdXJl","LnNob3dNZXNzYWdlRGlhbG9n","LnRlbXA=","IGFrYW4=","IF9fX19fXw==","LklzVHJ1ZQ==","QVJFRA==","YWdsZQ==","RW5lcmd5","IHF1YW50aXRpZXM
=","4oCZw6k=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","IGNpdGl6ZW5zaGlw","bW91dGg=","IGluYXBwcm9wcmlhdGU=","IE91dGRvb3I=","V2hpdGVTcGFjZQ==","QW5vbnltb3Vz","bG9hZHM=","d2ViRWxlbWVudFByb3BlcnRpZXM=","VGVu","IGFjY2lkZW50cw==","IGFkdmVydGlzZW1lbnQ=","IFllbWVu","KGNhbGw=","IHNsYXZlcnk=","0YHQvw==","IExhbQ==","X0JJVFM=","b21lZ2E=","IE9sZQ==","IGtpZG4=","X0Fu","IFJhaWQ=","Q3JlYXRpb24=","c2F2ZWQ=","IHByb3BvcnQ=","V0FSTklORw==","XFA=","IHB3ZA==","RGF0YVJlYWRlcg==","aXNjaGVy","YWRlb24=","IFByZWRpY3Q=","IHJlYXNvbmluZw==","IGRlc3Ryb3lpbmc=","SGVs","KmQ=","IExlZ2lzbA==","X1By","CQkJICAgICAgIA==","IHN5bXBhdGg=","IGNoZXNz","IG1hbQ==","OmhvdmVy","IGNvbnZlcnRz","IHBlbGE=","IHByb2dyZXNzaW9u","ICJfIg==","IEdpbGw=","CXNob3c=","IHN1cHBvc2VkbHk=","YWNjdXJhY3k=","ZWxpbg==","IHVuZm9sZGluZw==","IEh5cGVy","IHdhbm5h","IHVwcw==","KCM=","IENyaW1pbmFs","KFBvaW50","YXRMbmc=","YWN0bHk=","IGNvbnRyYWN0b3Jz","J119","ZHJhdWxpYw==","w7NkaWdv","IFRU","IFdpZGU=","IEFSRw==","X2lj","RkxBR1M=","U2Nob29s","IGNsZWFyaW5n","LWJlaW5n","PXtb","LGNvbnN0","bWFuZW50","T3ZlcmxheQ==","KCci","6YeP","IFRpbWVzdGFtcA==","IG1haWxpbmc=","IENha2U=","LlRoYXQ=","IG1lZGl0YXRpb24=","cXA=","IGVtcHJlc2E=","IExpb25z","IHdlbGQ=","IExpbmtlZElu","IGN1c2g=","IGdlbm9tZQ==","LkluZGV4T2Y=","YWdhaW4=","IGZhbGxiYWNr","IGNhbXBpbmc=","cmVkZA==","LXN0cmlwZWQ=","IGR2","RmVicnVhcnk=","IFByb3h5","dXNr","IGRpZXNlbA==","V1JJVEU=","UkVBSw==","TG9yZW0=","Lkludm9rZQ==","LWRpdg==","SW50ZXJjZXB0b3I=","IERI","aWFsZXM=","IHZpbGxhZ2Vz","2LQ=","IEVOVg==","U3lz","LlhS","IHBvZW0=","w4I=","Y2FkZQ==","cGxvdHM=","IHso","LmdpdA==","L3N2Zw==","bmNtcA==","IMSN","YWluZXM=","5Ye95pWw","ICgpCgo=","b3BzaXM=","IFJlbGF0aW9uc2hpcA==","X2F1dA==","IEJvbWI=","CWNvbQ==","KnNpemVvZg==","b2ZmaWNpYWw=","X3BheWxvYWQ=","CQkJCQkgIA==","Lm1hbmFnZXI=","IEFyb3VuZA==","CXNlbmQ=","IEV4ZXJjaXNl","IEJpbGx5","aXZp","IG5lZWRpbmc=","X3VybHM=","X3Rhc2tz","IEhlbQ==","IHRlYXJEb3du","ZW5jcnlwdA==","LnRpZQ==","IGFzbQ==","SUNI","IENHUmVjdE1ha2U=","7ISx",
"dWxvbmc=","IGl0cg==","IEdTVA==","IG9mZmVyaW5ncw==","cm9iZQ==","RUVF","b3BlcmF0b3Jz","X1BST1A=","aW5kZW50","QURF","b3Jm","65A=","IGJsZXNzZWQ=","dmFzY3VsYXI=","IGNvbm9j","SGFwcHk=","QnJpZGdl","aWxpdGF0aW9u","am9pbnQ=","IEFkbWluaXN0cg==","LXRyYW5zZm9ybQ==","IG1lYW50aW1l","L0s=","IEJlZHJvb20=","IHJpZ2lk","IGJyb3dzZXJz","RU1QVFk=","LlNlcmlhbGl6ZQ==","X0VE","IHN0aXRjaA==","IGphbg==","ZWxsdA==","IGJyYWNl","IHRyYWlscw==","cHVibGlzaGVk","5a+G56CB","fScpCg==","IGFjaWRz","ICEhIQ==","X2RpcmVjdA==","PigpKTsK","YWrEhQ==","X09DQw==","IHBsYW5ldHM=","5p+l","IER1Ymxpbg==","IHNlcmll","LnByaW50Zg==","ZGVlcA==","YCk=","IFwk","IM68","X1ZJREVP","ZW5kb3Jz","IENyeXB0bw==","RmFy","LlRyYW5zcGFyZW50","LlRS","aWFzbQ==","X3RyYWluaW5n","IHRlYWNoZXM=","IEJlbHQ=","IGxpbWl0aW5n","IEthdGg=","IEluZGV4UGF0aA==","IGFjaGlldmVtZW50cw==","IHNlcsOh","aW50ZXJvcFJlcXVpcmU=","IGRpc3Nl","Lklm","YXJtaW5n","dWxzaW9u","UG8=","X0RFVEFJTA==","UHJvdG90eXBl","IENBTA==","IGFncmVlcw==","LnZv","LkV4ZWN1dGVOb25RdWVyeQ==","IFRvcGlj","ICd7fQ==","QXJt","IGVjYw==","TWFn","IHNlcmlhbGl6ZWQ=","CWNvbm4=","Y2FjaGVk","PXRm","IEJ5dGVBcnJheQ==","cHJvdG9idWY=","dmFyY2hhcg==","CUFTU0VSVA==","IGxpc3Rl","X3RyaWdnZXI=","t7g=","RmVlbA==","VGFob21h","IExpaw==","IHN0cnVjdHVyZWQ=","ZXJndXM=","LkluaXRpYWw=","X2dl","Y2xqcw==","LmNvbnRhY3Q=","IGFuZGVyZQ==","JHN0bXQ=","X0NVUlJFTlQ=","IERpc2NvdmVy","JHJlcw==","Zm9ybWF0dGVy","SGE=","dmFuZ3N0","IGVtZXJnZQ==","44CC4oCd","IENhYmluZXQ=","LXNxdWFyZQ==","6YOo","IHJhZ2U=","IEFK","IFZU","c2hhZG93","IEZhaXRo","ZW5hbWVz","cHJldHR5","aGFzaWw=","cGFydHk=","IHZhcmNoYXI=","IGZvdG9z","IGFsdW0=","IEJlbGdpdW0=","LnlsYWJlbA==","IGRlag==","X251bWJlcnM=","IGh1","LnNldEFkYXB0ZXI=","IFVzdWFsbHk=","KHNhbXBsZQ==","LlNoYXJlZA==","IGJvb2tlZA==","ID4+PQ==","IG1pbmVyYWxz","Ij48Pz0=","IGFkanVzdG1lbnRz","IERM","IHZpYnJhbnQ=","IERlcGVuZGVuY3k=","IHphcA==","L1g=","IGZvbnRz","dHJpcA==","0LjRhw==","IHR1YmVz","Y2xhbWF0aW9u","IOun","IHByb3RhZ29u","b3Vwb24=","IEJydXNo","KHByZWQ=","b3VybmV5","J10pLT4=","cHJvZw==","Ym9v","X21k","X3BhY2
s=","KGV4cHJlc3M=","dXR6","XEF1dGg=","LGlk","IENoaWxl","YWN0aWNl","IHJlY3J1aXRtZW50","IHBvc2Vz","IHZ1bG5lcmFiaWxpdHk=","aW5zdGFuYw==","b3J1bQ==","ZGVzcw==","IHhs","JSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSU=","KGZpZw==","IGRlbGV0aW5n","LmRlbA==","KScpCg==","IFdlZWtseQ==","Pz8/","KHN0cmNtcA==","c21pdGg=","IHB1cnN1aW5n","LXNv","IEFwcHM=","LycK","IGRlY2lz","Rk9SRQ==","RXZlcnlvbmU=","IGxhbmVz","VmlydHVhbA==","LmF0dGFjaA==","KExvZw==","IE1lZGljYWlk","KFBhdGg=","IFR1cm5lcg==","L2FwcGxpY2F0aW9u","IHBvcnRyYWl0","IG9wcG9zZQ==","Y2hlY2tvdXQ=","IGZpbmlzaGVz","X01F","QmFycmllcg==","U29uZw==","VkFS","RWFybGllcg==","cmVsbGE=","IGhhc3Q=","YXphcg==","IHB1bGxz","bmd4","IGluc3BpcmluZw==","0YPRjg==","LWRpcmVjdGlvbg==","IGV4cGxvc2l2ZQ==","IGNyZWF0ZWRBdA==","c3Rv","IHdoZWF0","IEJ1aWx0","J2Fp","IHRyYWNrZWQ=","aGFtbWFk","Um93QXRJbmRleFBhdGg=","X2hlYXA=","RHVl","IGNvbm5lY3Rz","LnB1Ymxpc2g=","ZW11","IGJ1bGxldHM=","QkFS","b2xhdGU=","IGludGVybmFsbHk=","IGNhdGNoaW5n","LXBhc3N3b3Jk","b3VjaGVk","5oCn","ZW91cw==","IHhyYW5nZQ==","UXVhbGl0eQ==","dnY=","TWFuYWdl","KCgk","YWNlbWVudHM=","IEJyb3RoZXJz","IEhFQUQ=","IFVuc3VwcG9ydGVk","c2Fu","ZXNp","KioqCg==","IGFkYXB0YXRpb24=","IFdvcmtlcg==","J10v","LnNhdmVmaWc=","KHRyYW5z","2Kw=","bmVl","Q29ycmVjdA==","Li4uIikK","IHN1Ym1pdHRpbmc=","LXBhdGg=","CWxhc3Q=","aXNzYW4=","LnhsYWJlbA==","IFNlcGFy","L25v","X2Jlc3Q=","IE1pbGxz","X3NvY2s=","KGZsYWc=","IGRlc3RpbmF0aW9ucw==","ZW1wdGlvbg==","IEZBSUw=","5ZKM","IHJw","ZmFjdA==","CWxlbg==","REFZ","IHNlaXo=","X2RzdA==","bGlw","LkxpbmVhcg==","IEJhc2tldA==","JHQ=","JGk=","LWJyYW5k","IE5laWw=","IEVx","IHRob3U=","b2dlbmU=","IHNjaG9sYXJzaGlw","5pu0","IHN3bw==","YWdpbmF0b3I=","ZW5p","KGJvb2s=","IGJsaW5r","dGh1cw==","IGNhbmNlbGxhdGlvblRva2Vu","IFBhbGVzdGluaWFucw==","IHByb2ZpdGFibGU=","IGJhY2twYWNr","ZW5zb24=","PExvbmc=","IHBvb2xz","IHN0aWNrcw==","IHNwb2tlc3dvbWFu","QmVpbmc=","IEhlcml0YWdl","IE5pa2U=","U0hB","IE5vdEltcGxlbWVudGVkRXhjZXB0aW9u","JGNvcmU=","IFJpY28=","L2xhdGVzdA==","IEN6ZWNo","bmVyUmFkaXVz","KGxpbmVz","IHNlbWVzdGVy
","IHdvdW5kcw==","UHJvY2VkdXJl","Lm1haWw=","KCkpOgo=","IGNvcnJpZA==","dGVyZWQ=","IE5DQUE=","IGdhbGF4eQ==","X2tpbmQ=","aWxr","IHRyYXM=","X1BPTA==","IEhldA==","IHJlZnVnZWU=","IHRlZW5hZ2U=","LmJpbmRpbmc=","cG9zdGFs","IGnDp2lu","IERhdGFUeXBl","6ZY=","eWNsZXJ2aWV3","LHZhbHVl","X2lkZW50aWZpZXI=","PGI=","IG91dGZpbGU=","DQogICAgDQo=","IGNyw6k=","IHJlc3BvbmRlbnRz","IEJlYXN0","Y2VsZWQ=","IGludGVyZg==","LXRoZW1l","Z2lm","IFJhbmdlcnM=","SVRBTA==","IGF1dGhlbnRpY2F0ZQ==","Q29tcGxldGlvbg==","dXJzb3Jz","IGNpbmVtYQ==","IGRpc2NvdXI=","IEphdw==","T0NLRVQ=","IHByYXllcnM=","IEx1aXM=","ZnJhZw==","PVsK","IGJyYXZl","X3Bvc2U=","Q2VydGlmaWNhdGU=","LWZl","aWZlcmF5","IEZsYWdz","Q29udGFpbmVyR2Fw","IENyaXQ=","UmVzdWx0U2V0","CWN1cg==","IGNvcnJlc3BvbmRz","U3RhZmY=","Lkh0dHBTZXJ2bGV0UmVxdWVzdA==","IG5ldXJvbnM=","IE1haW5BeGlzQWxpZ25tZW50","ZWRhcg==","IGdhZA==","X3BhcnRz","IM6y","IGZ4","L2ZpbGVz","IEJyb3M=","aGlwcw==","IGdsdWNvc2U=","IGZhcm1z","IG1lbnRhbGx5","cmVzdGF1cmFudA==","VGFibGVOYW1l","IE1lcmNlZGVz","LlZpc3VhbA==","IGFuY2g=","aW5hbGc=","X3J1bnRpbWU=","IHByb3ByaWV0YXJ5","IGludGVudGlvbnM=","aXpp","U2xpY2U=","OyI+PC8=","X1dPUkQ=","XE1pZ3JhdGlvbnM=","IEVOQUJMRQ==","X1BBUkFNRVRFUg==","IEJpc2hvcA==","LnN1YmplY3Q=","aWxsYXM=","Lm1hdHJpeA==","dXJyZW5jZXM=","Knk=","IGNvc3RseQ==","IENodWNr","IGNsb3Nlcw==","IE1pZ2h0","LXN0b3Jl","IG1hbGw=","aWV0ZW4=","LkFicw==","IGNvdXBsZWQ=","LmJhc2lj","IDo6Ojo6Ojo6","TWFrZXI=","Y2Fubm90","IGFjaA==","IEVsaQ==","4oiS","b3JuYQ==","IGNwcw==","IHRoZXJlb2Y=","IEB7","IE5TTXV0YWJsZUFycmF5","zr0=","cHJvZHVjdGl2ZQ==","U3F1YXJl","dGVtcHRz","IGVsaW1pbmF0ZWQ=","PE0=","IGNvbnNlcnZhdGl2ZXM=","IFN1cmc=","LnBhcg==","IEJ1Y2g=","KmI=","Rm9ydA==","Q29sb3Vy","IENoaQ==","ZWRpYw==","PnRydWU=","IE5ZQw==","IGJvcmVk","IERldGVjdA==","IGFwcGFy","IGplYW5z","IFRhaw==","SU9E","IEhvcnNl","KEZJTEU=","KD8=","cmlxdWU=","b3B0aW1pemVy","bmF0","bG95cw==","CVRva2Vu","b3VidGVk","dWVzcw==","b2NvYQ==","RGF0YU1lbWJlcg==","X1BPV0VS","Y2xhc3NMaXN0","UHVzaEJ1dHRvbg==","IFdpRmk=","LlN0cmVhbQ==","Lmd1aWxk","IG5vZw=="
,"IFBvcnR1Z2Fs","IFVudGVy","UHJpbWl0aXZl","Ym9zcw==","IERldXRzY2g=","IGVyb3RpYw==","IHN0cmNvbnY=","LlRyeVBhcnNl","IGdyYW1z","LlN1Y2Nlc3M=","X3Br","IEhhcnZleQ==","LW1pbmRlZA==","LmNvdW50cnk=","W10i","IGFuZ2Vs","IGJlYXRz","IFZvcg==","aWxpbw==","Lm1hc3Rlcg==","c29tZXRoaW5n","IFBBQ0s=","KGlm","UmVxdWVzdEJvZHk=","IGFudGVz","L3dpZGdldA==","IG1vZG8=","IEFX","ZmluZGVy","IG9wdGltaXplZA==","IG1pc3NpbGVz","TkI=","CWludGVybmFs","dGV4","IFNyaQ==","IGRhbWFnaW5n","IE1haXM=","LUFsbG93","IFpo","LWFsdA==","ICkpOwoK","6Ik=","IGluZmx1ZW5jZXM=","IGNhdGFs","X1JFR0lTVEVS","IEFQSXM=","LWNlbnR1cnk=","IGJpb2xvZ3k=","IEFjdHVhbA==","IGhlZWxz","VFJBQ0U=","X0RJRw==","RGF0YXNldA==","IE1hdHRlcg==","IGNsYXNzaWZpZXI=","Lndpa2lwZWRpYQ==","IFJvZ2Vycw==","IGRvbmF0ZWQ=","cmF3bGVy","ZW5lbg==","IGNhc2lub3M=","b3J0YWw=","IHByaXZl","c3Bl","ZHVjZXJz","LmVw","IGdyYXNw","YWNqaQ==","IGRhaXJ5","IGJ1c2Vz","LmNvbW0=","Lmlucw==","IElSUw==","IEJlZXI=","YWRj","b2FyZA==","X01FVA==","ICcrJw==","cmFucw==","IGtpbmRh","IOKUgg==","IE1hdXI=","0LDQsw==","IGJhbmR3aWR0aA==","aWJ1cw==","IERpZmZlcmVudA==","KG1hdA==","IFJlc3VtZQ==","X1VOUw==","ZXN0YWJsaXNo","IGZvbmN0aW9u","U3Vic2NyaXB0aW9u","X2NvbXBhbnk=","IGxpZ2h0bHk=","LmNvbmZpcm0=","LnlhbWw=","IEJvb3N0","Q29tbWVyY2U=","LXRlbXBsYXRl","X0RFTEFZ","IEhJ","IG5hdmln","KFNlbmRlcg==","IEhT","XyIr","IFJFUVVFU1Q=","IHdpZmk=","PSIiCg==","XSktPg==","IHJvcGU=","IHZpb2xhdGVk","IGdsYW5jZQ==","IEt1cmQ=","IOiu","ZGVjaw==","IElTQk4=","IGluZmVjdA==","IEZvbw==","IGdldHRlcg==","IHRlbmVy","YXBwZQ==","Lmho","X2hvdA==","PEFN","cG9seQ==","ISIsCg==","IGNvbnZlcnRpbmc=","IFdXRQ==","Uk9T","KCd7","Q29tbWl0","KUw=","IE9yZQ==","IHNwYXJzZQ==","IGRpc3Bvc2Fs","IGNhbmNlbGVk","5ZCO","IGFlcg==","IHZpbnls","4buD","cmVjb2du","YXJraW5n","IHRyaWNreQ==","KnM=","IHByb2NlZWRz","IGlzbw==","IGNvY29udXQ=","IGNyYWZ0ZWQ=","SUVMRFM=","IHF1ZXN0bw==","IGNvbW11bg==","X0NPTk5FQ1Q=","IHRyYWZmaWNraW5n","RGVlcA==","YcOnw7Vlcw==","Y29kaWdv","dmVhdQ==","IGJldHJheQ==","aW50YQ==","VEVE","w6Zy","bWFydA==","X0JVUw==","L3Nj","aWFsbHk=","IGN
pZ2FyZXR0ZXM=","6K+B","KG5u","IG1vZGVsaW5n","L3Byb2R1Y3Rz","d2Fybg==","IG1ldHJv","IEl2","Jik=","IENhYmxl","zrs=","Q29tcGFyaXNvbg==","Z2FyeQ==","IEJB","UEFSVA==","IHB2","X3VwZGF0ZWQ=","Q3JlZGl0","b3J0aHk=","b2JzZXJ2YWJsZQ==","IHRoZWF0cmU=","QkxF","O30KCg==","bGF1bmNo","X3N0cmluZ3M=","dWdv","IFJQRw==","LWF1dGg=","0KA=","aG9sbQ==","IFBhbmQ=","VWlk","IGltcGx5","7Jy8","J109Jw==","L1VzZXI=","IHN0cmNhdA==","0L3Ri9C5","RGF0YUFkYXB0ZXI=","IGxhbmRzYw==","IGRpcGxvbWF0aWM=","77yT","KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg==","IENoaWNrZW4=","IGJjcnlwdA==","LkluZg==","W2NvbA==","IFF1YW50aXR5","LXBvc2l0aW9u","IGRpZXRhcnk=","IGZpbG1t","SXNyYWVs","UHJldg==","IE1pbGxpb24=","IHJlbWVk","IGJpbGxpbmc=","IG91dGRvb3Jz","LnRt","IG5hZA==","Rm9yZw==","Wlo=","IHNzbA==","XSwn","S1Q=","ZnJlcQ==","PWRvY3VtZW50","Ymx1cg==","rLg=","IEplZmZlcnNvbg==","Q3M=","KHNhdmU=","IHN0cmFw","SW5kaWE=","IGlkZW9sb2d5","Qk9TRQ==","IEZQ","KGFucw==","IGZldmVy","IFlhbQ==","S2luZw==","4LI=","QVRJTkc=","Ym9oeWRy","cm9sbGJhY2s=","IG5ld05vZGU=","IE5WSURJQQ==","IGhvbm91cg==","IENvbmZpcm0=","eGJk","IHN1Y2Nlc3Nvcg==","L3U=","bGl2","b3VybmFtZW50cw==","QXR0YWNobWVudA==","IGdydXA=","IHRyaWJl","IGNhcmVz","ZWZ0","X3NhbWU=","J2xhYmVs","IOOAkA==","TW90b3I=","IGluZXhw","ICIoIg==","X1BPU0lUSU9O","IHZhbGxleQ==","IFJlc3VsdFNldA==","IHByZXNlcnZlZA==","IG11dGF0aW9ucw==","IHF1ZXN0aW9uaW5n","bXVuaXRpb24=","cGFyc2VJbnQ=","IFNy","IE1ldGFkYXRh","4oCd77yM","dGltZXN0YW1wcw==","IHRyYW5zaXRpb25z","7Zk=","0Yo=","aW9t","LkRv","IHBpbmU=","IGZ1bmc=","IHRyYW5zbWl0dGVk","Y3RpbWU=","IEZhbQ==","UmV2aXNpb24=","QmFz","VVBFUg==","RGVzdGluYXRpb24=","dG9IYXZlQmVlbkNhbGxlZA==","IHVuZm9ydHVuYXRl","SU5FUw==","X3Byb2Y=","QW1vbmc=","IEN5YmVy","IEJhdHRlcnk=","Z2VucmU=","IFZpZXdNb2RlbA==","LT0=","IHV0aWxpemVk","cGFpbnQ=","LkludGVnZXJGaWVsZA==","ZXJuaXR5","Y29tcGlsZXI=","4oCLCgo=","IE1hc3RlcnM=","LlRvQXJyYXk=","IHN0cnRvbA==","IFVrcmFpbmlhbg==","fSkpOwo=","IHNoZW1hbGU=","IlRoYXQ=","Zm9yYWxs","L2Rvd25sb2Fk"
,"IHJoZXRvcmlj","LmxhdGl0dWRl","IFdIRU4=","IHNob2NraW5n","SUZJQw==","Lk5vcm1hbA==","X0ZPTERFUg==","IGRyaWZ0","IG1vdW50aW5n","LWJvb2s=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAK","IFdpcmVsZXNz","PiIuJA==","IHJlbGllcw==","KENvbnNvbGU=","SW50ZXJuYXRpb25hbA==","LT57JA==","TWlk","IGRpc3NlcnQ=","ZGRz","IGRlcG9zaXRz","CWRyaXZlcg==","I2dh","cHJpc2luZw==","cHJpbnRsbg==","IHByZXNlbnRlcg==","IG1pbmVz","Q1NT","IER1YWw=","KCEo","IGthbQ==","IGlzTG9hZGluZw==","IFByb3RlY3Q=","LnVwcGVy","YXJpdW0=","XToKCgo=","WWlp","LXNoaXJ0","IElNQUdF","X2NvbG9ycw==","IHVyZ2VudA==","LkNvbnRhaW5lcg==","ISgK","U2F0dXJkYXk=","IHNvY2lldGllcw==","IFRoYW4=","IENvZA==","PUA=","IGF0dGFjaG1lbnRz","Lm1vYmlsZQ==","IHNwaXRl","IGJvdW5jZQ==","cmF3bA==","aW5zdGFuY2V0eXBl","IFRydWNr","IG1hbmlwdWxhdGlvbg==","KENvbmZpZw==","LWluc3Q=","IHN0b3I=","aXR1dGlvbg==","UHJlZmVycmVkR2Fw","IG1haW5BeGlzQWxpZ25tZW50","IGxpc3RlbmVk","JycnCgo=","b3R0YWdl","LXByb2plY3Q=","LkFQUExJQ0FUSU9O","CXJvb3Q=","IHdoaXQ=","IGJpbGRlcg==","IGtlcg==","IGFwcGxpYW5jZXM=","cm93YXZl","7J2A","ZW1hdGljcw==","IE9yZw==","b3Bpbmc=","X1NFQVJDSA==","IGNoYW0=","YWRkQ29udGFpbmVyR2Fw","ICgpLg==","IEFycm93","SWxsZWdhbA==","Q3VycmVudGx5","IHVzYQ==","IHBhc3N3b3Jkcw==","IHJlbm93bg==","YXZlcm4=","IEV2aWw=","IGNvbmNhdA==","IGR1bw==","IHZhbGU=","IEJlYW4=","IGluZGljYXRvcnM=","Y21hdGg=","IFB1bXA=","Tm92ZW1iZXI=","aWZpY2FudA==","X0RPTUFJTg==","cmVnYXI=","IFBvcnRhbA==","IiQ=","IGZvcm1lcmx5","Il06Cg==","IFZpc2liaWxpdHk=","LmdldEVsZW1lbnRzQnlDbGFzc05hbWU=","X1JFRA==","IGNoYW1waW9ucw==","4LQ=","VmFsb3I=","X2Vz","KmE=","LXJlcGVhdA==","QmFuZA==","LnN0YWdl","IGJ1cmVhdWM=","Q250","ZXRlbg==","LWZ1bmN0aW9u","IG11aXRv","UElE","X2VkaXRvcg==","IGNyYXNoZWQ=","ZGVhZA==","a2F0","YWdo","IEVYVA==","YXNzZXI=","LXNtYWxs","IHJlYWxpeg==","KEVudGl0eQ==","w7pz","IEFjdHVhbGx5","IEVsaXRl","IGhlbG0=","KG5vbmF0b21pYw==","YXNoZXI=","Q29tbXVuaXR5","YWxsZW5n","aXJ5","IEdyb3d0aA==","IHN1ZQ==","IGZyZXF1ZW5jaWVz","X2Rlc2NyaXB0b3I=","LkF0dHJpYnV0ZQ==","IHJlY2lwaWVudHM=","X05T","LyIr","aWJhbg==","IGF
0aGxldGU=","IElnbg==","X0RNQQ==","KGRz","IFJlcXVpcmVtZW50cw==","QURJ","ZXJleg==","XEFkbWlu","YnJhc2th","IFJ1c3Q=","UmVsYXRpb24=","Q09E","IFZFUlNJT04=","ZW1tYQ==","KSl7","LkR1cmF0aW9u","IENhbWI=","LWxvZ28=","IHJlYWRhYmxl","IGNyZWF0b3Jz","KCldOwo=","VXBEb3du","LWhhbGY=","LmdldE1vbnRo","KHNm","UGlj","IGh1bmdlcg==","LnR4","IGV4Y2VlZGVk","X3NlZWQ=","KF4=","X3Nr","LnBlcmZvcm0=","ID46Og==","IG1vbmdv","PWZsb2F0","YmluZFBhcmFt","U21hcnQ=","aWZh","IHNlY3VyaXRpZXM=","IHByZWp1ZA==","ICwi","IGNvcnBz","IHZyYQ==","YW1hY2FyZQ==","aXRlcnI=","KE1lZGlh","dWNoZQ==","IGNvYg==","IGxpYmVy","Lmdlb21ldHJ5","TG9jYXRvcg==","IHNsaWRpbmc=","IHN1cmdpY2Fs","X0NVUg==","IGNvbnNlY3Q=","Wyo=","IFJlc29ydA==","U3R1Yg==","X0RPVUJMRQ==","IFNvcGg=","IGVsZWN0b3JhbA==","X2Rpc2FibGU=","INGB0L4=","IExpZ2h0bmluZw==","IG1lbnRpb25z","b2N5","IGxlYWtlZA==","IHJlbGF4aW5n","UHJlc2VudGVy","dnNw","IGd1aWx0","PS09LQ==","LnJlcGx5","IE1pcnJvcg==","Q2FtcA==","ICsjKyMrIys=","ICsjKyMrIysjKyMr","LkF1dGhvcg==","IGRpcmVjdGl2ZQ==","LWhvb2s=","7YSw","fQoKCgoK","QHB5dGVzdA==","X3JhbmQ=","bWlz","IGNvbG9yZnVs","dWpl","bGFzc2Vz","IENsYXNzZXM=","LmhhdmU=","JSks","6aKY","IGRpc3R1cmJpbmc=","c3Vic3RyaW5n","IEtvaA==","SW52ZXN0","cHVyY2hhc2U=","IHJlY3ljbGluZw==","IEFSVA==","aWVyYXJjaHk=","IGZwcw==","LmNoZWNrQm94","7ZW0","X21hdGVyaWFs","ZHVjYXRpb24=","IGZ3","dWRpdA==","IHJldmlld2luZw==","IFNpZA==","U3ludGF4","IFdyaXR0ZW4=","YXJnYXI=","VU1F","L3E=","Q2xhc3NpZmllcg==","T2ZmaWNpYWw=","IGpheno=","IG9tZWdh","UGh5c2ljcw==","IGx1Z2Fy","X2FjY2Vzc29y","LmNvbW1hbmRz","QWJpbGl0eQ==","IEJhdGNo","UkFN","IGVuY291bnRlcnM=","LlF1","QllURQ==","IERpc3RyaWJ1dGlvbg==","IHVzbw==","IFJlY292ZXJ5","YXBwcm92ZWQ=","IGRlbmlhbA==","L3NoYXJl","TGlua2VkTGlzdA==","KQ0KDQoNCg==","dWRkeQ==","IGZpbmVz","IHJ5","VW5pY29kZQ==","CXJlbmRlcg==","IHByZW1pc2Vz","IHBvbg==","YWxpYXNlcw==","L0ZvdW5kYXRpb24=","Y3VkYQ==","IENvY2s=","LDop","KGZvbGRlcg==","IG3DqWQ=","ZHJhZw==","IHRhbGVudHM=","ICAgCgo=","0LXRgdGC0LI=","bW9i","LnltbA==","IGFzdGVy","IGRpc2NyZQ==","Z29hbA==","IEdUWA==","IFNV
Q0NFU1M=","IExPTkc=","KGZpbmQ=","IHNpbmd1bGFy","X3N6","IEV0aGVyZXVt","Li4K","IGlycmVz","Jykpewo=","IG1pbmlzdGVycw==","U3RlcHM=","aXZlcnNhbA==","IE5ldmVydGhlbGVzcw==","LWxlZA==","ICglKQ==","56Gu","IHRpbWV6b25l","IHN0cmFuZ2Vy","KHJlbmRlcg==","IHNodXRpbA==","IG1waA==","IHRyaW8=","cHB5","IHByZWRvbWlu","IGVuZG9ycw==","IFJ1c3NpYW5z","CXJvdw==","IHdpemFyZA==","LnNlcmlhbGl6ZQ==","IGNvbXBsYWluZWQ=","IHNpZG8=","IGRlbGlnaHRlZA==","LW1l","IFJhdg==","SHVtYW4=","YWRheXM=","cmVjdg==","V29ya2luZw==","SnVtcA==","IMOlcg==","IEF1dG9tYXRpYw==","X0Jhc2U=","5qC8","YXVyYW50cw==","wq8=","5rg=","KENUeXBl","SUZJ","KGFtb3VudA==","IGJlbGlldmluZw==","PW15c3Fs","IGZpcg==","IHJlc3RvcmF0aW9u","ZXJlY28=","0KI=","Xycr","IGVib29r","IGRlYnJpcw==","KGlucHV0cw==","QVlPVVQ=","IHNjcmVhbWluZw==","YXZpYQ==","bGFuZGVy","IGRpc3RyZXNz","IGFzc2VtYmxlZA==","IEF2b2lk","KHRocmVhZA==","IFJQQw==","X0VYSVQ=","KHF1ZXVl","0LjRgdGC","RGxs","IHNrdWxs","X3B1Yg==","Y2hleg==","bWluYXRl","ZW5zZW4=","IGluc2FuZQ==","Ym91bmRz","IFJvc2Vu","IGNvbmRpdGlvbmluZw==","cHJvY2Vzc2Vk","dmlkZW9z","Zm91cg==","LkNvbnY=","fDsK","UGVyc29uYWw=","Y2VycHQ=","OlVJQ29udHJvbFN0YXRlTm9ybWFs","IGRvc2Vz","IEthcmw=","IEZyZXF1","LkJBU0U=","IFZvdGU=","IGNvbmN1cnJlbnQ=","IE1lc3NhZ2VCb3hJY29u","IMOW","IER1YmFp","IFJldGFpbA==","Om51bWJlcg==","IE9ic2VydmVy","IEJpZ0ludGVnZXI=","X29yaWdpbg==","X1dPUks=","RnJhbWVz","IG5vdGFibHk=","LuKAnA==","IHRyb3BpY2Fs","IG5pY2hl","YW1pbmE=","LnN5cw==","KHRva2Vucw==","bW9kaWZ5","b3NpdA==","c3Ryb20=","IENvbWljcw==","T1BUSU9O","VGlja2V0","IGZhY3Rvcmllcw==","IGRpc3B1dA==","X0ZpbGU=","IEZpbm4=","ZWVl","IERpc2NvcmQ=","X21vbmV5","LnRwbA==","X3NhZmU=","TEI=","IGdsdXQ=","Sks=","LmZsb3c=","LWNvbnQ=","Z29z","IGhvcml6b24=","IFJ1c2g=","Ojoq","UGlwZQ==","dWxsYQ==","Ym9yb3VnaA==","aGVpbWVy","KG1vdmU=","KFRleHQ=","fSk7DQoNCg==","d2VsY29tZQ==","IENvbXBvbmVudHM=","IGdvdmVybmFuY2U=","Y2xvc2Vk","CW1hcmdpbg==","IGxhdW5kcnk=","IFRlcm1pbmFs","aXphcmRz","LuKAlA==","LnJlbW90ZQ==","LnJhZGl1cw==","IFF1ZWJlYw==","IGRo","VGVjaA==","IE1pc3Q=","c2VsbGVy",
"X2xpdGVyYWw=","IGdlbml1cw==","IGJyYWlucw==","Z2Vt","IE1lYXN1cmU=","IGNhdGFzdA==","cmFuY2U=","LlRleHRGaWVsZA==","IGNvbnN1bWluZw==","ICdcJyc=","b3VidGVkbHk=","IENlcnRhaW4=","RXY=","ZXJ0aQ==","YmVpbmc=","RXhwZXJpZW5jZQ==","IC8vWw==","IEFyYWJpYw==","IENyaXN0","IEF6dXJl","IGhvcmE=","bGFkZXNo","XEJsdWVwcmludA==","ZGFy","LnJlbA==","IHN1cHJlbQ==","IFJlYWdhbg==","IEF0dHJpYnV0ZXM=","LXNpZGViYXI=","IHVzZVN0eWxlcw==","IEFpcmxpbmVz","IGhpbGxz","L3hodG1s","dmluYw==","X21vY2s=","CiAgICAgICAgICAgICAgICAK","IFBpbGw=","LkxheW91dFN0eWxl","IENvbW1hbmRlcg==","XTw=","c2lnbmF0dXJl","IHt9DQo=","IGhhdHJlZA==","IOuL","b2xlc3Rlcm9s","ICoqKioqKioq","YW5jZWxsb3I=","Y3JvcA==","VElN","CQkKCg==","eXNxbGk=","dWl0aXZl","CXVuc2V0","X3NlbA==","IG1lbnVz","dGljaw==","IGNvbnN0aXR1dGU=","IEVsZW1lbnRz","IFJlZGlz","YWdnaW8=","X2Zw","X2RlcGVuZA==","ZW1hcw==","Q0FTVA==","b3Jhbmdl","am9u","IEVtaWx5","IHBvdGF0b2Vz","IHJlY2VwdG9y","IEVsZWN0cm9uaWM=","IExpZ2h0cw==","IGNvbWJpbmluZw==","IFNvbWVvbmU=","ICMjIyMjIyMjLg==","IFRPRA==","L3Nob3c=","WGQ=","LiIn","YWZ4","IHRyYWdpYw==","U3R5bGVk","IE1hcmNv","R2FsbGVyeQ==","ZGFsZQ==","LuKAnQoKCgo=","w6lyaWU=","L3NlcnZpY2U=","5LqG","IGFtYmllbnQ=","X1NFVFRJTkdT","LkFkYXB0ZXI=","bGVuZQ==","IHRyYXZlbHM=","Tm90aWNl","IGNsZWFucw==","IEZlbQ==","Y2hhaXI=","0YPQvQ==","L215","X2JhZA==","IEVjb25vbWljcw==","SVNB","X0NOVA==","KE1lbnU=","5LqO","IFJpZGdl","IGxlbmd0aHk=","RG90","IGp1bXBz","IGhleQ==","JHBkZg==","IHdvcm0=","IHN1dA==","IHNoZXI=","aWFtbw==","IENhbGM=","dHJpZXZl","IGNvcHM=","IENocm9t","IHJlZ3VsYXRlZA==","cmVhdG1lbnQ=","IEhpZ2hlcg==","b2tz","IGRlemU=","TE9DQVRJT04=","b25nc1Rv","IGZpbml0ZQ==","IHZhcmllcw==","IHBvc2l0aW9uZWQ=","J2ls","6YeR","IGhpa2U=","KGRvbmU=","cGxheWxpc3Q=","IGFkYQ==","IGNvYXN0YWw=","IE5hbmN5","LkRhdGVUaW1lRmllbGQ=","Q3BwQ29kZUdlbg==","IFNpbWlsYXJseQ==","cmV1cg==","IENvbnRy","IEhpZGRlbg==","IEJldGE=","YXRjaGVk","X2luc3RhbGw=","Lk91dHB1dA==","TG9va3Vw","IFJpY2htb25k","cXVhcmVk","IG1hbmdh","LWNvbnRyb2xz","IEJlcm5hcmQ=","TGFyZ2U=","IHNsaWNlcw==","IG9mZmVuY2U=","IE1
lZ2E=","IGVzdGFy","IGpvaW50cw==","IHN1bW0=","X3BsYXRmb3Jt","QnVmZg==","LmFkZFN1YnZpZXc=","IHJldGFpbmVk","TGV0dGVy","LmRpbQ==","IGVzc2VyZQ==","IFNjYWZmb2xk","RVhQRUNU","CVJF","LmxvbmdpdHVkZQ==","w7xuZA==","IHN0YXR1ZQ==","LmFkZFdpZGdldA==","IENhcmliYmVhbg==","YWRkUHJlZmVycmVkR2Fw","aWxkZQ==","VUlMYWJlbA==","IE9wcG9ydA==","IGltcGVyaWFs","dXJzaW9u","IG1hbmRhdGU=","IHByb21vdGlvbmFs","IHZr","aWHFgg==","IHB5bA==","IENyZWF0aW9u","0L7Qt9C0","IHNpbXBsZXI=","LndoYXQ=","IFJlY2VudA==","U3Rvcm0=","LnF1YW50aXR5","IExvdg==","Ii0=","dWJibGVz","X25vdGlmaWNhdGlvbg==","KHdvcmxk","dXJnZXI=","Kigt","OiIK","aG0=","YW5zaGlw","IEFsbW9zdA==","IG1vdG9yY3ljbGU=","X2ZlZQ==","IGFic29yYg==","IFZpbmNlbnQ=","IHNvdW5kZWQ=","w61zdA==","IHBoYXJtYWNldXRpY2Fs","aHRhZw==","IEtpbmRsZQ==","aXRhbGl6ZQ==","IEVtcGVyb3I=","b3VzdGlj","IHNwZWNpYWxpc3Rz","5YWs","Qm9yZGVyU3R5bGU=","L1w=","UkVMQVRFRA==","KCcsJyw=","KGV4cHI=","IGh0","5Y2I","X0NyZWF0ZQ==","IHNwZWNpYWxseQ==","IFtdOw0K","IGhlZWw=","IHNlcHQ=","X2FyY2g=","KGluaXRpYWw=","JS4KCg==","XCIsXCI=","IGRpc2N1c3Nlcw==","IHVwdA==","IFsm","IG1hbnVz","LmhhbmQ=","IE1BSU4=","IERlbm1hcms=","IF0sDQo=","IGNyeXN0","IG5hY2s=","Q29vcmRz","X2lubmVy","IG1pZHN0","IGF3YWtl","INCe","LWJyZWFr","w612ZWw=","X1BBU1M=","IFBhcmFtcw==","IGRldHI=","IHNwaWRlcg==","IENvbmNlcHQ=","IHByZW5k","Q0hFRA==","LkV4aXQ=","IHBvcHVsYXRlZA==","IHZpcnR1ZQ==","X1NFU1NJT04=","IG5vdXZlbA==","b2F1dGg=","INC00LDQvdC90Ys=","cmluaw==","LkhlYWRlclRleHQ=","YXR1cmF0ZWQ=","IGVyc3Q=","IOWF","4KWH","X3Zpc2libGU=","ZXllcg==","IGxpYWJsZQ==","IGRlYmU=","IGJ3","ey0j","X1dJTg==","ZGZz","SG92ZXI=","IFBVVA==","LWFuZ2xl","IG5vYmxl","IHRyYWNlcw==","ZW5jdg==","IHVzZXJEYXRh","X2lucw==","IFN1eg==","IG5ld3NsZXR0ZXJz","IE1vZGk=","IGVudHJlcHJlbmV1cnM=","IHRyaWJ1dGU=","IHJ1bW9ycw==","IHJy","IFF1YXJ0ZXI=","6rOg","IGZlZWRz","w7Nn","IGVudmVsb3Bl","IGxlYXI=","IGvDuA==","ZGV2ZWxvcGVy","U2ltaWxhcg==","OiIpCg==","c3Vic2NyaXB0aW9u","TW9kaWZpZXI=","aXRhbGlj","IG5hc3R5","IHRlcm1pbmF0aW9u","IGNoYXJtaW5n","IOKf","dG9ucw==","LnRyYWNl","aG90cw==
","IFVS","TW9udA==","IGp1c3RpZmllZA==","IEdhbmc=","aW5lYQ==","IGJvZw==","KGFw","XyQ=","IGNvbnRhbWlu","LkRvdA==","CURlYnVn","KGV4cG9ydHM=","IHBhaXJlZA==","IEFzc2lnbm1lbnQ=","IGF1dG9tb2JpbGU=","k40=","IHBoYXNlcw==","dnc=","QFN1cHByZXNzV2FybmluZ3M=","PVw=","cmFudA==","LWVk","CWF3YWl0","IGNlcnRpZmljYXRlcw==","Jz4i","IGludGFjdA==","Q1RSTA==","TWlrZQ==","Z3JlZ2F0aW9u","QVRURVJO","IHJlcHVibGlj","X3VwcGVy","aWxpYXJ5","IGNvbXB1dGF0aW9u","aGlyZQ==","IFNoaW4=","X0FOWQ==","IE1hbnVmYWN0dXJlcg==","IENhcm0=","IGJlYXJpbmdz","X2NvbWI=","Y2Fk","dXJpc3RpYw==","IHdob2xlc2FsZQ==","IGRvbm9y","LmludGVyZmFjZXM=","cHJlc3Nv","IEJydW4=","LWNsb3Nl","cHJvdmU=","X1NL","CWZyYW1l","ZXRyb3M=","IFBhaW4=","X0VYUA==","IExU","X2Zz","LmRhdGFz","CXNz","dm9pcg==","IEF4aXM=","TWFqb3I=","PSI8","W2g=","IHByb2Zlc3M=","aWdyYXRl","KHNjb3Jl","S2V5d29yZA==","Im9z","ICAgIAkK","YW5hbHlzaXM=","IHJlcGxheQ==","LnBhc3M=","XGQ=","dGxz","IHNhbmN0","LmxpZ2h0","X21vYmlsZQ==","0YHRgtGM","CXRvdGFs","dWl0eQ==","IHBhdXNlZA==","TkFT","IGVuY29yZQ==","bG9l","IC0qLQoK","LmhpZ2g=","YW1wbGVy","IFNlY3VyZQ==","IGZyYWdtZW50cw==","X3ZlbA==","aWxsYXJ5","IFN0ZWlu","IERhd24=","IG1heGltaXpl","4Lii","IC9e","IGNvbnRpbnVhbGx5","IHNoYWRvd3M=","CSAgICAgICAgICAgICAgICAgICA=","IElBY3Rpb25SZXN1bHQ=","IGluZm9ybWFjacOzbg==","Q0hFQ0s=","LlNlbGVjdGVkSXRlbQ==","YnVuZGxl","b2xsZXk=","PEludA==","QUlORVI=","IFdpbmc=","dGl0bGVz","b3VudGFpbg==","Q1k=","IExvY2FsZQ==","Zm9ybWVy","PGNvbnRleHQ=","UmFkaW9CdXR0b24=","X3NjaGVkdWxl","IGZhYnVsb3Vz","Um9iZXJ0","X1BST0ZJTEU=","IGdhdGVz","SU1Q","IFBlbnRhZ29u","Z29sZA==","YmFjaA==","ZW1wbG95ZWVz","Um90YXRl","IGNoYW1w","IHNlbGJzdA==","QWx0ZXJu","IGNvbnZlcnRWaWV3","Lyw=","IH4o","U3RyZWV0","X3BsYWNl","IHBlcnNvbmFsaXplZA==","UHVibGlzaGVy","IFNPQ0s=","X05BTUVTUEFDRQ==","IFN0YW5kYXJkcw==","c29ldmVy","X0NFTlRFUg==","SW50ZXJlc3Q=","w7R0","dGVtcGVyYXR1cmU=","Vmlld3BvcnQ=","Z2V0UmVzb3VyY2U=","IGVhdGVu","IHNlbXByZQ==","IGFibm9ybWFs","IGN5bGluZGVy","IHRyb3VibGVz","bm9k","0YvQsg==","Z2FtZXM=","X2ds","UGxhbmU=","Z3JleQ==","X3RibA==","
LkNvbXBvbmVudFBsYWNlbWVudA==","IENoYXNl","TG9nZ2luZw==","bWFueQ==","7IY=","IGZsYW1l","PSI8Pz0k","IEdyb3Vwcw==","LVU=","0YDQsNC9","CgoKCgoKCg==","IHZhdWx0","b21vbg==","cHJvYmxlbQ==","IHRyYWRlcnM=","IHBlcmlwaGVyYWw=","IGhvbWVwYWdl","KGRlcw==","IFN1Y2Nlc3NmdWxseQ==","IHJlYm9vdA==","IGNlbGx1bGFy","aWlp","IFBsYW5z","bGlzdGluZw==","CWRpcw==","IFJlZmxlY3Q=","CWV4Y2VwdA==","Iiko","IHRhbWLDqW0=","VmVoaWNsZQ==","YWNjaQ==","bHVzaA==","T3JkZXJCeQ==","IGltYWdpbmVk","Y29kZWM=","IGRhdGVUaW1l","TWljcm8=","IHJlbWluZHM=","IGZydXN0cmF0aW5n","IFZpc3Rh","VHJhaW4=","INCy0YE=","IG1vbGVjdWxlcw==","YXZpbg==","IGRvdWJsZWQ=","IGJyYWtl","IGNhbGNpdW0=","RnJpZGF5","IElkZW50aWZpZXI=","5Z8=","0YvQuQ==","IEphaA==","UmVu","IHNjYW0=","IERlbm5pcw==","LnNldEludA==","4p8=","IGFwcGVhbHM=","IEF1cg==","IHNwbGFzaA==","ZXF1YWxzSWdub3JlQ2FzZQ==","d2h5","IHNhcA==","U3VwcG9ydGVk","IHNlcmE=","IDoi","IFZlcm1vbnQ=","IHJldW4=","IE5vdmE=","ICAgICAgICAgICAgCiAgICAgICAgICAgIAo=","UmF0ZWQ=","IGxheWluZw==","IEthcmVu","LkRlc2VyaWFsaXpl","IGNvZGVj","IHRheHBheWVycw==","OyIpOwo=","IGNydWRl","IG1vbGU=","IHVzZUNvbnRleHQ=","CXJlc3A=","IHBrdA==","IENhbm5vdA==","UGlwZWxpbmU=","5YaG","dGljYWw=","QWN0aW9uQmFy","YWVkYQ==","IENyaXRpY2Fs","IE5hZA==","IGJsZWVkaW5n","IGxsdm0=","L2N1c3RvbQ==","IFNpbXBzb24=","U3k=","aXRhYmx5","IFN1bW1pdA==","KCkpKS4=","RUxMT1c=","JCcs","TWV0","SW52b2ljZQ==","b2xpc3Q=","IHNwaW5l","YXV0aWZ1bA==","cGFpZA==","IGxvY2tlcg==","X2FybQ==","XCI+PA==","IHRyYWplY3Rvcnk=","X3Jpbmc=","IGh5ZHJvZ2Vu","dHJvbg==","IHN0YXR1dGU=","IGNvbmRpdGlvbmFs","IHRyYXk=","LXNjaG9vbA==","KHdpZGdldA==","JGNvbmZpZw==","IHJlcXVlc3Rpbmc=","LnVpbnQ=","ZXRvbg==","YnJpdGllcw==","T2ZUeXBl","QURNSU4=","cHJlZGljdA==","IGdlZ2Vu","IEhhcHA=","T0NVTUVOVA==","IEFwYXJ0","IC0tLS0t","cm9l","dWlkZQ==","anVzdGlmeQ==","IFNxdWFk","IHByb2Zlcw==","LmJvdA==","X2N1cnJlbmN5","aW5uZW4=","IE11bWJhaQ==","IE51bWJlcnM=","YXZhbmF1Z2g=","YWduaXR1ZGU=","4oCcVGhlcmU=","PWh0dHA=","54mH","IHZi","Kyc8Lw==","IG9yZ2FuaXppbmc=","YW5pdW0=","SW5TZWN0aW9u","LmFuZA==","IGV0ZXJuYWw=
","IHNvdWxz","X09ORQ==","X25z","X2Jhc2lj","IHJldFZhbA==","LXNoYXBlZA==","aWZkZWY=","IE1vemlsbGE=","IGVpZw==","Y29tcGxldGVk","Tm90aWZpY2F0aW9ucw==","VEVDVA==","cmllbg==","Y29vcmRpbmF0ZXM=","IHByZXRlbmQ=","cG9uc29yZWQ=","LnN0ZGVycg==","IGdhbWVycw==","IGRlZmVuZGVk","VG9vbFRpcA==","dWl0YXI=","IGZyYW5jYQ==","IFdvb2Rz","IGlocmU=","IHBzZXVkbw==","IGNyb3dkcw==","IFNZU1RFTQ==","bGVj","LmtlcmFz","IGNpcmN1bGF0aW9u","ZWVy","LmNi","dXp6eQ==","7Zg=","LnJlYWRlcg==","IHNlcXVlbA==","U2V2ZXJhbA==","LnBvcnRhbA==","LS0tLS0K","aXN0cmFy","77u/Ly8=","UGk=","IFwiIg==","IGN1c3RvbXM=","IGRpc3BsYXlOYW1l","IG5vdGljZXM=","IGNhcmI=","Ll8KCg==","IHByb2R1Y3Rv","INGB0Ls=","IG51bWVyaWNhbA==","IHVuaW50","IGNvZGlnbw==","T3JkaW5hbA==","U3RyaW5nVXRpbHM=","IGTDqWM=","IExhbg==","IHNob3djYXNl","IGFyaXRobWV0aWM=","LXNjcm9sbA==","X1RFTVBMQVRF","IFJvdXRlck1vZHVsZQ==","IFNoYWRlcg==","INCd","cG9saWN5","UGVyZm9ybWFuY2U=","CWJvcmRlcg==","KGZpbGVwYXRo","56m6","X2VuZXJneQ==","X0NT","VGhlaXI=","LnNwYWNpbmc=","KGRw","IExBTkdVQUdF","IGhpc3RvcmljYWxseQ==","Ij57eyQ=","IGlub2Rl","c2ls","IGhhY2U=","IHNldmVyZWx5","IE92ZXJ2aWV3","IHNwcmF3","IGJlYWNoZXM=","OmxlZnQ=","t7s=","KCR7","IEZJUlNU","IFNwYQ==","LWFzcw==","IGJhaXNl","IE5PREU=","IFBpenph","UGV0","KHNlcQ==","XCI+Cg==","Q3BwTWV0aG9kUG9pbnRlcg==","IHZw","IGlh","X3NlY29uZHM=","ZW1ldA==","L2Jsb2I=","X1RIUkVTSA==","Li4uDQo=","RGVzdA==","IE5I","LmRhdGFTb3VyY2U=","aXTDqXM=","IEphaw==","c2VsbA==","IHdvcmtzaG9wcw==","PHU=","IHJpdmFscw==","IEVYSVNUUw==","aG9t","LXRva2Vu","Y29tcGF0aWJsZQ==","LkpQYW5lbA==","IHBoeXNpY2lhbnM=","YXJ0aW4=","IGRlc2lyYWJsZQ==","IGRpc3RpbmN0aXZl","LkRlcA==","Z2lk","aWxpYXRl","LG1heA==","IHByZW1pZXJl","IHFEZWJ1Zw==","IGFkdm9jYWN5","IHdoaXNwZXI=","UHQ=","IHVuY2hhbmdlZA==","X3F0eQ==","6K+35rGC","U2Vhc29u","YXZlbGVuZ3Ro","IFB1bA==","IGTDrWE=","J11dXSwK","YWxpcw==","KCIm","Ym9ybw==","IGJt","IFJhZGk=","d3Jvbmc=","IEdvaW5n","aW1lVHlwZQ==","aWpp","LWZlZWRiYWNr","IE5hbWVz","IEJhcHQ=","IHByb2JhYmxl","IEV0aGVy","IFBvbGl0aWNz","X3Byb3RvY29s","bGluaW5n","U2F0","IGNvcn
JlbA==","LlByaW1hcnk=","KG51bGxhYmxl","UklPUklUWQ==","IGNvbG9yaW5n","IHV0aWxpemluZw==","ZGFz","IGV4cG9ydGVk","IGNhcnJpZXJz","Q29udg==","LmVkaXRvcg==","acOz","KGhhbmRsZXM=","IGFwcHJlY2lhdGlvbg==","LmltcG9ydA==","IEF1c3RyaWE=","IFN0cmlw","aWxpZ2h0","IGFwcHJvcHJpYXRlbHk=","IFByZXN0","IFdpcg==","IFVJQXBwbGljYXRpb24=","YWxjaGVteQ==","IE1vYg==","IERldGVybWlu","ZXJndXNvbg==","cmVnaXN0ZXJlZA==","X2NvbnZlcnQ=","IFZsYWRpbWly","LlNob3dEaWFsb2c=","cmVmbGVjdA==","IHNob29r","IGFzc3VyZQ==","IE9mdGVu","IGNpdmlsaXphdGlvbg==","IHZvY2FidWxhcnk=","Zm9yZWdyb3VuZA==","IFNjb3Bl","IHVud2FudGVk","YWN0aW5n","IChbXQ==","IG1hcmtpbmc=","Lm9yaWdpbmFs","IE1PVkU=","IHNwb3J0aW5n","Y2VwdGlvbnM=","TlNOdW1iZXI=","U2l6ZXM=","IHByb3ZpbmNpYWw=","X1RyYW5z","IHByb2JsZW1hdGlj","ZGlnaXQ=","IEVtbWE=","bG9ja3M=","IENyZXc=","aWJh","Jyk6","aXNoYQ==","IG1hbW0=","IG9jY3VyZWQ=","d2Nz","KHJ1bGU=","IG1lcmNoYW5kaXNl","ZXNwZWNpYWxseQ==","IFR3aW4=","IG5hbWluZw==","IHNsb2c=","IGltcHJvdmVz","IGFkaGVy","OnRleHQ=","LmhhZG9vcA==","X0hUVFA=","LnRvTGlzdA==","LmRpc2FibGVk","IGxlbnNlcw==","LmluaQ==","IFJhcmU=","IFVidW50dQ==","IHNjcmFt","b2xhdGlvbg==","dGl0dWxv","RXZlcnl0aGluZw==","IG5vZGRlZA==","aWNodGln","X2NvbnN0YW50","emM=","bGlmdA==","IE5vdGlmeQ==","b25kbw==","IElORg==","KCIr","IEtheg==","IGRyZWFk","Lm1hcHBlcg==","bGV1cg==","IENvbWV5","IE5C","aWNlcnM=","LlB1c2g=","IEhhY2s=","IEJyYXppbGlhbg==","X3Byb2Q=","IC8vCgo=","IGJpY3ljbGU=","IHVuYXZhaWxhYmxl","IGFkb2xlc2NlbnQ=","Ymxr","IG1pdGln","X2JsdWU=","7Jg=","ZmFkZUlu","IFV0aWxpdGllcw==","IE1O","O2s=","PHN0eWxl","LXN0YXR1cw==","aW5kbw==","IGlubmluZ3M=","IGdq","IHx8PQ==","LmV1","Ok51bWJlcg==","IGN1aXNpbmU=","IFVSTHM=","aWVr","IHdpcmVz","CXBz","aWVn","Lm1r","c29hcA==","IHNvbWV0aW1l","IHN0YXA=","X3Nlcmllcw==","LlRhcmdldA==","5ro=","LmRlc3RpbmF0aW9u","T1VOVEVS","UmFpc2Vz","JkE=","IHNtYXJ0cGhvbmVz","TklFbnY=","LnNkaw==","IGhlbGljb3B0ZXI=","IGltcGU=","IEJpcnRo","QVU=","YnJlYWRjcnVtYnM=","Y29vcmRz","IGV4cGxvcmVk","IGxvZA==","IElw","Z2FibGU=","aWFuZQ==","IGFydGlmYWN0cw==","Qm94TGF5b3V0","2
KfYsQ==","bGlzdGVuZXI=","LmNhcnQ=","IEh1ZmY=","IEhpbmR1","IERhdGFUeXBlcw==","IERydXBhbA==","SUdOT1JF","IG9mZnNldHM=","IFJUQw==","LWxvZ2lu","5q4=","IFFPYmplY3Q=","IHByb3NlY3V0b3I=","Um9jaw==","X2NoYXQ=","V2F5","7LI=","IG5lZ2xpZw==","IGR1ZGU=","Ozw=","IGRlbGVnYXRlcw==","X2ZhaWxlZA==","L2Rldg==","L3dvcms=","KE5ldw==","ZXRhYmxl","KCki","KEljb25z","IHBvcms=","IE1vZGVsQW5kVmlldw==","IFZJUA==","IEtvcg==","bWl4","IG94aWQ=","IFNDUkVFTg==","IEZvdXJ0aA==","LyIsCg==","IHRlZQ==","IFN0ZXZlbnM=","dGlja3M=","IHBsZWRnZQ==","aWJib24=","IExvYW4=","IG5lbw==","bnVtcHk=","IFNoYXJlZFByZWZlcmVuY2Vz","LW9yaWVudGVk","IExvZ2dlckZhY3Rvcnk=","IEdyYXBoUUw=","emVuaWE=","Il8=","V29tZW4=","LmNhc3Q=","IGRlbGliZXJhdGVseQ==","K2I=","IEFybg==","Zm9udFNpemU=","IG1hemU=","IGJsYW1lZA==","Lm1hcw==","fSkNCg==","ZWxlcmlr","IHNjYW5uaW5n","IFdvcmtzaG9w","IGZpbmRlbg==","IGNhdXQ=","VUlGb250","KHJldHVybg==","YWxpbg==","Y2FzdGxl","Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8v","IGluY2VudGl2ZQ==","b3BhdGg=","YmxvYg==","IGNpZ2FyZXR0ZQ==","IGZlcnRpbA==","Ki8KCgo=","IFNoYXI=","CiAgICAgIAo=","IHVuY2VydGFpbg==","IFN0b24=","T3BlcmF0aW9ucw==","IFNwZW5jZXI=","IGRlZmlu","IFNvbG8=","b25lc3Q=","t7vliqA=","IHVvbW8=","R2l2ZQ==","IGRlbnRybw==","O3BhZGRpbmc=","ZW50YWk=","IENhcnM=","IGVudGh1c2lhc20=","IE9wZXJhdGluZw==","U2tpcA==","cGFyYXRpb24=","IHByb3RlY3Rz","IHJldmVy","ZGc=","IENpbmNpbm5hdGk=","IGNvbnNlY3RldHVy","IG11c3M=","ZW1wbG95ZWQ=","YXVzZXM=","aW5rbGU=","LlZhbHVlcw==","o7w=","bG92","X1dBUk4=","IGJvb2ttYXJr","IEFwb2xsbw==","LmF4aXM=","IG3DqXQ=","IG9wZW5lcg==","IHR1bW9y","ZGFu","IGVsZW1lbnRhcnk=","IHNraXBwZWQ=","IEtlcg==","YXNpYQ==","X3Jlc3A=","IGRlbW9s","IENhbmFkaWFucw==","IHRhc3Rlcw==","VUludGVnZXI=","ICckew==","LmF3cw==","Uk9JRA==","cmlhbnM=","TVE=","b3JkYWJsZQ==","IGNvdXNpbg==","UHJvcGFnYXRpb24=","KFNlc3Npb24=","cGhhbHQ=","VUxE","IFNjYWxhcg==","IGJsb29keQ==","IOCm","Lm1hc2s=","LHE=","IFVuaXRz","IGNlbnRyZXM=","IFByaW0=","Ll0KCg==","IFNoYXc=","UHJvbQ==","IFRob3VnaHQ=","Q2h
lY2tlcg==","X291dHB1dHM=","KGNoYW4=","RUlOVkFM","IGJvYg==","X2NtcA==","UGVk","IG1hdHJpY2Vz","IHZyb3V3ZW4=","IGdlbnVpbmVseQ==","aGlnaGxpZ2h0","KGRpc3BsYXk=","KSE9","IGRlbGljYXRl","IEx1dGhlcg==","IE1pbGVz","IHVzZXJJRA==","JT0=","YXRldXJz","X0JVRg==","LS0tLS0tLQo=","aW1pdGl2ZXM=","IHNoZWx2ZXM=","c2xvdw==","X2luZm9ybWF0aW9u","TEVH","V3I=","LmZvcm1z","Y2VsYW5k","L3Vu","OiY=","LuKAmQoK","PSIl","IHByb3N0","IGZvbnRzaXpl","dWNpw7Nu","Z2V0aWM=","YW10","PSIu","RGVjb3I=","QnJpdA==","ICIiKS4=","IGZvdW5kaW5n","LkZpbGVOYW1l","IFRpZXI=","IGRpc2Nsb3Nl","w6Ft","LnN5bg==","LlZpZXdIb2xkZXI=","bGljYW50","X3N0YWdl","TW9uZGF5","IGRlc2VyaWFsaXpl","dGFsaw==","IHRyYWRpdGlvbmFsbHk=","5oCB","2K4=","TEVY","IGVo","CVJPTQ==","IHt9KQo=","UXVlc3Rpb25z","bmNweQ==","IGZpeGluZw==","0LrRgw==","X0tleQ==","Ong=","IFNUUklORw==","INGE0LDQuQ==","CWxlZnQ=","IEJlbmNo","ZWxsaWo=","VVJSRUQ=","IERpYWdyYW0=","fWNhdGNo","L3RpbWU=","IE1pc3Npbmc=","ZGJuYW1l","IHNvcmU=","IFdhbHQ=","dWdnaW5n","cmVwcmVzZW50","IEdT","bmV5cw==","CXBhZ2U=","IHZvbGNhbg==","KGJ0bg==","IGV4Y2VlZHM=","IGVyZw==","IHBpbG90cw==","IFNlZA==","ZXJzaW9ucw==","IHBhdHJvbg==","UlY=","L3RvcA==","LmFzc2V0","X2Nyb3Nz","LkVkaXRvcg==","LnRi","IHdlbGNvbWluZw==","U0NSRUVO","KWZpbmRWaWV3QnlJZA==","Q29kZXI=","PElBY3Rpb25SZXN1bHQ=","X1FVRVVF","4YM=","IGhlaWdodHM=","UmVxdWVzdHM=","IHN5bWJvbGlj","DQ0KDQ0K","IGNvdXBvbnM=","LWZpdmU=","IERlc2t0b3A=","IG1pc21hdGNo","ICdfJw==","X0RJVg==","QVNPTg==","LnRyYW5zcG9zZQ==","KG1hc2s=","IENlbHQ=","LkhhbmQ=","YXR1","asSZ","IHt9KTsK","TWlzcw==","IHByaW1h","bXVuZA==","b2x2","IFByZXR0eQ==","IHJlYmVs","IEZE","YXN0aWNhbGx5","T0xU","LWF4aXM=","dXhl","IGVpbmZhY2g=","IENoZW1pY2Fs","X3NlZw==","bGVldGNvZGU=","bG9wZQ==","X29yaWc=","ICAJCQ==","KERvdWJsZQ==","IFBheVBhbA==","LkJhY2tncm91bmRJbWFnZQ==","IGhvbWVtYWRl","Liku","KHBhcnNlcg==","YXRybw==","YWNjb3JkaW9u","RGVmaW5l","IOyeiA==","IEFVVE8=","LnN1bW1hcnk=","c2NhbGFy","IEhvb2Q=","cXVpbg==","X2Rlcg==","IEdlc2No","LmNvbXB1dGU=","RmVlZGJhY2s=","IHBoYXJtYWM=","IMWfaQ==","IGdsb3Nz","IEZJTFRFUg=
=","SU5TVEFOQ0U=","IGthbA==","LlBM","X0ZSRUU=","R3JhZGU=","IOKZ","Lm1ldHJpY3M=","IGNhZ2U=","Llh0cmFHcmlk","X2Rz","emln","aW50ZXJvcFJlcXVpcmVEZWZhdWx0","LnJlbW92ZUNsYXNz","PT09PT09PT09PT09PQ==","IG1hc3RlcnM=","U3RhdGVFeGNlcHRpb24=","aWxsZXJ5","IEJyYWR5","IGxpbmluZw==","X2Nz","aW5zdWxh","IH06","W3Bvc2l0aW9u","IFJ4","IEJZVEU=","IFN0cmlrZQ==","INCa","IENsdXN0ZXI=","LmRvd25sb2Fk","QWxsb3dlZA==","IGFtZW5pdGllcw==","IG9uVGFw","ZnVsV2lkZ2V0","IHN0cmVuZ3Rocw==","dHdlZXQ=","IGFzY2VuZGluZw==","IGRpc2Nsb3NlZA==","Z3Jhdg==","ZGlzdHJpY3Q=","KTw8","KSwi","KGRlZnVu","X3w=","IGdhemU=","0LDRjw==","IGZvcnR5","PT09PT09PT09PT0=","U2NpZW5jZQ==","c2VtYmxlcg==","CWJvZHk=","X3RyYW5zZmVy","IGxvbmd0aW1l","IGNvbXBsaWNhdGlvbnM=","IGJvb3Ro","VkVSUg==","IHlpZWxkcw==","IG5hdmlnYXRvcg==","OjpfKCc=","RUNUT1I=","X0NvbmZpZw==","IGxhc3RlZA==","dXNhbA==","55m75b2V","IGdsb3Zlcw==","IGJlbGx5","U2FsZXM=","KE1ldGhvZA==","KG1lbWJlcg==","IFJlZWQ=","cGFzc2Vk","U2lnbklu","LG51bQ==","VUxPTkc=","IExFRw==","bmVscw==","IG1lbnRvcg==","KHJj","IE9idmlvdXNseQ==","Lmlm","IEZyZWRlcg==","SEVBRA==","QGF1dGhvcg==","Q29uZGl0aW9ucw==","IGdhcmRlbnM=","IFJpcA==","KHVzZXJz","IE9rYXk=","IHdyZXN0bGluZw==","aW1lc3RvbmU=","IENlcnRpZmllZA==","IHZlcmRpY3Q=","YWlkYQ==","LmlubmVyVGV4dA==","aWNhc3Q=","CWF0","IHByZXN1bWFibHk=","IEZVTg==","YWplcw==","0Jc=","PiIsCg==","X1Bpbg==","dWVzZQ==","IG92ZXJyaWRlcw==","X3JlYWR5","QWR2YW5jZWQ=","IG9waQ==","LWNhcnQ=","KCIvIiw=","IERlYg==","Q1JZ","IFZlcnRpY2Fs","IE9WRVI=","IENvcnBvcmF0ZQ==","ICIiOw==","IHN0ZXBwaW5n","ZWo=","IGFjY3VzYXRpb25z","IG9yYXo=","X3RhaWw=","IGluZHVjZWQ=","IGVsYXN0aWM=","IGJsb3du","LC8v","IGJhY2tncm91bmRz","4oCZdW5l","LXNkaw==","IHNldEludGVydmFs","IGluY2VudGl2ZXM=","IHZlZ2V0YWJsZQ==","X09u","ZXhwYW5kZWQ=","cGl4","X3NoYWRlcg==","IFNQRFg=","QGV4YW1wbGU=","IFdyYXBwZXI=","Llplcm8=","UG9zaXRpdmU=","IHNwaW5uZXI=","IGludmVudGVk","IEdhdGVz","0L7RgtC+0YA=","IGNvbXBhcmlzb25z","6Lc=","LnByaW1hcnk=","ZGF0YVByb3ZpZGVy","YWRkaXRpb25hbA==","CW9wdGlvbnM=","c25hcHNob3Q=","LnNldEhvcml6b250YWw=","IC
J7fQ==","IEZpc2hlcg==","aGFsdGVu","PFR5cGU=","IG1heExlbmd0aA==","IE10","IOqwgA==","LmpldGJyYWlucw==","IGlkZW50aWZpZXM=","IGZsb3dpbmc=","IERpc2N1c3Npb24=","YXRzYnk=","IHNjaHc=","dWdodHk=","IHJpdmVycw==","LnVuaXF1ZQ==","X1BIWQ==","ZWRyYWw=","KGxs","IGNzcmY=","cHBlcnM=","w7xs","IEVzcGVjaWFsbHk=","cG9ydGVk","IEhhcnJpc29u","KioqKioqKi8K","VGV4dENvbG9y","7Iq1","d2lyZQ==","IHN0YXR1c0NvZGU=","IEZpbmlzaA==","Y2VuY2U=","IE1jQ2Fpbg==","IFdvcg==","KGF3YWl0","ICktPg==","IFJlZ2lzdGVyZWQ=","SU5FRA==","a2Fs","cGFyaXNvbg==","IG9iamV0bw==","Vmk=","bWFuZGE=","IHJlbmV3ZWQ=","IFNvZg==","ZXNzZWw=","Lm5kYXJyYXk=","IGNyYXA=","566h","LmFic3BhdGg=","KHVw","IGNsZWFyYW5jZQ==","IFRX","X0NPUFk=","ICAgICAgICAgICAgCQ==","IGZvcmVzdHM=","IGFyZ3VhYmx5","IEFTUw==","aGV5","YW1lbA==","X2ZvcmU=","IFNvdXRoZWFzdA==","IGFidXNlZA==","IHByYWN0aWNpbmc=","YWtlZGlycw==","5Li7","X3Jlc291cmNlcw==","IHBvbmQ=","LkZpeGVk","TGFzdEVycm9y","IFBzeWNob2xvZ3k=","ICIvLw==","ITo=","UmV1c2FibGU=","IG1lbnNhamU=","IHJvc3B5","IGJvdXI=","IHZhcmlldGllcw==","IGVtcGF0aA==","KCh7","X29yZw==","IE1lcw==","IE1hZ2VudG8=","SVNUT1JZ","VW5sZXNz","IGhq","IER1dHk=","SnVu","LHNpemU=","IHBhaW50aW5ncw==","IGRpc3BlbnM=","ZGFydA==","IGJlaGF2aW9yYWw=","IHJwYw==","Y2FsY3VsYXRl","ZnJ1aXQ=","X21t","CXB0aHJlYWQ=","TWF4TGVuZ3Ro","IGN1cnJlbmNpZXM=","X2NhcGFjaXR5","IE96","IGZpcmVhcm0=","IGNvZWZmaWNpZW50","IGJhbmtydXB0Y3k=","d2FydA==","IGZhdGlndWU=","QVZB","IGVzcGE=","X3Bj","IFF1b3Rlcw==","X0xJR0hU","IFRpY2tldHM=","IHJlbGF0ZXM=","IHB1Ymxpc2hlcnM=","IHVubG9ja2Vk","IC8vLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ==","IEludGVycnVwdGVkRXhjZXB0aW9u","IG91dGxvb2s=","cm4=","IHJlYmVscw==","V3JpdHRlbg==","IGFzaWFu","b3R0bw==","IAkJCQk=","X2dwdQ==","VHh0","LkltYWdlVmlldw==","IHN1aXM=","X3RhYmxlcw==","LlJlY3ljbGVyVmlldw==","IHdoYXRzb2V2ZXI=","6IE=","XSsrOwo=","YXNzZXJ0VHJ1ZQ==","X3ZlcmlmeQ==","IFJpdmVycw==","IF1b","SmV0","aWRpYW4=","U2libGluZw==","IGdlbnJlcw==","LkFjY2Vzcw==","T1BT","IHRyaXZpYWw=","4Liq","YWxlbg==","0LLQtdC0"
,"IFN3b3Jk","IHNjcnV0aW55","KGNi","IGNvbW1lcmNl","IGd1YXJhbnRlZXM=","X2Fkdg==","IExFVA==","cmVjaW8=","IGhpbGFy","IGJhY2t5YXJk","44CP","IGlsbHVzdHJhdGVk","L3ZlbmRvcg==","LlV0aWw=","IHdvdw==","TE9Z","IE1hcnNoYWw=","Ij4nLiQ=","IEJhaw==","IG1vZGlmaWVycw==","ZGljdGlvbmFyeQ==","IFN0cmU=","bXVsdGlwbGU=","IikpLA==","IENvcnQ=","J10iKS4=","KGFkbWlu","IENyZWF0b3I=","SW50ZXJuZXQ=","KG1z","bG9neQ==","REVDTEFSRQ==","IE1hcmN1cw==","PDw8PA==","44Gg","X215","KGluc3Q=","IHNjaWVuY2Vz","TkRFUg==","LmVudGVy","IGl0dQ==","IGJlaGF2ZQ==","UGFu","b21iaWVz","PSc8","JykpOw0K","IE1FTlU=","IFdvcmtlcnM=","Lk5vRXJyb3I=","IGJpbmRpbmdz","IGRpc2FiaWxpdGllcw==","e1w=","IE11bmljaXA=","IGNvcmVz","dXJwbGU=","IE5va2lh","dXNpb25z","IEZpdG5lc3M=","LmhhbmRsZUNoYW5nZQ==","IGphdmFzY3JpcHQ=","7JqU","KGRlYw==","IHBhY2tpbmc=","LWRlcGVuZA==","IHRyYW5zY3JpcHQ=","emVyb3M=","X2FsZXJ0","PyIsCg==","bGlicw==","sdC+0YI=","IHwKCg==","dHJhaW5lZA==","IEdlbnQ=","IFJhYg==","eHA=","X2NvbmZpZ3VyYXRpb24=","5aSp","X2FjY2VwdA==","LnJlY3ljbGVydmlldw==","OnVybA==","IE11aGFtbWFk","IHByaXZpbGVnZXM=","X2Jhbms=","dWt1","d2FsbGV0","IFJPT1Q=","IGVuY3VlbnQ=","P2ZhbWlseQ==","CXBvc2l0aW9u","IGNn","IHByZWNpcA==","bWV0aG9kcw==","X2Zhc3Q=","aW5jcmVtZW50","IFRpZ2Vy","X09DQ1VSUkVE","cXVpcA==","IEhBUw==","X2RvbQ==","IHdyZWNr","Ymo=","IGRlcm4=","IG9yZ2Fucw==","LmVudHJpZXM=","IF8oJw==","cmFtZW50bw==","IEphbWll","IHB1bms=","SVBQ","IHByb2dyYW1h","IGF0dGFpbg==","IHByb3Zlcw==","L3NpZ24=","IGFuc3dlcmluZw==","IGxhZGRlcg==","KioqKioqKioqKioqKioqKioqKioqKioqKioqKg==","IFdhbG1hcnQ=","IENPTlRFTlQ=","ZHVjdG9y","IHZlcmJhbA==","IFBJRA==","Y3J5cHRv","X0NBTExCQUNL","ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PQ==","IHBvdGVudA==","IHNob3J0cw==","LlVyaQ==","LnVuaWZvcm0=","O2JvcmRlcg==","IFdlcg==","IGhlcmVpbg==","bGxh","IElocg==","UGl4bWFw","bGl0ZXJhbA==","ISkKCg==","Z2VuZXJpYw==","cnVzdA==","X3NjcmlwdHM=","b3N0bw==","aXR1cw==","IENvYWxpdGlvbg==","IHJlbW90","ZGVwbG95","IEVhZ2xl","44CB44CM","IGltcG9ydGFudGU=","CW9iamVjdA==","IHNlYXNvbmFs","bmVq","YWlkdQ==","QmluZFZpZ
Xc=","IFNpZXJyYQ==","LWJn","IG1ha2VTdHlsZXM=","W29mZnNldA==","R2FtZXM=","IGhvcm1vbmU=","QVJJTw==","aGVhZHM=","KHNlbGVjdA==","IFN0YXJ0ZWQ=","QHBhcmFt","X2RlY2w=","X2Jsb2c=","IGHDsW8=","XEFwaQ==","IE1pbHdhdWtlZQ==","UHJvdmlk","QW5pbWF0ZWQ=","IGNvb2xlcg==","IFNlZWQ=","LkVkaXQ=","z4Q=","IFRha2luZw==","IGJvcmRlckNvbG9y","LWZvdW5kZXI=","LkxvZ2dlckZhY3Rvcnk=","ICIiCgo=","QUxU","IExhdGU=","RURJQVRF","ICk7CgoK","YWZh","IGNhbmNlbGxhdGlvbg==","QXRvbQ==","IEJpcm1pbmdoYW0=","ZW1wcmVzYQ==","SEVNQQ==","YXNjYWw=","IHVwc2lkZQ==","LlZlcnNpb24=","IEZvbGRlcg==","IEVpZ2h0","IFZpbnRhZ2U=","IEFwcERlbGVnYXRl","IFByZXZlbnRpb24=","LnNlcGFyYXRvcg==","U1RN","KHJvb20=","Z2VuZXJhdG9y","IGNhdHRsZQ==","CVo=","IFBhcnRpY2xl","J307Cg==","IG5laWdoYm91cnM=","IFN0YXRlbGVzcw==","IGFsdGl0dWRl","IHNhaW50","0L7QsdCw0LI=","IGNvbnZpbmM=","IENvbnRlbnRz","IGpldW5l","KHRz","U2VyaWFsaXphdGlvbg==","KGNvbGxlY3Rpb24=","IEpheno=","IERvZA==","IFJvY2g=","YWNpbw==","Y29tbWVuZGVk","REVGSU5F","Lm9ubG9hZA==","IHNwZWNpYWx0eQ==","UExBQ0U=","X01PVkU=","IGFjY291bnRhYmxl","UmV1dGVycw==","IGZpY2tlbg==","IGRlcHI=","V293","Vm9pZA==","LnNwYWNl","4LiX","IHRx","IFBldHM=","PCQ=","KEN1cnJlbnQ=","YmVycmllcw==","cGxhbmF0aW9u","IGxpc3RPZg==","IFRodQ==","IFBSSU5U","IG1pc21v","IGRvaQ==","Y2hr","IFVuaWNvZGU=","KHJvbGU=","IHZpcmdpbg==","PFBvaW50","X1JFU1BPTlNF","LWhvdXNl","IFZlbmV6dWVsYQ==","RU1BSUw=","IHDDumI=","X2V4aXN0","QmFsbA==","LkNM","cmVmZXJlbmNlcw==","IEJlYXV0aWZ1bFNvdXA=","CUV4cGVjdA==","VEhJUw==","0YPQtA==","YmFuZQ==","IHRlbXBvcmFs","RVJJQw==","ZXRhcw==","IHJlZnJlc2hpbmc=","IHNlY3VsYXI=","QHN5bnRoZXNpemU=","YWNjdXI=","IG5lbGxh","IFNPTA==","LnBpcGU=","Q2hhbm5lbHM=","6Ieq","IGluc2VydGlvbg==","4buL","ZWxpYQ==","IGFkanVzdGFibGU=","Q2FuYWRh","IElURU0=","IGN1cnZlcw==","IENoZWFw","bGV0aW5n","IG9wdGltaXN0aWM=","YWxsbw==","IHBvbGl0aWNpYW4=","X2Rvd25sb2Fk","PWVkZ2U=","T1JUSA==","IG1vZGVsbw==","YXJ0bw==","LnJvdGF0ZQ==","IHNlbGVuaXVt","5oiR","X2FsaWFz","IHJlbm93bmVk","Licu","IGN6eQ==","IGFsbGVz","LkNvbXBpbGVy","IEJhc3M=","Q29ubmVjdG9y","LlJvbG
U=","TElOSw==","IGNyaXRlcmlvbg==","bGVtZXRyeQ==","U3VjY2Vzc2Z1bGx5","L3BuZw==","IGV5ZWI=","YXNwYmVycnk=","KGdy","IGRhbmdlcnM=","IGNvcnJlY3RlZA==","IGdsb3c=","IGVsYWJvcmF0ZQ==","IEJlYXJz","YXdhaQ==","PSInKw==","IHByb21vdGlvbnM=","IG1hdGhlbWF0aWNhbA==","ICJg","X0dlbmVyaWNDbGFzcw==","IENoZWY=","LlNvcnQ=","dGFibGVOYW1l","UklD","IHZvbHVudGFyeQ==","IEJsYWRl","LWVsZWN0","IENvbWJhdA==","IEFiaWxpdHk=","IGFiZG9t","IGR1Y2s=","VG1w","5YWo","IGVyYXNl","LlBo","IERlZmF1bHRz","cGFydG1lbnQ=","X1VTQg==","w6p0ZQ==","Oyc=","IHBhZHM=","IE9iYW1hY2FyZQ==","LlRvdGFs","IGRpdmVydA==","IGNyaWNrZXQ=","IHJlY3JlYXRpb25hbA==","KHJlZA==","IENsZQ==","UlU=","IG1pc3Rha2Vu","IE1vbnRhbmE=","IHN0cml2ZQ==","X3NsaWRlcg==","IFBsYXN0aWM=","IGRlY29yYXRlZA==","IFZQ","bGljbw==","CWZhbHNl","IHByZWZz","KFwi","X2ZhbHNl","aWVuZG8=","IEAk","QnVja2V0","YWN0aWNhbA==","IFpoYW5n","LmNvbHM=","LkJpbmRpbmc=","IHdheA==","X1NUT1JBR0U=","IGxhd24=","IHJm","LlNjZW5l","IENhbGN1bGF0b3I=","LmRlc2lnbg==","IHJlc2ls","0LvQtdC8","RW1wbG95","IFByaWNlcw==","IFBXTQ==","YWdp","LmV2YWx1YXRl","CXBhcmFt","IGJyYXNz","YmJlbg==","IGluZmxhbW1hdGlvbg==","dWxsaXZhbg==","IGFubm90","IHBI","aWFtZXRlcg==","IEJUQw==","KGJveA==","U3Rvcnlib2FyZA==","IGNsYXk=","LmFzc2VydFJhaXNlcw==","fHN0cmluZw==","LkFwcGx5","IG1hdGNoZXI=","dW5kZWQ=","IHNhdGlzZnlpbmc=","IOyglQ==","UmVuZGVyaW5n","X2FwcHJv","aW5kcm9tZQ==","QU5FTA==","X2ZpeA==","YnJ1c2g=","Lk1hdGNo","IHNtaWxpbmc=","b25hdXQ=","U3VuZGF5","IGRlbGV0aW9u","IGVuY291cmFnZXM=","UHVsbA==","IHJldmVuZ2U=","IHF1YXJyeQ==","dHJhZGU=","IGNhYmxlcw==","KGRlbHRh","aXRlc3BhY2U=","IGZo","LmJ1bmlmdQ==","IHZpZWw=","X0lOQ0xVREVE","IFRhaWw=","YWRhcg==","b2Zz","IG1ldGFscw==","Z29t","X21ldGhvZHM=","IG5q","LlN0ZA==","KHdpbg==","JCgn","IHR1cnRsZQ==","dXJvbg==","IGVucm9sbGVk","IEh6","IEJveERlY29yYXRpb24=","IHBvbnQ=","cmVsYXRpb25zaGlw","Qmk=","s7s=","IG1hc2N1bA==","IHNoYWRlcw==","IHZy","IExvZ2lj","IGFpbg==","IERJU1Q=","IGNvbGxhcg==","InByb2ZpbGU=","R2VuZXJhdGVkVmFsdWU=","IFBvc3NpYmxl","IGVpbmVz","g4E=","LnRpbWVvdXQ=","IEVj","IGplcnNleQ=
=","LkRvdWJsZQ==","IHF1YWxpZnlpbmc=","dm9y","Q1JFRU4=","X0FwcA==","X3JlY3Y=","IGFsaWVucw==","SXRz","RXNj","aWF0b3I=","IEVjbGlwc2U=","IGdo","VmljdA==","CWh0bWw=","dG9v","LmNvbnN0","IGFudGVyaW9y","IFd1","KGtleXM=","IHVsdHI=","X3BvbHk=","IFRhcA==","IEJ1ZA==","QVdT","IGNyYXNoZXM=","X3RvdA==","Q29udGlu","LWhhbmRlZA==","YWx0aG91Z2g=","4Lia","aWZpY2VudA==","IGRldmU=","dXRvcnk=","IFdvcnRo","X01T","IGZsb29yaW5n","IHNlbGxlcnM=","IFRoYW5rc2dpdmluZw==","IHBuZw==","IHZhbG9yZXM=","IHNsZWV2ZQ==","IGZpbGxl","0JA=","IGFwcG9pbnRtZW50cw==","IHZpbQ==","VXNlckluZm8=","Qk9PU1Q=","IHBvc2Vk","aW5pdGlhbGl6ZWQ=","LnByb2R1Y3Rz","IExlYWRlcnNoaXA=","bWFudWVs","JyU=","ZW1hcmtz","UGVyY2VudGFnZQ==","KGRpc3Q=","LmF2YXRhcg==","KGhPYmplY3Q=","5LuK","X2lmZg==","aWNvbmU=","Oyk=","X25pbA==","IGFib2w=","0LXRgdGC","IHZlbnVlcw==","LkNvbnZlcnQ=","IScpCg==","LkJpdG1hcA==","c2tpbg==","X0NPTFVNTg==","UmV2","R1JFU1M=","Z293","IHdpc2hlZA==","dHJhY3Rz","LmFzc2VydEZhbHNl","IHNjcmVlbnNob3Q=","IGZvaXM=","Q29tYg==","TGluZVdpZHRo","IEdyYWI=","IGludGVuc2l2ZQ==","CXNo","Kyk=","LmZpcnN0TmFtZQ==","X1BST0NFU1M=","IHRpbHQ=","aXRvcmVk","LkxPRw==","IGJhaw==","IGludGVudGlvbmFsbHk=","LnBsYXllcnM=","KGNhbnZhcw==","KSkpDQo=","LlByb3ZpZGVy","X1BVQkxJQw==","VGFsaw==","IExpdg==","Y2hlZHVsZXJz","IGxj","YWRpYw==","ZmVhdHVyZWQ=","LnJlc291cmNlcw==","RnVsbE5hbWU=","IG1lYW53aGlsZQ==","QnVmZmVycw==","IHJlc29sdmVy","IFNBUA==","X1RF","R05V","IEZvcm1zTW9kdWxl","X3do","IFN3ZQ==","LndpZGdldHM=","IGNhYmluZXRz","IHN1c2NlcHQ=","IEJvdHQ=","YWN0aXZleA==","YXZhcg==","YW50aWNz","ICI9Ig==","X2t3YXJncw==","IGdhbWVPYmplY3Q=","IEFuZ2xl","Lkl0ZXI=","bWFyc2g=","IEJpcnRoZGF5","IENNUw==","cmVxdWVzdHM=","IFBlYXJs","X0VPTA==","IGxpbnV4","KG9yZw==","X01vdXNl","LmNvbnN0cnVjdG9y","IHpk","IGtpY2tz","YXJ0aXNhbg==","IGVheA==","S24=","cG9uZ2U=","IEZpbmxhbmQ=","IG1ldHJlcw==","IEFzc2Vzc21lbnQ=","cGFydG5lcg==","L3ByZQ==","IScsCg==","W0ludA==","IG9zbG8=","ZGF0ZXBpY2tlcg==","L1N0cmluZw==","b3BsYXk=","IEhlYnJldw==","LGRvdWJsZQ==","IHRyYWJhbA==","KyJc","CUVJRg==","L3RleHQ=",
"X0ZJUlNU","IFBldGU=","IGVnbw==","IGV4dHJhcw==","UERP","IHJlZ3VsYXRl","IFFXaWRnZXQ=","c3Rz","IFNob3dz","IE5IUw==","LmNvdXJzZQ==","cHRocmVhZA==","IEZ1ZWw=","LnRpbWVz","IMKw","IHN0cmlkZXM=","KCQoJyM=","KHdvcmRz","IHJoeXRobQ==","IHNwb250","IHNlbnNhdGlvbg==","IHNwaWtl","Q2xvc2luZw==","6aG16Z2i","TnVtZXJpYw==","IGJyZWF0aGU=","IGZpbmFsZQ==","X0ZBQ1Q=","aW5pb24=","IGNoaWxs","IGZvcm1hbGx5","QU5HRUQ=","ICc6Jw==","INC/0YDQuA==","YXE=","IEZhYnJpYw==","KGxhdA==","IFByaW5jaXBhbA==","IGVycm8=","b2NhbGU=","Tm9t","IGZvc3Q=","X0NVU1RPTQ==","LmludGVsbGlq","ZXJ0b29scw==","IGNsYXNzZQ==","YWRpZW50cw==","IGZ1bmRyYWlzaW5n","RU5F","X09QVElPTlM=","X29i","Ly99Cg==","IHByb3RlY3Rpb25z","LnNlZWQ=","TlY=","dGVybWluYWw=","Ozs7","UHJlZGljYXRl","IOy2","IGJvbWJpbmc=","R0Y=","IGNoZXc=","KSkpLg==","cXVhbGlmaWVk","XT17","bGlzdGVu","Q0VOVA==","ZGlnZXN0","RWFzdA==","IGRpdmVy","IGVuZHBvaW50cw==","IGVl","IGNvbGxlYWd1ZQ==","IGRpc3NlcnRhdGlvbg==","X2NvbW1pdA==","X0RBVA==","LnJj","IGJyZWFzdHM=","IFJ1Zw==","IFBpbA==","Q29udHJhY3Rz","IEJyeWFu","V2ViVmlldw==","IGNvbmNlbnRyYXRl","IElubmVy","ICd8","c3Rkb3V0","X1N1Yg==","Pi0tPgo=","Vm9s","IFNTRA==","KSkpLA==","Lk9wdGlvbmFs","IG51cnNlcw==","IG9yYg==","X3Bl","KTsNCg0KDQo=","cGxhY2Vk","ZXNzZXI=","IHRoZXJhcGV1dGlj","IHdoaXRlc3BhY2U=","IGFzdG9u","U3VjY2Vzc2Z1bA==","IHByYWlzZWQ=","IFdlcw==","IGVpZ2h0aA==","aXJhbA==","IHZyb3V3","IGZhY3Rpb24=","X2JpYXM=","IHdpdGNo","IG5wYw==","KHNi","IFJvZHJpZw==","X2JpZw==","RGVwZW5kZW5jeQ==","IEFicmFoYW0=","YXJkaQ==","Q0FS","bm9z","IGFidW5kYW5jZQ==","IG51dHJpZW50cw==","aW5zdGVpbg==","LlZlcnQ=","IElTUw==","PFU=","IHN1bXM=","X2hpc3Q=","IGZhcm1lcg==","IEFicg==","U2hvdA==","IEJhZFJlcXVlc3Q=","IGhhc3M=","IFJhaWxz","IGFmZmlsaWF0ZWQ=","5p2l","IGVyZg==","SU5G","IFZpZXdIb2xkZXI=","bWluaQ==","IFJvdGg=","IGZhaXRoZnVs","IFBoaWxsaXBz","QU5ET00=","XS5b","X1BBWQ==","IEFyY3RpYw==","ZmFrZXI=","RGlnaXQ=","TWFsZQ==","c3RkZXJy","c2V5cw==","IMWh","X3JlbW90ZQ==","bGlxdWU=","IGluZGVm","IEluZHVzdHJpZXM=","aXRyYQ==","X3BhaXJz","PGlvc3RyZWFt","IHNhbGFyaWVz","aWtl
bg==","LkZyYW1l","UExJQw==","X1NQRUM=","IE1lZGl0ZXJy","IHN5c3RlbWF0aWM=","IGludGVycm9n","SWNvbkJ1dHRvbg==","c2Vh","aW50cm8=","IElzc3Vlcw==","ZW5jcnlwdGVk","IGludGVybmF0aW9uYWxseQ==","IHNucHJpbnRm","IHBhc3Rh","IEJyYWRsZXk=","X1N0YXR1cw==","QUxL","X1BBRA==","LmxhdW5jaA==","PHNlbGVjdA==","IGhhcmRlc3Q=","IHBoeQ==","ICgoKg==","LXNsaWRl","IE5vYm9keQ==","U3U=","IGFzw60=","Y2xvc2VzdA==","X2luaXRpYWxpemVy","IHN1cHBvcnRlcg==","LWdlbg==","IHRhbGVz","IGNvcnA=","X2Z1","c2F0","bmVpZ2hib3I=","Lk1pZ3JhdGlvbnM=","IGFsZ3Vu","IHNpbm9u","LlNwZWM=","PywK","LkdM","bWFsZQ==","IG1vbml0b3Jz","eWxhbg==","LUxpY2Vuc2U=","Lm1hdGNoZXM=","IEFCUw==","IE1hc3Q=","IFdhbGxldA==","KCQoIiM=","RGlydHk=","IGNvcGU=","IGludGVycG9sYXRpb24=","b3VzZWQ=","IEpldHM=","LkZMQUc=","LkNhbmNlbA==","LkV2ZW50cw==","bmV2ZXI=","IE1Ieg==","PkQ=","IHNlcnZsZXQ=","YmFzdGlhbg==","ID4m","U0lE","X2Nsaw==","IGRpdmlzaW9ucw==","fScsCg==","IGRpbGRv","IHBhcmFkZQ==","bWFqb3I=","IGFib2FyZA==","Oysr","IGZ1c2lvbg==","In0seyI=","IERpYWxvZ1Jlc3VsdA==","CWFycg==","LWVt","X25y","KGhhbmRsZXI=","Lk5FVA==","Llh0cmFSZXBvcnRz","IFNoYWg=","IEJyaWVm","LSw=","IHByZWNpbw==","CQkJICAgICAg","IHRhbnQ=","IEdyYW5kZQ==","L3htbA==","X0lDT04=","IFJldHJv","dW5xdWU=","IG5hZw==","dG9GaXhlZA==","WEw=","IGRlY2xhcmluZw==","IENvbmNyZXRl","IEFtYXppbmc=","CXByaW50aw==","IGRlYmF0ZXM=","REFURUQ=","IGFlc3RoZXRpYw==","ZW1ldGVyeQ==","Um91dGluZ01vZHVsZQ==","IE5hc2h2aWxsZQ==","V0FZUw==","IHdvbGY=","IG9ic2VydmVycw==","T1RB","YW5zb24=","IGVh","IGdyZWVuaG91c2U=","k43kvZw=","IHN0YWly","IGltbWlncmFudA==","X2FwcGx5","cGVhcmU=","IEJsb29tYmVyZw==","X1BMQVlFUg==","UmVzcA==","5q2j","Q2hvb3Nlcg==","IElDb2xsZWN0aW9u","UGV0ZXI=","RXJybw==","LmRldGVjdENoYW5nZXM=","TWFwcw==","IHNxdWVlemU=","IEhvbWVz","d2VnaWFu","IGZvcm1hdHRpbmc=","IG5lZ290aWF0ZQ==","dWxk","IE5lcA==","IFFC","IGVjb25vbWllcw==","ICovLA==","IHJlZHVuZA==","IEFiZXI=","LklzTnVsbE9yV2hpdGVTcGFjZQ==","eWNsZWQ=","ICAgICAgICAgICAgICAgICAgCg==","X1No","IHNrZXB0","IHJlY3JlYXRlZA==","IGdldFR5cGU=","IG1hcmdpbnM=","IGNvbG9uaWFs","Y2hhcnR
z","Ly9A","IHByb2Nlc3NvcnM=","6K+0","YmF0aXM=","5oSP","YXRvcmlv","bWVudGlvbmVk","UGF0aWVudA==","IHByZXk=","Q2hlY2tib3g=","X3hwYXRo","LnNraXA=","IE1vcm1vbg==","IE1lbW9yeVN0cmVhbQ==","Q1JFTUVOVA==","IGt1","bWVsZA==","XERhdGE=","IEtlcm5lbA==","aWx0cg==","6YCB","KHByb2ZpbGU=","Q2FyYm9u","Uk9MRQ==","KHBs","XSoo","Lm1lbW9yeQ==","IG1lZGFs","IGFkdmlzb3I=","aXTDpHQ=","IGhkcg==","aWVydW5n","IFByb3ZpZGVz","KGFscGhh","IHRlZW5hZ2Vycw==","LXBhcnNlcg==","LkxhdExuZw==","XSgpCg==","IGZlbG9ueQ==","CQkJCgkJCQo=","Qk9PSw==","IHNsYXNo","IGNsZWFyZml4","IFByb3BoZXQ=","5a65","cmlnaHRuZXNz","LWZp","LmtpbmQ=","ZXJ0b24=","Smlt","IG1hbmlwdWxhdGU=","IHdvcmtzaGVldA==","b2xpbg==","c3RhcnM=","IGFydGlmYWN0","X0VNUFRZ","CW1haW4=","LS0tLS0tLS0tLS0tLTwv","L3N0YXRpYw==","SVRJRVM=","IENvdW5zZWw=","IFdD","IEJMQUNL","LXN5c3RlbQ==","IFRyaXBsZQ==","LmJ0","c29mdHdhcmU=","XScpLg==","SW5qZWN0aW9u","X25vdGlmeQ==","IGZpZnRlZW4=","IGFtYmFzc2Fkb3I=","YnJlYWtpbmc=","VVJJQ29tcG9uZW50","IFByb3Rlc3Q=","LlJlc2V0","IE1Qcw==","dnJv","LmdldFN0YXR1cw==","X21vcmU=","Y3Vw","IEtlbnlh","5bey","IGFtbXVuaXRpb24=","15XX","IERhc2g=","IHVuZGVyZ28=","IGJ1ZGR5","0YLQvtGA","ZXRpY2FsbHk=","X091dA==","IEJyb2Fkd2F5","qow=","IEZpdHo=","IHN0cmlwcGVk","LWNhY2hl","IHVtYg==","IGFub20=","IHNpYmxpbmdz","b2N1bWVudGVk","SW50ZXJydXB0ZWRFeGNlcHRpb24=","IHBlbmc=","bHN0","X0FMSUdO","LWNhcA==","UkQ=","Y2VsbHM=","IE1vdG9ycw==","IHRyYW5zbGF0aW9ucw==","dXN0ZXJpbmc=","6Zo=","IGxlYWtz","ZmlsZVBhdGg=","IG91dGdvaW5n","X2VuZHBvaW50","X0dM","LmxpZmVyYXk=","cmljaHQ=","IE9wZW5HTA==","LmpwYQ==","IGFmZmVjdGlvbg==","Zmx1eA==","IGdseQ==","IGJ1ZA==","Pic7","IGV4cHJlc3Npbmc=","IElR","IEZhY3Q=","LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioK","X21hc3M=","KSk6","IGNvbmRvbQ==","IGNyZWF0ZVN0YXRl","b21ldG93bg==","IGlycg==","ID4o","PkI=","aXRlcmF0aW9u","44Oq","IHNoaXJ0cw==","b3VudHk=","LT4k","X1NJR04=","IERhbGU=","IGpq","RWFzeQ==","RnJl","IE55","IGNobG9y","bWF0Y2hlZA==","IEdlcm0=","LVVB","IE5hdGhhbg==","ZWR1Y2F0a
W9u","LXlhcmQ=","LWNoZQ==","aG91c2Vz","cml0aW9uYWw=","IHByb3hpbWl0eQ==","IGRpZXNlbQ==","4bqtcA==","IGRyb3VnaHQ=","LmF1ZGlv","IExlbw==","IGZhdm9yYWJsZQ==","aW5jaA==","IERhdw==","cmlibHk=","X3N0dWRlbnQ=","aWRhYmxl","T1ZF","IGxhY2tz","b3VuY2luZw==","LmJ1c2luZXNz","IHJlb3Blbg==","bWF5YmU=","X0dMT0JBTA==","IGRyZXNzZXM=","IEVkd2FyZHM=","ZW5zaWJsZQ==","IEhhcmR3YXJl","IEV4Y2VsbGVudA==","IFRpbWVVbml0","Q1RJT05T","IHNjaGVkdWxlcw==","IHNlZ3Vl","T3BlbnM=","YW1tZW4=","LUlkZW50aWZpZXI=","IHN0YXJpbmc=","IGhhcHBpbHk=","IEhvYg==","J18=","ICIpOw==","YW1lbnRvcw==","ZXRjaGVk","IC8+fQo=","LlVzZXJz","IGludGVycnVwdGVk","Q29udGFjdHM=","IHJlZ2lzdHJv","aW5idXJnaA==","Q0hB","X2ltcA==","cGhpcw==","c2F5","IHJldGFpbGVy","Lk5PREU=","L21hcHM=","X0xBU1Q=","IENoYXJnZQ==","X2d1YXJk","Q29sbGlkZXI=","IFN0YXRlbGVzc1dpZGdldA==","IjpbIg==","KCIuLi8uLi8=","aW94aWRl","IFN1bmQ=","ICcnOw==","dW5zZXQ=","YWRkV2lkZ2V0","0LvRjg==","ZWxsZXM=","YWxrZXI=","QXJj","IGRlZHVjdA==","R1VJTGF5b3V0","IFZpbGxh","IGZvcmJpZGRlbg==","X3doZXJl","IFwv","IFRpYg==","X0FY","XQ0KDQo=","IEJpcg==","IGJlbmQ=","IE1BS0U=","IE1FVA==","IGZ1dHVyZXM=","IHdlaWdodGVk","IiIiDQo=","IGF1dGhvcml6ZQ==","KHByb2dyYW0=","fSx7Ig==","IGNvZWZmaWNpZW50cw==","w6pz","UGVyUGFnZQ==","IEJhdGhyb29t","IFB1Ymxpc2hpbmc=","R1BM","IHN1Ym1pc3Npb25z","IE5VTUJFUg==","asSF","IGFkZGl0aW9uYWxseQ==","ZW1wcmU=","IFNoZWw=","b3R5cA==","U29sdXRpb24=","IHRodW5kZXI=","X2Vj","IAogICAgCg==","IEZlbGxvdw==","IGtheQ==","IG5ld1N0YXRl","T05UQUw=","SW1wbGVtZW50YXRpb24=","Lkxvb2s=","IGVudHM=","IGxvcnM=","IEJJRw==","ZmFi","IGF2ZXJhZ2Vk","IEZlZWRiYWNr","IFdlbGxz","IG1hcnRpYWw=","IGluZHVs","IENvbW11bmlzdA==","IEZvcmV4","IEFncmljdWx0dXJl","Ils=","IHF1YXI=","IEtvbnQ=","CXZpZXc=","LkJ5dGVz","ZGVza3RvcA==","IE1ha2Vz","YWtlc3BlYXJl","Lk51bGxhYmxl","IHNwb3RsaWdodA==","VkI=","b3d5","KHRvcmNo","dHJpZGdl","X2JvdW5kcw==","IGFwb2xvZ2l6ZQ==","LmFkZEl0ZW0=","YW50ZA==","Kik7Cg==","LHU=","KGdlbg==","57uT","cmVhdG9y","IENvcmQ=","b3VwcGVy","Lm1ldHJv","IGV3","IFdPUkQ=","LkFmdGVy","IGRldGFpbmVk","IEhhbW1lcg=="
,"ZXhpc3Rpbmc=","IG9zdA==","IG1vbnVtZW50","LWN1c3RvbQ==","VXNlcklE","IE5vbQ==","IHJlamVjdGlvbg==","KGRpbQ==","IHNpbmdsZXRvbg==","CWRpZQ==","YXJpYW5jZQ==","cmVwb3J0cw==","XSE9","ZWxkYQ==","IHByZXZhbGVuY2U=","X3JlZ3M=","LiIu","IGZlbWluaXN0","Q29kZWM=","ICoqCg==","KGxhYmVscw==","X01BUks=","RkFJTEVE","IGFkbWluaXN0ZXJlZA==","V04=","ICAgICAgICAJCQ==","IG5vdW4=","d2ln","IGdvdHRh","IHJpZg==","LWlt","IFBhdWxv","IENvbW1hbmRUeXBl","XSkpCgo=","LXplcm8=","VHJhaW5pbmc=","IGxvcmQ=","X2FydA==","cmVkZGl0","Q2VydA==","IHBlc28=","Um90","IGVuZGFuZ2Vy","LmRy","dXNlckluZm8=","dW50cw==","bnY=","IFRyYWlsZXI=","LWZpcnN0","KG1ha2U=","IGJlbmVmaWNp","LWJsYWNr","acOf","IHVuZG91YnRlZGx5","IG1leA==","IEFuY2llbnQ=","KGFz","IGRlc2NlbnQ=","UGljaw==","IHJlcGxpY2E=","JG9iag==","w6Rocg==","IGFycm93cw==","ZnR5","IExpYnlh","dWdh","Y2hhcmdlZA==","VHVy","IGhvbWlj","aXNzZW4=","IEZha2U=","IGJlZXJz","IHNjYXR0ZXJlZA==","KFRpbWU=","VVRJTA==","IGJ1cmVhdWNy","L3BsYWlu","IHN0aWNraW5n","RkFJTA==","IENvdmlk","VGhpcmQ=","X3ByZXNlbnQ=","IFBpZXJyZQ==","IOuq","IFsuLi5dCgo=","UHJvYg==","IFRyYWZmaWM=","aWNhbw==","ZG9jdG9y","ICksCgo=","VGFicw==","YWx1","77ya4oCc","IGluaGVyZW50","X05v","cml0aXM=","IFByb29m","LmJhc2VuYW1l","5Lya","IGNoaW0=","IFByb3RlY3RlZA==","Y3JpdA==","IHByb25l","INC60L7QvQ==","IEhlcm9lcw==","IGFueGlvdXM=","IGFub3M=","IHdlZWtlbmRz","IHNleHQ=","IHJlZHVjZXI=","PVVURg==","aGFsZg==","IFNhdw==","Lm1t","IG51ZXZh","LmN1cnJlbnRUYXJnZXQ=","Lmx1YQ==","X0VYVEVOU0lPTg==","CXJlZw==","IEN0cmw=","X2FsaWdu","YWNjZXB0YWJsZQ==","IHJ1c2hpbmc=","ZnJhYw==","IGJvYXN0cw==","Rml2ZQ==","wrE=","IFRlbXBlcmF0dXJl","Pik6","IGNoYXJ0ZXI=","UkVBVEVE","IHN1YmplY3RlZA==","IG9wYw==","aGVhbHRoeQ==","5L2/55So","IFNjaWVudGlmaWM=","IGZyYXU=","cmlhZ2Vz","4LiU","LmludmVudG9yeQ==","YXRpb25hbGU=","TWFk","bWludXRlcw==","Pj4oKTsK","IEVudg==","IHJlY29yZGluZ3M=","IHN1c3BpY2lvbg==","c3FsaXRl","CXJlYWQ=","44Gm","IHdvcnJpZXM=","LnB1dFN0cmluZw==","IFNoYW5naGFp","KHVpZA==","cmVy","IHbDrWRl","Iik6","IG1ldGhvZG9sb2d5","INC60L7RgtC+0YA=","Y2Nj","YXZhZA==","IGl
uZHVjdGlvbg==","CVRocmVhZA==","LHN0cmluZw==","4bqhaQ==","bmVobWVu","dWl0aW9u","ICpfXw==","LmVtZg==","IOyc","L3RoZW1lcw==","IE5pbmU=","Lk9uZQ==","IEVtYmVk","IGZheg==","dWF0aW9ucw==","IHByaXZhdGVseQ==","IGxpbmc=","W0Y=","dXNoaQ==","IGxhdW5jaGVz","KEtFWQ==","R01U","IGFpbWluZw==","cGF0aWJsZQ==","IEJpZGVu","aXc=","IERlZ3JlZQ==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","ICQoJzw=","w6FyaW9z","dG9VcHBlckNhc2U=","7KCc","IEVVUg==","IG92ZXJzaWdodA==","IHRhYmxlc3A=","VXBkYXRlcw==","Lm1ha2VkaXJz","IGh1bWlkaXR5","L3RlbXBsYXRl","QWx3YXlz","KElT","X2NlcnQ=","RGln","IHVuZGVyd2F5","b3J0b24=","IEh1cnJpY2FuZQ==","IHNwZW5kcw==","IFNlZ21lbnQ=","IGZsaWVz","IFRvZ2dsZQ==","IEx5bmNo","IHNlbnNlcw==","IEtvcw==","c2V0RW5hYmxlZA==","aXN0aWNhbGx5","IHRlc3Rlcg==","IGFkbWluaXN0cmF0b3Jz","IHRhZ2dlZA==","0JM=","IHNob3J0Y3V0","IFJlc29sdXRpb24=","IHN1cGVydmlzaW9u","IEFzaGxleQ==","VHJhY2tpbmc=","dWxhdG9yeQ==","YW5kZWw=","aXN0ZW4=","IHVucmU=","KGRpZmY=","QU5UUw==","IHJpZGVy","IHPEhQ==","LlNlcmllcw==","X29yZGVycw==","T1JJWk9OVEFM","IHJldGVudGlvbg==","44CCPC8=","LlRlc3Rz","U3lu","LnBhcnNlRG91Ymxl","a29kZQ==","emVudA==","R2VuZXJhdGlvbg==","IGFkbWl0cw==","IExlYWs=","IGFrYQ==","Uk9XUw==","IEFuZ2VsYQ==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","IG5vb24=","IHN0YXJr","IGRyYWdnZWQ=","44O844I=","IHJlY3ljbGVyVmlldw==","IFNpbGljb24=","X3N1ZmZpeA==","Sm9u","Y29jaw==","IFByb2JhYmx5","SW50cm9kdWN0aW9u","IFRlcnJvcg==","KFRoaXM=","IEJhc2ViYWxs","IGplbnRlcg==","Y2hlc3RyYQ==","Lm5hbg==","PWc=","IGNsYXJpZnk=","eWlp","cm9vdHM=","IG5vdGVib29r","IEV4Y2VwdA==","IHJpc2Vz","IEJydXNzZWxz","YXRvcmllcw==","LlVTRVI=","cm9zc292ZXI=","L3VwbG9hZA==","IEV2ZW50dWFsbHk=","Q29uc2lkZXI=","IEJvdW5k","LmlkZW50aWZpZXI=","KHVuaXR0ZXN0","IGluZmVyaW9y","IGNyYw==","IGF1dGlzbQ==","VUlBbGVydA==","IEthdmFuYXVnaA==","aW5lbWVudA==","cXVldWVSZXVzYWJsZQ==","U2tpbg==","LmJhY2tlbmQ=","LmdldFN0YXRl","dW5kaW5n","IHN1YmNsYXNz","IHJlZmluZWQ=","IGFubm95","IHJu
ZA==","RGlyZWN0b3I=","IOuC","YmVjY2E=","bW9uZ29kYg==","IENvbW1vbndlYWx0aA==","QXo=","IFRoaW5n","IHJlY29t","dW5pbmc=","CWNvbg==","CSAgICAK","ZW1pY3M=","ZWNk","IGhvcm55","QVRSSVg=","IG1pc2xlYWRpbmc=","IEJldw==","L25vZGU=","Y3N0ZGlv","4Lin","IGFkZGl0aW9ucw==","cmly","X3JlcXVlc3Rz","IHJlY2hlcmNoZQ==","c3R1ZGVudHM=","X3Bvc2l0aW9ucw==","ZXJ0ZXh0","IEV2b2x1dGlvbg==","YW5kZXo=","IGRpc3R1cmI=","a2V5dXA=","IEJ1dGxlcg==","LnJlYWRsaW5lcw==","X3N0ZGlv","IGJlZQ==","IEFyY2hpdmVz","IG5ldmVydGhlbGVzcw==","VVJJVFk=","IGRyb25lcw==","dXJpdGllcw==","IOKYhQ==","Ij4NCg0K","IGRpYWdvbmFs","IENhbmNlbGxhdGlvblRva2Vu","X0ludGVybmFs","IHJ1aW4=","LlF0","b2NyYXRpYw==","VGVs","IEFuc3dlcnM=","bWF0aWM=","IHhw","YXRlbQ==","X2pvYnM=","X2FueQ==","IHNlbmlvcnM=","IGxhbmRtYXJr","IFFMaXN0","IG1hbmV1","b3RpZnk=","LyI7Cg==","L3NlcnZlcg==","IFBoaWxvc29waA==","dXRlbmFudA==","KGlv","aHo=","IGF1dGhlbnRpY2F0ZWQ=","ZHY=","LUNvbXBhdGlibGU=","T3JpZ2luYWxseQ==","LGZ1bmN0aW9u","44CCDQo=","IFJlcHJlc2VudGF0aXZl","YXNpbHk=","aXJjdWl0","LmR0","KG1hdGg=","Lk1hcnNoYWw=","Wyw=","IENpdGllcw==","X3R1cm4=","fCkK","IGNhbnRpZGFk","YWx0ZXI=","CXVp","IE5lYnJhc2th","IHNraXJ0","LmJn","U2hhcmVkUHJlZmVyZW5jZXM=","KHN0eWxl","IGdyaWVm","Z2V3","IHNhZmVn","b2xhbmc=","X2xpc3Rz","7Js=","IGdyYW5pdGU=","IGhvdHRlc3Q=","LmpkYmM=","LkN1c3RvbWVy","IOKJpA==","IHdhYXI=","X3NjZW5l","Kycv","IEpUZXh0RmllbGQ=","IHNlYXRpbmc=","IHdlYXJz","IGAv","Q2FzZXM=","IFlvdXR1YmU=","xLFt","IGJhbGNvbg==","LEc=","TWV0YURhdGE=","LXByaWNl","U0NS","VW5pdHk=","IHRydW5r","PXtgJHs=","IGVhcnRocXVha2U=","UGFydGlhbA==","IHN1YnN0","IGVsaW1pbg==","PSInLg==","Ly8qW0A=","IHN1cGVydmlzb3I=","dnJvbGV0","X2FydGljbGU=","IHBhbmU=","Ymlv","IG1vdG9ycw==","Tk0=","RnJhbms=","IG9uaW9u","LXdvcmQ=","SXRlbUNsaWNrTGlzdGVuZXI=","IGJyaXQ=","ZW5kZW5jaWVz","Q29tcHV0ZXI=","X3J1bm5pbmc=","KGRheQ==","LWhl","KG5hbWVk","IFNhY2g=","0L7Rhw==","Y2FtcGFpZ24=","LkFic3RyYWN0","KHdyYXBwZXI=","LnBheQ==","IHV3","R2Vv","cmFpbHM=","L3NlbGVjdA==","aWNodGU=","c29ucw==","RVZFTlQ=","IGFsaW1lbnQ=","UHJvdmlkZXJz","QXdhaXQ
=","X0lOVEVSVkFM","Lm9mZg==","IGdsdXRlbg==","X2Nsb3Vk","IHdlbg==","LmV4dHJhY3Q=","CWJ1dHRvbg==","L01N","UGFydHk=","IGRlbW9ncmFwaGlj","X2Vycm5v","IGhpa2luZw==","KCcnKQo=","IixAIg==","IHdpdA==","csOh","b2xvZ2ll","IFN0eWxlcw==","IEJyb3dzZXJNb2R1bGU=","LlJlcXVlc3RNYXBwaW5n","aWNhbnM=","UEFHRQ==","Y3JlYXRpb24=","IEZlcmd1c29u","dWRlZA==","bnVtYmVycw==","IEdUSw==","IHByZXNlbnRhdGlvbnM=","IEJvYmJ5","X3NwYW4=","ZXN0eWxl","IGlsbGVnYWxseQ==","YWJlbGE=","IGJhdHRsZWZpZWxk","Y2FwYWNpdHk=","dGVycm9y","XSIpOwo=","IHdhcnJpb3I=","bGVhZGVy","IERCRw==","IFJldmVudWU=","IHZpZ2ls","IGNvdW50ZXJwYXJ0cw==","KEVycm9y","QUNURVI=","IGhlZWZ0","IHNlbGVjdGlvbnM=","emV1Zw==","dG9t","LXR3bw==","LjsK","X3N0YXRlbWVudA==","IEFpZA==","IFZ1bA==","X3JnYg==","IHByaXplcw==","IGVkaXRhYmxl","CWZvcm0=","xLFuxLE=","LmRlY29y","RGVtbw==","bGljZXM=","IGVuY3R5cGU=","cmF0dWxhdGlvbnM=","IFJPUw==","X2NoYXJz","IEphaHI=","cGFydGlhbA==","0YPRgg==","IFJlY2VpdmU=","IExhbmRz","QVBURVI=","IGNob3BwZWQ=","Li4i","IEFuYWx5","IFVJRA==","IFJhZGVvbg==","IEJlZQ==","IHVubQ==","Pk0=","LmZpbmRhbGw=","VG9rZW5pemVy","IFdIQVQ=","IHNq","RHJhd2luZw==","RXNz","T05E","irY=","KHBhY2tldA==","4oCUYnV0","SW52b2NhdGlvbg==","IE51Y2xlYXI=","PzsK","IGdyYW5kZXM=","IENyeXB0","cmVtYXJr","ICcuLi8uLi8uLi8uLi8=","IGluYWJpbGl0eQ==","bWFnaWM=","Y2F0cw==","IHNpbXVsYXRl","OiR7","aW5mbGF0ZQ==","IGVuZXI=","Ok5P","aXBsZXM=","IG1lcml0","IFJhdGVk","IGdsdWU=","L2Jsb2c=","IGdyZW4=","IHRocmlsbGVk","LkNI","dW5jYW4=","IFBSSU1BUlk=","IHBlcnNlYw==","IGZlYXJlZA==","Lk1JTg==","IFRoZWF0ZXI=","6ZI=","YXRlZ29yaWU=","5q61","IGFwcGV0aXRl","c3F1YXJl","IEFsZXhhbmQ=","LlVzZXJJZA==","X2d0","X2VudGVy","IGdyYWR1YXRlcw==","RnJhZ21lbnRNYW5hZ2Vy","QXV0aG9yaXpl","LU5MUw==","KE15","IHRyaXVtcGg=","dXN0aW5n","X1BBUkFNUw==","Q2hhcmFjdGVycw==","KDosOiw=","X0JVSUxE","TUh6","IHdhc2hlZA==","IHVuY2xl","U3RldmU=","YXJkb3du","PHN0ZGlv","X3Rlcm1z","IE1BUg==","IGhvc2U=","dWN1cw==","IENsYWlt","IFJhbXM=","IG1vZGVsQnVpbGRlcg==","IG7DqQ==","dXNlcklE","PWpzb24=","LlJlc3BvbnNlV3JpdGVy","mOiupA==","IGdydXBv"
,"LWl0","IEtP","LU1haWw=","IGNvbmZlcmVuY2Vz","SUZB","IEFzc2Fk","IHByb25vdW5jZWQ=","IGFuY2VzdG9ycw==","IFRSQUNF","IEdlRm9yY2U=","IHByaXZhdA==","cGVsbA==","ZW1vamk=","INmI","R2VucmU=","IGNvbmNlbnRyYXRlZA==","amFuZw==","TU9URQ==","IFpvb20=","dG9vbGJhcg==","IHV0dGVybHk=","IGVuY29tcGFzcw==","IFNvY2Nlcg==","IGV1cm9wZQ==","LWFpcg==","LmFuaW0=","X0NUTA==","aGVyZW50","cmV4","aW50ZXJhY3RpdmU=","44Gn44GZ","IEthcw==","IGRlc3BlcmF0ZWx5","KGFy","IGJpaw==","IHRyYXZlcnNl","ZXVycw==","UmVjeWNsZXJWaWV3","IE1hcmdhcmV0","IGhvcGVmdWw=","IE1pZw==","X01FTUJFUg==","cmVjZWl2ZXI=","TWF0Y2hlcg==","ZGVwZW5kZW50","IGV4Y2VsbGVuY2U=","0LDQtg==","TE9T","QXNwZWN0","IGFkYWxhaA==","IEVjb25vbXk=","dWxvdXNseQ==","IGV2YWx1YXRpbmc=","IGRldmlhdGlvbg==","ZXh0ZXI=","L2RhdA==","Q29scw==","IFBva2Vy","Ym9hcmRpbmc=","LkNoaWxkcmVu","QU5HTEU=","w68=","IFlvZ2E=","IGhhdGVk","QWRhbQ==","IEZDQw==","SU1BTA==","IGZhaW50","X0RJU1BMQVk=","IGV2b2x2ZQ==","IGZyaWRnZQ==","IHLDqWc=","IGVtb3Rpb25hbGx5","4oCcSWY=","YXdlaQ==","ZXJlc2E=","Jywi","QkVHSU4=","IFZBUkNIQVI=","IHhp","ZmFjdG9y","dHo=","X3BoYXNl","U0VR","KHJhbmQ=","IG1hdGhlbWF0aWNz","IGNvbnRleHRz","LWFj","IEZJRw==","IENhcHRpb24=","IFdhaXRGb3I=","LXdlc3Q=","IGZpcmVmaWdodA==","X0xFRA==","ZWN0aW9ucw==","CXRocm93cw==","IFRha2Vz","b2JyZQ==","IEF2YXRhcg==","IElubm92YXRpb24=","IGNhbGlicmF0aW9u","OnRoaXM=","X2VuY29kaW5n","IGNhbGN1bGF0aW5n","ICMjIyMjIyMjIyMjIyMjIyM=","IFByb2dyYW1z","IEhJR0g=","LmNvbmZpZ3VyZVRlc3RpbmdNb2R1bGU=","UG9seWdvbg==","X0RCRw==","Il0sDQo=","0LDQsQ==","IHNpbWlsYXJpdHk=","IHByemV6","IEZpcm0=","IG1pc3VuZGVy","IE1vdmluZw==","IE1PVg==","IHJlYWN0b3I=","UmVxdWVzdGVk","ZXhwZWN0cw==","IGVyZWN0","bGljaHQ=","b3VsZGVy","SURHRVQ=","IGRldmls","IHByb2dyYW1tZXM=","IENvbW1vbk1vZHVsZQ==","ICInIg==","KEF1dGg=","44CC77yM","IFN0YXRlZnVsV2lkZ2V0","6K6h","L29wZW4=","aW5hbGx5","LlJvdW5k","IFdpc2g=","IGh1bWFuaXRhcmlhbg==","QWNjZXNzVG9rZW4=","IFNPQw==","IHBva2Vtb24=","IHZhcG9y","X2FkZGVk","CUdldA==","c3BlbGw=","IEluaXRpYXRpdmU=","IEhFTA==","YWlycm8=","YmxlZA==","INCx0Ys=","IHNlbnNp
Ymxl","IEx1YQ==","fCgK","IGZpeHR1cmVz","IG9yZ2FzbQ==","Q3V0","dWt0","Z3Vl","IGNyZWRpYmlsaXR5","OmltYWdl","IENQUA==","LnNu","KGRlc2M=","IFJlaWQ=","LWRlZ3JlZQ==","X3NvdW5k","Q2xvbmU=","4buZ","YWtzaQ==","PiR7","X2NvbmZpcm1hdGlvbg==","IHRyb3BoeQ==","V29ya3M=","IEVsZWN0cm9uaWNz","IE1lZGl0ZXJyYW5lYW4=","X21ldHJpY3M=","IGFubm91bmNpbmc=","IERBWQ==","X3Byb3Rv","IHBlYXI=","YmFzZVVybA==","CQkJCQkJCQkK","IGNvb3JkaW5hdGlvbg==","Ok4=","LmFuaW1hdGU=","IENvdHRvbg==","X2hpdA==","4pw=","IGpldHp0","aWZ0ZXI=","KGZpZWxkcw==","b3dubG9hZA==","aWZpY2FjaW9u","LmN1ZGE=","IExpdQ==","PmVxdWFscw==","IEFjZQ==","0YDQsNC8","IFN1cGVybWFu","IEdhcmNpYQ==","IGFycmVzdHM=","YWdhcg==","IHt9KQ==","IG1hY3Jvcw==","cm91cGU=","w6p0cmU=","IHR3aXN0ZWQ=","c3RydW1lbnRz","Xygi","X3ZlcnRpY2Vz","IFRyYW5zaXRpb24=","0LjQug==","W21heA==","bWluZA==","IGFjY2Vzc1Rva2Vu","IHVubGU=","bXVz","Y29w","IEZhY3Rvcg==","IGNvbmNlZA==","IHJldHI=","LmxpbmFsZw==","LXNsaWRlcg==","b2Js","X1N0YXRpY0ZpZWxkcw==","IHpvbWJpZQ==","c2VsbGluZw==","IGNoYXA=","IHNoYWtpbmc=","IFRyYW5zbGF0ZQ==","IEFtc3RlcmRhbQ==","IEVUSA==","X0VYVEVSTg==","a2Q=","X2Rpc2M=","IHByZWNlZGluZw==","IHByaXg=","T2JqZWN0TmFtZQ==","X21vZGlmaWVk","YXJkd2FyZQ==","ID8+Ij4=","IERX","YCR7","ID8+Ij48Pw==","dXllbg==","IGRvbm5h","IHhzaQ==","ICQiew==","IERyYXdpbmc=","LG5pbA==","IG9uZGVy","Qkc=","T2JzZXJ2","IGNvbnNpZGVyYXRpb25z","Ym9hdA==","IEJhbmtz","IGluZGljdA==","LEk=","IEJsdQ==","KHZlcnNpb24=","Y2xpZW50ZQ==","b2xhbg==","TEVTUw==","YXNzZXJ0U2FtZQ==","X3ZvaWQ=","IFdBUw==","CWVudW0=","IG1peGVy","RVc=","YWZmZQ==","IGJsb3dqb2I=","dGV4dEZpZWxk","IGltbWVuc2U=","X3JlcG8=","IGdsb2JhbHM=","YW50YWdlcw==","LnRvZGF5","VGh1cnNkYXk=","IEJyaWc=","e30pCg==","IEltYWdpbmU=","KEdQSU8=","IGVzdG8=","IFByb3ZpbmNl","IE1lbnRhbA==","X2NlbGxz","IEp1bGlhbg==","LlNjcmVlbg==","IGNhbmRsZQ==","IG1vbmRl","IHZlcmc=","aXRlcmFscw==","LWxheW91dA==","R3Vlc3Q=","IHZpbmQ=","IEVjaG8=","Jyl9","IG1hbm4=","X0JPT0xFQU4=","aGFw","IG5pZ2h0bWFyZQ==","VUdI","IG5vbmV0aGVsZXNz","IGF0aGU=","IEhvbGxhbmQ=","IEJvcm4=","XE9STQ==","YW51d
A==","X2xldmVscw==","IHBldGl0ZQ==","LWFydA==","X1NIT1c=","bnVtYmVyT2Y=","X3RodW1ibmFpbA==","YW1pbnM=","IERlZmluZXM=","ICI9","LlN0YXR1c0NvZGU=","IGRpZ25pdHk=","IEJpa2U=","Lk5ld0xpbmU=","IEdsYXM=","KGxvZ2dlcg==","IGNhdGNoZXM=","dm90ZXM=","IGV4YW1pbmluZw==","L3JlZ2lzdGVy","IHNwZWNpZnlpbmc=","X2ZpeGVk","IGRyYXdpbmdz","VGhyZXNob2xk","QXg=","IEFyY2hpdGVjdHVyZQ==","KHBpZA==","V2lyZQ==","KGNvbnQ=","bGFuZQ==","TGlzdHM=","IHNwcmludA==","IGdyYW5kZmF0aGVy","X0FH","IHNjaGVkdWxpbmc=","Q0xVUw==","YXR1cml0eQ==","IGxvY2tpbmc=","W3NpemU=","X3N0eWxlcw==","IHdi","LS0+Cgo=","IHNwaW5uaW5n","X3BlbmRpbmc=","TWF0Y2hlcnM=","LktleXM=","IFBW","ZW51cw==","YW50aXM=","IGRpc2NhcmQ=","IGhhdWw=","IGVtcGly","IHBhdGh3YXk=","IG9haw==","0LzQtdC9","LWluZHVjZWQ=","IGltcGFpcg==","IENhbGdhcnk=","LmlzSGlkZGVu","ZHo=","X2luY2x1ZGU=","IGdt","ICcoJw==","UFk=","dWdnZXN0aW9ucw==","IGNvbW1vZGl0eQ==","Y3Jv","L3N1Yg==","IGdldEluc3RhbmNl","IExlZ2FjeQ==","IEtpbA==","QmFs","KHNob3J0","SW5mb3Jt","K3g=","KnI=","IEhvcGVmdWxseQ==","b3JhdGU=","IG1hY2hlbg==","IHRyZWF0eQ==","IE9yaQ==","LnB1YmxpYw==","LWhvcml6b250YWw=","IHRhY3RpYw==","IGJvcmQ=","d2FyZXM=","IGFtbW8=","IExpc3Rz","IGVxdWF0aW9ucw==","L2hlcg==","IE5TVw==","Qm91bmRpbmc=","X0NvbGxlY3Rpb25z","IGF2YWls","LkRyb3BEb3du","6LA=","IGho","IGzDoA==","LnBi","IG1lbW9yaWFs","IEFUVFI=","IGV4aGF1c3RlZA==","IHRzcA==","CXJlZGlyZWN0","IGxpa2V3aXNl","U1RFUg==","TGphdmE=","IGNvbmRlbW5lZA==","b2NhdXN0","KHN0cmljdA==","IGV4ZW1wdA==","IHNtcw==","IGV4YWdnZXI=","U1lT","IGxvdW5nZQ==","Ol4=","IHRvZGQ=","ZGVi","YXRvcmlhbA==","IFBvcnRlcg==","IHR1aXRpb24=","IGV4ZW1wbA==","IHBhcmVu","LmxpbmVUbw==","IGtpZG5leQ==","IMOnYQ==","IGN1aQ==","77yM6K+3","WEM=","IG1vxbw=","IG5vbWluYXRlZA==","bHVuZw==","SW1HdWk=","IEJ1eno=","IHN0ZXJlbw==","cG9ydGFs","cmVzYXM=","IGtsYXNz","IGRyYWZ0ZWQ=","IHByb2plY3RpbGU=","L2dwbA==","KHBhcmFtZXRlcnM=","KikK","IGFzc2lzdGVk","IE5TSW50ZWdlcg==","c2l0ZW1hcA==","Om50aA==","LlZpZXdz","LkFyZ3VtZW50UGFyc2Vy","IG1lZXI=","emllcg==","IERpZw==","PD89JA==","X3Blcm1pc3Npb24=","CUFkZA==","
b2xvZ2lh","IHNjaQ==","IGZpbmFuY2lhbGx5","IHNjcm9sbGluZw==","LmRpc3Q=","X0hBUw==","dWJ1bnR1","LnBhZ2Vz","SW5jcmU=","YnVyc2U=","IEFtYXRldXI=","5rqQ","QmxvYg==","IGNob2xlc3Rlcm9s","REVT","bWluaW11bQ==","IHJlZnVzaW5n","dW5uZWQ=","0Jw=","IFJE","LlNlcnZsZXQ=","ICovOwo=","dWRkZW4=","IHZpZXdCb3g=","IG1ldGFib2xpc20=","IHN0ZWFsaW5n","IEJldmVy","YWduZXRpYw==","VkVSUklERQ==","X0FVRElP","0YDRiw==","IGFyY2hpdmVz","LmxpbmVhcg==","PXs8","dW5jYXRlZA==","QWNjZXNzRXhjZXB0aW9u","IHBpY3R1cmVCb3g=","CXNlbGVjdA==","TGF0aXR1ZGU=","dmlzb3I=","cmVpYg==","IHBhaw==","SG9wZQ==","IEl0ZXJhYmxl","LnJlc3BvbnNlVGV4dA==","IFF1YWQ=","IEJyb29rcw==","IFRvdA==","T1BU","ZWxvbmc=","IGNvY2FpbmU=","IGFubw==","RGFu","IHBzaQ==","0LDQu9GM","LmdldENoaWxk","IFJFRg==","LWFi","IFRyaWFuZ2xl","PFRleHQ=","IENvbG9tYmlh","aW5reQ==","6Imy","KX0+Cg==","IHBsYWc=","cGluZQ==","IGJsYW5rZXQ=","IDo8Lw==","IFRyYW5zbGF0aW9u","bm92","IHBlcmZlY3Rpb24=","IENvbmZlZGVy","LnN0dWI=","LkludGVyb3BTZXJ2aWNlcw==","LlN0b3Jl","IGVucm9sbG1lbnQ=","IGRlZXI=","TW92ZW1lbnQ=","LWZyb20=","aGM=","IGV2YW5nZWw=","IElsbHVzdHI=","IHRydW1w","X1N0YXJ0","cGxhbmVz","IEJpbA==","SW5mb3M=","LXRyYW5z","IHJhbmNo","IExpbmRh","X21hcg==","UkVU","L25ldA==","TGF3","TkY=","IFByZXZlbnQ=","IGNyaWVk","IGVkdWNhdGU=","YXN0aWNz","eWk=","LkxpbmVhckxheW91dA==","TUVUSE9E","IEVn","bWFwcGVy","5pmC","LmFzYXJyYXk=","z4E=","acOnw6Nv","UmV1c2U=","X3Jldg==","IFBST0RVQ1Q=","X0NvZGU=","ICAgICANCg==","IFNFUlZJQ0U=","X2NvdmVy","LiwK","LkV4ZWN1dGVSZWFkZXI=","IERpbmluZw==","LmFyY2g=","IG90cm8=","IERpc2NvdmVyeQ==","IEtleUVycm9y","IEJlbmVmaXRz","X1NIQQ==","LlVubWFyc2hhbA==","SEVBREVS","TXV0ZXg=","QU1B","IGluaXRpYXRl","U3RheQ==","TGl0dGxl","ICgpLA==","IGRlY2VudHJhbA==","UmVzb2x1dGlvbg==","LmhlYWx0aA==","CWZjbG9zZQ==","5Lqk","IHN0YWtlaG9sZGVycw==","IGFyY2hhZQ==","RGlnaXRhbA==","bGVzY29wZQ==","X3Blbg==","IEl0ZW1TdGFjaw==","IENhbm9u","IEtlbmQ=","IMO4","X2FqYXg=","aW5ncmVkaWVudHM=","RGVsaXZlcnk=","U2VjdGlvbnM=","IGRpc2FwcG9pbnRpbmc=","IEdyZW4=","LHJl","IGRlY3J5cHQ=","b2xvZ2lj","X2ZtdA==","IFNsaWRl
cg==","bmFo","V2FzaGluZ3Rvbg==","enVuZw==","INGG","eWN6","aWV2ZXM=","LkRFQlVH","IFRJ","IGhhY2tpbmc=","IGNlbnRy","Zmxvd3M=","IGRpZFJlY2VpdmVNZW1vcnlXYXJuaW5n","IGFjY291bnRhYmlsaXR5","Q09VTlQ=","0LvQtdC80LXQvdGC","Ymxv","L2lk","IFNsb3c=","aXp6YXJk","LnJlbW92ZUV2ZW50TGlzdGVuZXI=","IOyehQ==","L0k=","aXNtYQ==","IEh1ZHNvbg==","fX0s","dW1lZA==","IHJlYWxpc2U=","dW5zYWZl","IHp1cw==","IHNob3J0YWdl","b2xpYQ==","X3ByaW9yaXR5","IGZsb29kaW5n","b3BlcmF0aW9ucw==","UG9seQ==","YWJhbg==","W2N1cg==","IGVza29ydGU=","X0RFU0NSSVBUSU9O","X25hdA==","IG1hbGljaW91cw==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","IFBhcmtz","IHRheHBheWVy","IEZvc3Rlcg==","IHNleHVhbGl0eQ==","57O7","67A=","XA0K","LnNlZWs=","0LDQvdC40Y8=","L2FydGljbGU=","6L+H","IFVocg==","IGdyYW5kbW90aGVy","IEJsZQ==","ZnVydA==","YW1iYWg=","bm90aWZpY2F0aW9ucw==","ZGVwcmVjYXRlZA==","IHVpbnRwdHI=","b2tp","KEFycmF5","IGF1dG9ub21vdXM=","IG9icg==","wq/Crw==","IGJhc2VuYW1l","IHVudmVpbGVk","c29s","IE5vdEltcGxlbWVudGVkRXJyb3I=","IGRlcHJlc3M=","XycuJA==","IFVOSVQ=","JScs","LXRhZw==","Z3JlcA==","IE1haW50ZW5hbmNl","IHdhcmZhcmU=","X1JFU09VUkNF","KHNwZWM=","KGN2","IG5hZGE=","55S1","IGNyb3dkZWQ=","QmVsb3c=","IFphY2g=","RXN0YWRv","X3ByaW1l","IHRyYWJham8=","IGluZm9ybWF0aXZl","U2NvdHQ=","IHNlcmlhbGl6ZXJz","IE5hcw==","VGh1bms=","IG1lcmN5","LC4uLgoK","IGFkZGljdA==","LmNvbnN0YW50cw==","IGRhdGFmcmFtZQ==","X3JlYXNvbg==","Z29tZXJ5","7Iq164uI64uk","IG5lZ2xlY3Q=","IExpbmVz","IG1lbWI=","X0VYRUM=","YXNzYWdl","IFlhcmQ=","e30nLg==","IGxvdHRlcnk=","dGVpbg==","X2NhbGM=","aWt1","X1JFQ09SRA==","V2Fybg==","IGhlYWx0aGllcg==","dXJlbWVudA==","IHlhcm4=","IENvcm5lcg==","KHppcA==","KGluaXQ=","IExpdA==","SFc=","c3Vic2V0","IE1G","RVRFUlM=","X3JvdA==","IGVyZQ==","IE92ZXJyaWRl","V2FsbGV0","X3Jld2FyZA==","IHNhZ2U=","c2V0VmlzaWJsZQ==","IEpzb25SZXNwb25zZQ==","SUNZ","6K+i","VmFyQ2hhcg==","YWF0","LWdyZWVu","IGlycQ==","YW5pdHk=","IHdob2V2ZXI=","X3NoYXJl","IGZvdXQ=","cm9sbHM=","IHdpbGxpbmduZXNz","LmNvbXBvbmVudEluc3RhbmNl","IGhvbm9yZWQ=
","dXJ2ZXk=","QmVy","IHJ1bm5lcnM=","IGxpZXU=","b3Jwb3I=","X3N0cnVjdHVyZQ==","QmFyQnV0dG9uSXRlbQ==","YWR4","IEJlbm5ldHQ=","IGRpbGln","IGZsdWN0","SURERU4=","X1NlbGVjdGVk","KGRpdg==","IHF1aWNrZXI=","YWxvbmc=","Z3JhcGhxbA==","aW5leg==","IGNpdGU=","IEluc3RydWN0aW9ucw==","IGluc2VydGluZw==","LmNsb3VkZmxhcmU=","Y291cG9u","ZWRMaXN0","IFN0b3Jlcw==","X21hbGxvYw==","56ym","IEF3ZXNvbWU=","IGxhbWI=","UkVTVA==","IGludGVzdA==","IE5hdmJhcg==","LmZlYXR1cmVz","SW5jcmVtZW50","IFBvbQ==","IGluc3VmZmljaWVudA==","X0xPR0lO","UExFTUVOVA==","IE9BdXRo","LklORk8=","IGV4b3RpYw==","IENBU0U=","CSAgCg==","IEdhbmQ=","dGhlc2Vz","IG5vdm8=","IERlbGw=","4oCm4oCm4oCm4oCm","X3NvZnQ=","IGFncmVlaW5n","Y2VudHM=","bG9hbg==","JyIsCg==","IFJhbg==","REVM","IG9yZ2FuaXNlZA==","K24=","IEhlYWx0aGNhcmU=","IGRldGVyaW9y","IGltcGxlbWVudGF0aW9ucw==","IGNhcm4=","ICwn","IExPQUQ=","IHBsYW50ZWQ=","5pyq","Rm9ybUNvbnRyb2w=","X21hdGNoZXM=","IHBlcmlvZGlj","X1Rv","IEpvZWw=","IGFua2xl","IG1pbGl0YW50cw==","IFdpdGNo","dW5pZm9ybQ==","dWVudGE=","T2ZXZWVr","IHBlcnBldHI=","IGludGVydmVudGlvbnM=","KHdyaXRlcg==","YW50aW5l","UHJvZ3Jlc3NCYXI=","IGxlYWd1ZXM=","Y29tcHJlc3M=","aXppb25l","IEVB","Il09Ig==","IFN0ZXBoYW4=","bWludXM=","c3N0cmVhbQ==","X2xlZA==","ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0=","IldoZW4=","QWxyZWFkeQ==","IGNvbnRlbXBs","IGF0YXU=","IENvbmdyZXNzaW9uYWw=","IHJhcHBvcnQ=","IEJvdXI=","aXNoaQ==","IHR5bQ==","IEFybWVu","INGA0LDQtw==","LWZvcm1hdA==","X1JlYWQ=","KGNvbHVtbnM=","IG5ldWU=","X2JveGVz","IFNhbmR5","XywK","IFdpemFyZA==","IG9yZGVu","IGZpbGVzeXN0ZW0=","ZmxpZ2h0","IHdzeg==","YW5jZWxlZA==","IGRhd24=","IEdzb24=","X3dhcm5pbmc=","IEljZWxhbmQ=","IHNsdXQ=","IHNldElz","X2lkZW50","IG9mZnNob3Jl","IFNrZXRjaA==","OyU=","IHRyaWJlcw==","X1NQQUNF","IG90cm9z","Q29tcGlsZXI=","CUVuZA==","IF0pLAo=","R3Jhdml0eQ==","IHRlbnNpb25z","IHNtb290aGx5","S25vdw==","b290aGluZw==","IFN0YXJ0dXA=","IEh5cA==","IGFtYXpvbg==","IFJlY2VpdmVk","emVuaWU=","654=","IENob2NvbGF0ZQ==","IMSw","Ik5v","IEFMUw==","IFB
yb2dyYW1taW5n","IERvZ3M=","IGdvb2RuZXNz","KGVycm5v","L2Vz","IHJlbW90ZWx5","IEhvb2tz","VXVpZA==","IG92ZXJseQ==","IOWQ","IGdwdQ==","IHN0aW11bHVz","KHN0ZXA=","LllvdQ==","IGJpb20=","SU5D","LmJpdHM=","KG1Db250ZXh0","IGFtZXJpY2Fu","IHRlcnJpdG9yaWVz","IE5E","XSIK","IE1hcHBpbmc=","IHByb2NlZWRpbmc=","LmF4","IHN1YnN0cmluZw==","QlVUVE9O","IEln","LXBhbmU=","IEFucw==","IGdyYWR1YXRpb24=","IHBlcnNwZWN0aXZlcw==","TWl4aW4=","X21pbnVz","CQkJCSAgICA=","IikpKQ==","bm9ybWFsaXplZA==","Lmxhc3ROYW1l","IGNsYW4=","QXNpYQ==","KE1vdXNl","cGFnaW5hdGU=","IGdpZg==","ZWxpZw==","IHBvc3RlcnM=","bmluZ3M=","IM+E","IGFwb3N0","IElocmU=","RGxsSW1wb3J0","IEVxdWFs","IGRpc3Rpbmd1aXNoZWQ=","bmVhcG9saXM=","IGJhY2tkcm9w","IEFsdGVybmF0aXZlbHk=","L21vZA==","IGxlbmQ=","IFNIT1c=","X2NvZGVz","IGF0w6k=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","LWNhc2U=","Y2h0ZQ==","IGRvbmM=","OmFkZA==","TmVnYXRpdmU=","ZmF2b3JpdGU=","IGF0dHJhY3Rpb25z","aW50Q29sb3I=","IFBpcg==","Q29ubmVsbA==","TWFuaWZlc3Q=","dGVhbXM=","IH07CgoK","IHBsdXJhbA==","IG92ZXJ0aW1l","IEV1cm9wYQ==","IEJhbmdsYWRlc2g=","KGFu","IGxpbmd1","aXRpbWU=","aW5zdG9u","LnNoYWRvdw==","56iL","IFVTUw==","U2VydmVyRXJyb3I=","SVZFUlM=","IEppbg==","IGh1bWJsZQ==","YXV0b2xvYWQ=","YXJleg==","4oCy","IEFzdHI=","aWNvbG9u","LlZpZXdNb2RlbHM=","b2Jv","IHN3aXBl","IHJlY2Vzc2lvbg==","6ZU=","IOyY","bmVyZw==","aW5ncmVkaWVudA==","bWFpbHRv","IEZhbWU=","UHJpbnRpbmc=","UGl4ZWxz","IEJhc2g=","cG9zdGE=","X0pP","IGluZmFtb3Vz","IExhbmM=","KGxvY2FsU3RvcmFnZQ==","LmJsaXQ=","IHlvdW5nZXN0","IGZpZWxkTmFtZQ==","IGNvbnRpbmc=","IHdvb2w=","IEltR3Vp","IE5TVA==","LnByZWZpeA==","VG9JbnQ=","IFNveA==","IGhhYml0YXQ=","KCJ8","PSciKw==","SU5HVE9O","X3dyYXA=","dWNrZXRz","IFdSSVRF","IG1lZGljaW5lcw==","IG1lbWJyYW5l","IEpUZXh0","IHJlcHJvZHVjdGlvbg==","X3JlY2VpdmU=","VGFibGVSb3c=","cXVldWVSZXVzYWJsZUNlbGw=","aG9va3M=","IHJlbHlpbmc=","IGRyaWxsaW5n","X0ls","KGV4Y2VwdGlvbg==","IGR1cmFiaWxpdHk=","IGhlc2l0YXRl","IGNvbXBhcnQ=","SUxJTkc=","IEVsZGVy","IGNhZmZl","IGRldmVsb3Bz","aXNoZXI=","
IHBseQ==","IHRvbA==","X1BMQVk=","IGZyaWN0aW9u","KGFsd2F5cw==","IGluZGlnZW5vdXM=","IE9wZXJh","IENhbXB1cw==","YW5jZW1lbnRz","IGxpdHRlcg==","LmxpbWl0","KFRva2Vu","ZW5pcw==","IGhpZ2hsaWdodGluZw==","IEF1Yg==","IHZhbGlkYXRvcnM=","LWhvc3Q=","d2hlZWw=","PHs=","KSkr","IE5ld3NsZXR0ZXI=","X2F2ZXJhZ2U=","IHNvZGl1bQ==","IEhpbA==","IE1pbGU=","IEF1dGhTZXJ2aWNl","U3RhdGlzdGljcw==","IE51dHJpdGlvbg==","IHNwb25zb3Jz","b3ZlbmFudA==","PT09PT09PT09PT09PT0=","LkFic29sdXRl","IGbDpQ==","SGFuZGxpbmc=","IC0tLS0tLS0K","KGRpcmVjdG9yeQ==","IikuCg==","YW5vbA==","LmJyb3dzZXI=","IEdyaW5kaW5n","IGNr","RnJlcXVlbmN5","KClbJw==","QWRqdXN0","Y3Jldw==","YWZldHk=","IGdu","IHdpdmVz","b29v","IHByb3N0aXR1","IG/DuQ==","aWZ0eQ==","IGxpdGlnYXRpb24=","IEV6","SmVmZg==","LnBr","IFNob2Vz","Y29ybg==","eXl2c3A=","IGFkYXA=","PXU=","Q09ORg==","QU5EQVJE","IGVsZXZhdG9y","YmlsbGluZw==","IGNhbmQ=","IGNhcnA=","W2ZpZWxk","LWxpYg==","c2VxdWVudGx5","Pi0=","IGxjZA==","LS0tLS0tLS0tLS0tLS0t","KCIi","IHRhY3RpY2Fs","IFJvbmFsZA==","ZXh0cg==","IEZlc3Q=","IGZ1ZXI=","LW5hdmlnYXRpb24=","IGti","Z2hvc3Q=","IGhhbmRsZUNoYW5nZQ==","X2Nscw==","KCkhPQ==","Q29tcGFyYXRvcg==","LnZt","IENveA==","X3Jldmlldw==","L0A=","X2Nvb2tpZQ==","IHJlY29nbmlzZWQ=","bGRhcA==","VGhyZWFkcw==","IFNleHVhbA==","IEJlYXJpbmc=","KFNRTA==","IHhy","IHRoaWdo","VVJMQ29ubmVjdGlvbg==","IFNVVg==","IG1Db250ZXh0","IGluY2lkZW5jZQ==","IEVzdGU=","LnN1cA==","X3Rl","KEVYSVQ=","Q01E","LyI+","QWxtb3N0","IFVuZQ==","IGFuZGVyZW4=","IFNpbmdsZXRvbg==","IGJvcmU=","VGhpbms=","IG5hcmM=","XWluaXRXaXRo","X3Nob3A=","KHN0cmF0ZWd5","IScs","aGVyaXRz","IERlc2s=","X21hY2hpbmU=","Lm5ldHR5","xLFuZGE=","PTw=","IFFS","IFNpZGViYXI=","LnNwbGl0Q29udGFpbmVy","IG9uU3VjY2Vzcw==","IG1vbmtleQ==","RW5qb3k=","KG5vZGVz","cGVjdHJ1bQ==","ICgqKA==","CVVJTlQ=","LGhlaWdodA==","IE5ldHdvcmtz","LnRhaWw=","LmxpbnNwYWNl","ICIuLi4=","TGlzdGVu","xqE=","LkNoYW5uZWw=","LWRlZmluZWQ=","UmVwZWF0","YWRqdXN0","RVJN","X2FwcGxpY2F0aW9u","LmFzc2VydE5vdE51bGw=","LXN0cmVhbQ==","IHJhYmJpdA==","IHBvc2l0aW9uaW5n","IHdva2U=","IGZpbmc=","IG11bHR
pcGxheWVy","IHJlZ2lzdGVyaW5n","dW50aWw=","w6Vu","KDo6","dXNzaW9ucw==","IHBvdGF0bw==","IEVxdWFscw==","LlN1cA==","L2FwYWNoZQ==","ICg9","LiIp","LnB0cg==","IFNwZWVjaA==","LmNsaXA=","IEdhYnJpZWw=","IG11c2ljaWFu","L2lzc3Vlcw==","LnNob3A=","IEhpZXI=","X1JFVA==","X2J1Y2tldA==","44Oh","YXZz","IHJveg==","Zmxvd2Vy","V3JpdGVCYXJyaWVy","IE1pbGFu","IGxlZ2lzbGF0dXJl","IERvbGw=","IHByb3Zpbmc=","LmNvbmNhdGVuYXRl","4pWQ","IGdjaGFy","Y2RuanM=","Ymxlcw==","IExpc3Rpbmc=","0LvQvg==","LnhyTGFiZWw=","IFNhaw==","anVzdGljZQ==","IFZhbGVudGluZQ==","dW5sZXNz","IHBpZ2Vy","KHJ1bg==","IHRlc3RpZmllZA==","QU5B","IFJlbW92ZXM=","KSkpKTsK","cmVjYXRlZA==","IFJ1bnRpbWVNZXRob2Q=","IGNvbnF1","44Ki","IHRpc3N1ZXM=","YWlsZXI=","w6l0w6k=","LVN0YXI=","IGZsYW1lcw==","LnNldEljb24=","IHN1cGVybg==","IHZhZ2luYQ==","LXZhcmlhYmxl","IHdlbGxuZXNz","Q1VS","IGJlbGxl","LmdldFJlcXVlc3Q=","IHBvY28=","YmVuaA==","YWdlbnM=","IHNwaWxs","IEp1cg==","IGRpc3BhdGNoZXI=","0L3QvtCz0L4=","ZW1vbmlj","KGRpcm5hbWU=","INCU","IHBhc3Nl","IGdhbno=","cmljaW5n","RVU=","IG11amVyZXM=","ZXNzZW4=","LmF0dHJpYnV0ZQ==","amo=","CQkgCg==","W14=","IHN0cnRvbG93ZXI=","bGV4ZXI=","ZWN0YXI=","aG90ZWw=","LnNxdWFyZQ==","IHJhbGw=","IGxvd2VyZWQ=","aGFuZGxlZA==","TWFya2V0","IFVzZXM=","aXZhcw==","LkJ1c2luZXNz","44GX44Gm","RElW","IHdhc3RlZA==","IGF2b2ly","w6pt","X0FDQ09VTlQ=","LmV0","CVNETA==","a2Fw","IGZveA==","dXBwZXQ=","e30sCg==","Iiwn","RmF2b3JpdGU=","UEVORA==","IEFFUw==","fSks","IGRlZHVjdGlvbg==","IHBvbMOtdA==","IGNvbXBvbmVudFdpbGw=","IFRlbGVyaWs=","X1NFTEY=","IG11c2U=","Q3JhZnQ=","IGRlbnM=","4KS/","KHRw","IHRhc3R5","IGJhbGFuY2Vz","IGRlZGljYXRpb24=","IFdhbGxhY2U=","IHVubGF3","XCI+XA==","IG11bQ==","LXVwZGF0ZQ==","ZW1lbnRl","IHNvZGE=","UmVwdWJsaWM=","YXNtaW5l","w6lyaWM=","KFN0YXR1cw==","IEpzb25Db252ZXJ0","IERpc2s=","LlJlZGlyZWN0","IGZpbG1pbmc=","L21vbA==","Um8=","IHZpbGxl","IHRyYWJhag==","IHN5bnRoZXNpcw==","cmVnYQ==","IHJs","U2NoZWR1bGVy","SVNIRUQ=","Y3VycmVudFVzZXI=","KGVycm9ycw==","J2g=","X2JvdA==","eGltbw==","IFVTQVJU","X3N1cGVy","X0RFQ1JFRg==","0L3QvtC5","X1JP
Vw==","IHByb21vdGVz","IFRB","IGhvcmFz","IFJlcHJlc2VudHM=","IG5hbWVvZg==","IEV4Yw==","IEdhcmFnZQ==","IHNlaW5l","LCM=","IGhlcmI=","L3Jlc291cmNlcw==","IHBsZWFkZWQ=","LnJhZGlvQnV0dG9u","IOaY","T3Bz","IE5lc3Q=","Y3N0cmluZw==","IERlZmVuY2U=","IHJlZmVyZQ==","X2xlYWY=","IHJldmVsYXRpb24=","66c=","LmV4ZWN1dGVVcGRhdGU=","X1dPUkxE","IGV4cGFucw==","KCJcIg==","amFi","IGRvdWJ0cw==","IEdlb21ldHJ5","IGludHJvZHVjZXM=","IHNlbmF0b3Jz","IGNhbmFs","LmhlbHBlcg==","IEJpb2xvZ3k=","X1NFTlM=","LnByZXZpb3Vz","LXRvdWNo","YWJpdA==","IGltcGFjdGVk","IGJyYWNrZXRz","LmRpcmVjdA==","YWNjdW0=","IHRlc3Rvc3Rlcm9uZQ==","CWFjdGlvbg==","IENoYW5jZQ==","IHBlYWtz","Q3BwQ29kZUdlbldyaXRlQmFycmllcg==","IHVuYmVsaWU=","X3ByZXNz","LlJlbA==","YW5nbGVk","L3RlbXBsYXRlcw==","LS0+DQo=","bGltZQ==","IHN1ZmZpY2llbnRseQ==","X250","RXhwYW5k","LmlzZmlsZQ==","IGlzRW1wdHk=","IHF0","IG11bGhlcg==","YWNvYg==","R2Vvcmdl","5bi4","IGFzc2lt","YXNv","IGNvbXByaXNlZA==","T1Y=","KENPTkZJRw==","CXdyaXRlcg==","IGRlc3A=","IHRlbnVyZQ==","KGNy","LnBvb2w=","IEJyZW5k","IGNlbnNvcg==","KHRpbWVvdXQ=","IHBsZWE=","LldyYXA=","IHRpZ2h0bHk=","IFdlcmU=","IElnbm9yZQ==","YWJlaQ==","IGJyaWRnZXM=","IGNvbmRlbW4=","IHNpbXBsaWNpdHk=","IHJvdXRpbmVseQ==","IGJsYWNrcw==","amI=","IFBpdA==","VXRm","IC8K","cmVsb2Fk","IHNldE9iamVjdA==","L2dsb2JhbA==","IGZhdHR5","IHNvY2tz","Q291bGRu","IGVyb3Rpc2s=","5p2h","IFByZXNzdXJl","IE1heg==","bnBvcw==","dG9sb3dlcg==","IEVR","dXRldXI=","IE1vbWVudA==","IGV0YQ==","e3stLQ==","IGdyYXBocw==","IEd1YXI=","cmluZQ==","KC0t","IEh0dHBTdGF0dXM=","KHN0dWRlbnQ=","Km5w","IHJhaWx3YXk=","IGFzeW5jaHJvbm91cw==","X3Zt","J10sJw==","LHRleHQ=","bWVyY2hhbnQ=","KEd1aWQ=","IEdyYQ==","aXhlcg==","ZmV0Y2hBbGw=","LmFkZExpc3RlbmVy","ZmxpcA==","KiQ=","PigpLA==","IHN1bmxpZ2h0","YXNzaWduZWQ=","IGFiYw==","IENPTFVNTg==","IPCfmYIKCg==","KS4uLg==","IGVuc2VtYmxl","IG5ld2xpbmU=","X1NJTkdMRQ==","aWVkYWQ=","IGRhcmtlcg==","b3JtYXA=","IGxpb24=","cGxpdHM=","IGlsbHVzdHJhdGlvbg==","IElFRUU=","IHZpc3Rh","b3VzYW5kcw==","KioqKioqKg==","IFRvbW15","IGh1ZQ==","U2Vs","IGF1cmE=","IFRoZX
JhcHk=","IGFuaW1hdG9y","LmNvbnN0cmFpbnRz","IHZhZ3Vl","KCIiKQ==","IHZpbGxhaW4=","IGJsZXNzaW5n","IHN0cmluZ0J1aWxkZXI=","IE1pc2M=","IERJUg==","ZmF4","LW5vZGU=","IFdhbGtpbmc=","IEFV","c2Vzcw==","IGdyaWxs","VkVSVElTRQ==","IEZvb2Rz","IHRvdXJuYW1lbnRz","w5M=","IE1hcnNo","IHdvbmRlcnM=","TG9uZ2l0dWRl","LkNvbW1hbmRUZXh0","PWlucHV0","X2VuY29kZXI=","cGFnZVNpemU=","IGdldFN0YXRl","Pj4K","LmdyZXk=","cG9k","IHJlYWRpbmdz","IHJlY29uc2lkZXI=","U3RhcnR1cA==","IGV4Y2Vy","LmJhbGFuY2U=","X2N5Y2xl","X1RpbWU=","TE9DQUw=","IEVGSQ==","IFJleW4=","LnNldEZvcmVncm91bmQ=","Ynlu","IGRpc2Nvbm5lY3RlZA==","QUNUSVZF","IGVtYmVkZGluZw==","aWNrZXJz","IHN1cnJvdW5kaW5ncw==","KmM=","IGdhcmFudA==","IGJm","IHdpcGU=","IOS4iw==","X1RSQQ==","YWRveA==","55U=","IHN1Y2tz","IFNvbmdz","IEFzc29jaWF0ZXM=","IEJhbGQ=","IEJyZXR0","dmVuaWxl","IHZ0","IGluYWRl","IHJlc2lnbmVk","IEdsZW5u","LnBhdHRlcm4=","LkRhdGFCaW5k","0YPQvA==","TGF5b3V0SW5mbGF0ZXI=","Y2hldA==","IFRlc3RhbWVudA==","Lm1z","IHBhdg==","IFJlYWN0RE9N","dXJkeQ==","QURBVEE=","TXU=","L2FjdGlvbnM=","IEpz","X2V4dHJhY3Q=","IEJyaW5n","Omlk","c3RydA==","aXZhdGlvbg==","IG91dHJpZ2h0","YXp1","bG95bWVudA==","0LjRjw==","YWxkbw==","IFB1Ymxpc2hlcg==","RWR1Y2F0aW9u","UGFsZXR0ZQ==","X2Rydg==","ICgkKA==","IEFuZGE=","IHJlbWVkeQ==","IGluY29uc2lzdGVudA==","dGVjdGlvbg==","IHJlZ3VsYXRvcnM=","IHNob3J0ZXN0","KHBhaXI=","IEluc3RhbGxhdGlvbg==","IGRlZmVuZGFudHM=","ICgpOw==","LWxhcmdl","TWVs","IHRocmVhdGVu","0L3Rjw==","IGZldGlzaA==","b3RpbmU=","X2RpYw==","IDwk","IHN0YWdnZXI=","c3Bp","JHJlc3BvbnNl","U2Vydg==","LWJvcm4=","am9z","CWltZw==","CVdIRVJF","X2x0","5b2T","LmNvc3Q=","IFR1ZQ==","LmxhYmVscw==","IExW","d2Nzc3RvcmU=","IEplc3Nl","4Lir","VHJhZGU=","IHByZWRlY2Vzc29y","64I=","ZmluYWxseQ==","X2dlbmVyYWw=","b2dnbGVy","X1JFR0lPTg==","bmVtZW50","IGJsb2dnZXI=","IEhhcmJvcg==","IERhdGFzZXQ=","W3c=","IGF0dGVuZGVlcw==","Lmljbw==","bWF4aW11bQ==","LlVubG9jaw==","X1NZTkM=","w6FnaW5h","IGRvd25z","IFdpaQ==","XSkv","IGtpY2tpbmc=","dW5pY2F0aW9u","IERBQw==","IElEUw==","IFJlbnRhbA==","IGN1cnJlbnRUaW1l","IHZhY2NpbmV
z","IERldmls","IG5vcnM=","X21vdXNl","dXJyZWN0aW9u","KG5v","ID4NCg==","IGFnZ3Jlc3Npb24=","IGJyZWVkaW5n","LnN5bWJvbA==","aW1hbg==","QWJzb2x1dGVQYXRo","IFdITw==","X2ZsdXNo","LXJvb3Q=","YXJuYQ==","Jk0=","IGZhdGhlcnM=","IFJvY2tldA==","aXZlYXU=","IHdhbmRlcg==","IGNvbXBvcw==","IFdhcnJpb3I=","IFNlYXQ=","IENsaW5pYw==","X2ludm9pY2U=","KGRpc3BhdGNo","UHJvZHVjdG8=","YXR1cmluZw==","b3NzaWVy","IE1BWQ==","IGRhZ2dlcg==","IHNhbml0aXplZA==","IFJGQw==","IHByb3Bo","IHVyaW5l","IGdyaW5k","IEV4cGFuZGVk","ZGVzY3JpcGNpb24=","LWZ3","IEtlcnJ5","PW5hbWU=","IGNoaw==","IG5hdGlvbmFsbHk=","IHRoZWU=","SW5j","ID8+Pg==","LlJhZGlvQnV0dG9u","Lkh0dHBTZXJ2bGV0UmVzcG9uc2U=","L1k=","CWZpZWxk","IGhvbW1l","eXBlcg==","UGh5c2ljYWw=","PXY=","IGRyaXY=","IEVycm9ycw==","IGPEgw==","RGVhdGg=","IFdJTkRPVw==","IHBvZXQ=","IFNoYXJw","IEltbXV0YWJsZQ==","CWNyZWF0ZQ==","IGdlaHQ=","IFJlZm9ybQ==","YWlzZXI=","IEluaXRpYWxpemF0aW9u","IGltbXVuaXR5","LmNvbXBvc2U=","IGxhdGVuY3k=","IExlYmFub24=","IFBhcmFk","IGZ1ZWxz","IEV4aGli","Y29o","JSI+Cg==","IENMSQ==","KWluaXRXaXRo","LVph","X0NMRUFS","cmVnbg==","IGZpbmFuY2Vz","LnN0YW5kYXJk","X0NBVEVHT1JZ","LmxpYnJhcnk=","IHRyYXZlbGVycw==","X3dw","IEV2YWx1YXRpb24=","c3RhcnRpbmc=","ICkpLAo=","ZXBpc29kZQ==","IFZhcmlhbnQ=","IGRhZW1vbg==","IEp1bGlh","IE5S","IGRvdWJsZXM=","PHY=","L3J1bnRpbWU=","IGludGVycHJldGVy","IElOREVY","IEhvbG1lcw==","X0RJTQ==","IHBhZGRsZQ==","X2V4YW1wbGU=","IGZvcmVncm91bmQ=","LnJvdXRlcw==","IHNvd2ll","U1VDQ0VTUw==","IENEQw==","IEJE","Xy0=","YXN1cmVk","V3JpdGluZw==","IGN1cnJlbnRQYWdl","KGFuc3dlcg==","IEFTQ0lJ","4Kg=","IHNvY2lhbGx5","eXl5","IFNwZWNpYWxpc3Q=","KGN1c3RvbWVy","aXN0YW5p","a2VzdA==","IE1haw==","IHRobw==","LnB0","KGNvbW1lbnQ=","IENvbnZlcnRlcg==","Z2Ft","Ymlucw==","LnRlbGU=","IFZldGVyYW5z","X0FMTE9D","0L7Qu9GM0LfQvtCy0LDRgg==","aW5uYW1vbg==","O3dpZHRo","b2hs","IGZhbnRhcw==","IHN1bmc=","CUs=","KEpzb24=","IG5laWdoYm91cmhvb2Q=","IHZvdw==","IHNpbnM=","b25hY2Np","IGVwb2Nocw==","aW1hZ2Vu","LkNoYW5nZQ==","Lm15YmF0aXM=","U2Vlaw==","V0VS","566h55CG","IGludGVyZXNz","X0V2ZW50","ZW
RlcmxhbmQ=","IHRlcnJpdG9y","IGNpdWRhZA==","dWNrZWQ=","IHNuYWNr","IHRyYW5zcG9ydGVk","IE1hbmlmZXN0","IERBVA==","X3RoZXRh","IHdvbnQ=","LgoKCgoKCgoKCgo=","irbmgIE=","IEVwaWM=","RGVjaw==","bHRyYQ==","X1pFUk8=","IFtdOw==","L3NjcmlwdHM=","IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t","5oOF","IHdlZWQ=","TkJD","IHJhcGVk","IEdhdGV3YXk=","W00=","IFRpbWVvdXQ=","ZW5jaG1hcms=","LlZpZXdNb2RlbA==","IHBvcm5vcw==","IFlh","dGhyaXRpcw==","IEZseW5u","IG1lZ2E=","YWNpbg==","IHRyaWJhbA==","LmFwcGxl","IEJsbw==","w6Ju","aWJp","cm92","IExpdmVz","Xi4=","Z2V0UmVxdWVzdA==","IEVzdGFibGlzaA==","Y29udGFpbmVycw==","IHN0YXJyaW5n","IGNlbGVicml0aWVz","IFJlbGF0aXZl","IEhlaWdodHM=","IHRxZG0=","IE5vcnRod2VzdA==","aXZpYw==","CWNs","IGF1dG9tb3RpdmU=","ZW50cmlj","IGZvcnR1bmF0ZQ==","IGZpcmVwbGFjZQ==","c2V1ZA==","bmlja25hbWU=","O3M=","X0NBTA==","aGFsdA==","KG5z","X2RlbGV0ZWQ=","RGV2ZWxvcG1lbnQ=","bW92aWVz","IGlkZW50aXRpZXM=","IHByb21wdGx5","2KfZhg==","IGFudGU=","ICInLCc=","5Y+j","aW1wc2U=","IHlhcA==","VHlwZU5hbWU=","IGJpdGNo","IGFzc29jaWF0ZXM=","SEVNRQ==","LWVtcHR5","INiq","b2x2ZXJz","IHBpc3RvbA==","U2NvcGVk","YWduZXI=","J109PSc=","IElNUA==","ZXhj","IG9taXR0ZWQ=","IG1pbmRzZXQ=","IFtdKA==","IG9ybg==","X0NBTQ==","QXZn","TG9jYWxpemVkU3RyaW5n","IE5hdHVy","IGNvbXBvc2Vy","IFBsYXlpbmc=","IG92ZXJk","X3V0Zg==","LnNr","IEZvbA==","JHBhZ2U=","LE9iamVjdA==","IGJlZXM=","YWxhcnk=","YnVsbGV0","X2xpYnJhcnk=","T2ZmZXI=","bG9jYXRlZA==","IChfLA==","4oCcSGU=","IE93bmVycw==","KSkuCg==","IGJyaQ==","LkFkbWlu","a3Rpb24=","0LvRjtGH","IGVyb3RpY2k=","Q2FuY2VsbGVk","IGFncg==","cmV2aWV3cw==","X2RtYQ==","UklDVA==","IGdmeA==","bXBp","cHBv","IC8vQA==","IHVwcGVyY2FzZQ==","IGNvbW1pdHRpbmc=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","VXNlckRhdGE=","IHZhaQ==","CXNvcnQ=","IGNvbmdyYXQ=","IGRpb3hpZGU=","0LTQsA==","LmFyZWE=","IEpvc2h1YQ==","IEtvY2g=","X2JyZWFr","YXp1cmU=","aXN0aWNhbA==","X0FMUEhB","X3ZpZXdz","IGVsaW1pbm
F0aW5n","T01C","ZW51bWVy","IEh5ZHJv","KCoo","RVJUSUNBTA==","IGluZXZpdGFibHk=","IHN0b2xl","LWVhc3Q=","aWVyb24=","IGxpbmdlcg==","L2RvYw==","xbo=","IEFscmVhZHk=","YXNpbw==","IC0tCg==","IGFiYnJldg==","IEF0b20=","aGlt","IElOU0VSVA==","c3Vu","4pmq","Q09OTkVDVA==","ZXJhdG9y","IE1hbm5pbmc=","IDoo","Z2Fz","PT4n","IHF1ZXJ5c2V0","O30NCg==","IFBvcHVsYXRpb24=","dXRlZFN0cmluZw==","cmVzaWRlbnQ=","X0ZPTlQ=","IFJlc3BvbmQ=","IG9ic2N1cmU=","IG9ic2VydmFibGU=","IENvbnRyaWJ1dG9ycw==","a29u","IE11c2s=","ZXhhbw==","IFR1Yg==","Qm9vdEFwcGxpY2F0aW9u","U09S","Lkhvcml6b250YWw=","LmZpbmRCeQ==","LnBvd2Vy","IHBvc2l0aXZlbHk=","dmVuaWVuY2U=","IEpvbmc=","IHdoaXN0bGU=","INC30L3QsNGH","IGxlbmRpbmc=","IGRlc3RydWN0aXZl","IG9uRGVsZXRl","YXV0aG9yaXphdGlvbg==","KCk7Pz4=","X29yaWdpbmFs","c2NpZW5jZQ==","YXRyYQ==","Pyw/LA==","IEFzYw==","IGNvbnZpbmNpbmc=","JGE=","b3JnZW4=","X0RhdGU=","IFByb3ZpZGU=","IGxvbmVseQ==","KScK","ZXhjaGFuZ2U=","Oz8+Cg==","LmZhc3Q=","U2FtcGxlcw==","TG9uZG9u","J10pDQo=","IElvbmlj","IHBlc3Nv","IEtuaWdodHM=","IFJhZg==","X2F0dHJz","IHJlcGVhbA==","Pk1haW4=","IE9yZGVyZWQ=","X05ldw==","PSIiPjwv","dXJscGF0dGVybnM=","QVRJT05BTA==","cGVlY2g=","IElkYWhv","IHByaW5jZXNz","IEN1c3RvbWVycw==","YXdheXM=","YWRi","IEJyeWFudA==","bm9uY2U=","IGFkdWw=","IGBgKA==","IGFmdGVybWF0aA==","PWRpY3Q=","dGV4dEJveA==","IHNwZXJt","IGNvdWdo","SG9y","4oCZUw==","LkNvbXBvbmVudFJlc291cmNlTWFuYWdlcg==","IHJlZ3VsYXRvcg==","IHBhcnRuZXJzaGlwcw==","L3Byb2plY3Rz","dHJ5cw==","IExhc2Vy","4p+p","IEZ1bms=","IHVuY29uc2Npb3Vz","IGNydXN0","IFRlYW1z","IEJhbm5lcg==","IEhvbmV5","bGVtcw==","IG1heFdpZHRo","UG9pbnRlckV4Y2VwdGlvbg==","ZmFkZU91dA==","LVN0","IHN0cmFuZ2Vycw==","X0dP","V3JpdGFibGU=","X0luZm8=","Lk5vbk51bGw=","YW5ub3RhdGlvbnM=","IEdE","IGVuZG9yc2Vk","CVRva2VuTmFtZQ==","IERlcGVuZGluZw==","WU5BTQ==","IE1ldGVvcg==","IEluY3JlYXNl","Lk1hbnk=","PT0o","LlVVSUQ=","X0tFUk5FTA==","IHZpZMOp","IHBx","IFF0R3Vp","IFZhcmlvdXM=","IGpvaG4=","X3BhdGNo","IHRvdXRlcw==","IEZhaWw=","IHN1cnZpdmluZw==","KCIkew==","ICAgICAgIA0K","IGltYWdlVXJs","LndvcmRwcmVzc
w==","c291cmNlcw==","CWdsVmVydGV4","4oCZYQ==","IGVzY29s","UkFSWQ==","IFNuYWtl","IHF1aW50","IGxhc3Rz","IEhhcm1vbg==","IGNvaWw=","IGV4cGxvaXRhdGlvbg==","bGVlbg==","Jz4iOwo=","IFNFUlZFUg==","IEhFQURFUg==","X3ZlbG9jaXR5","IEludm9rZQ==","LnRpbWVzdGFtcHM=","IHN1bGY=","SVFVRQ==","IGluaGFiaXRhbnRz","cGhpbnM=","YXp6bw==","IG1vbm8=","TGVnZW5k","IG5vbmNl","SUZF","OyI7Cg==","LWNyZWF0ZQ==","IiIsCg==","cGVybWl0","IEltbWlncmF0aW9u","IHBhdGhuYW1l","ZmZlY3RpdmU=","4pmA4pmA","IGV4YW1z","LWV2ZW50","IFRpbGw=","W21pZA==","RklY","O2NvbG9y","KE9yZGVy","X3RyYWl0cw==","IG9yZGVyQnk=","IHN1bnQ=","IE5pY2hvbGFz","2LI=","IHN1bm55","aW5lcnM=","IGFjY2Vzc2liaWxpdHk=","IEhC","LmNvbXA=","CW9w","IG1pbm9yaXRpZXM=","ZXRoZXVz","IGNvbGxhYm9yYXRpdmU=","cHJpdA==","SElS","IHdyYXBz","CWRyYXc=","Z29k","IElY","LmFwcHM=","IE5N","IGlycmVsZXZhbnQ=","IFRpZ2Vycw==","IGRpYWc=","R1Y=","IEFjY2Vzc29yaWVz","a29udA==","IHNpbXBsaWZ5","IEZhdm9yaXRl","X3Rvb2xz","KFtdKTsK","IHRvd2Vycw==","QmVz","IGh1bnRlcg==","IHNhbG9u","KGJ1ZmY=","CWRlYnVn","IG1hbHdhcmU=","TW92aW5n","LW9wdGlvbnM=","KSsn","IExPVkU=","X1NPQ0tFVA==","X2Zpbg==","IERlbGF3YXJl","IHNoZXJpZmY=","LWludmFsaWQ=","IEZVTEw=","INC/0L7QtA==","ZWxhcw==","InN0cmluZ3M=","IFJlcHJlc2VudGF0aXZlcw==","c3VyZmFjZQ==","cmVzb2x2ZWQ=","aHRkb2Nz","KSk6DQo=","IHByZXNzdXJlcw==","IG5vcm1z","IHBsYQ==","IHN1cm5hbWU=","IHBvc3RhbA==","IERlcGFydA==","IHNsYXVnaHRlcg==","b3JpZGE=","IGhlYmJlbg==","IGRlc2Fy","Y29tcGFjdA==","X0xBTkc=","5ZCI","b3BvbHk=","X3JhZA==","IFNURE1FVEhPRA==","TGF6eQ==","ICAgCQ==","Li4uLA==","KHdlYg==","IFBvbnQ=","IGV0d2Fz","IHVwd2FyZA==","X2hhdA==","IF0sCgo=","IGJhc2VVcmw=","IHdvcnJ5aW5n","LWFkZG9u","KGdldENsYXNz","U1BJ","IGNhcHR1cmluZw==","KX0sCg==","RWZmZWN0cw==","IGNvbXBldGVudA==","IGZvdWw=","IHN1YnNjcmliaW5n","IE9CSkVDVA==","SVhFTA==","YnVja3M=","KGVkZ2U=","KHBhc3M=","IFBldGVyc29u","IGJvb2Jz","IERlbGF5","X3NxdWFyZQ==","ZWxpbQ==","b3RlcnM=","X1BD","JUU=","b25jbGljaw==","IFNWRw==","IHRvcHBlZA==","IGZpc3Q=","c21hcnQ=","IFJhbHBo","KG93bmVy","am91cnM=","IGJyb256ZQ==","IEFyZ3V
tZW50RXhjZXB0aW9u","KG9yaWdpbmFs","X1NDQUxF","X2Nw","IHJlY29tbWVuZHM=","LnNldFN0eWxl","U3VyZQ==","TEFORA==","IHJlcGVhdGluZw==","TWF0dA==","LlZpc2liaWxpdHk=","IGVudGVycHJpc2Vz","LlNldHVw","KHNjZW5l","IFJlYWN0aXZl","dXJnZQ==","Ync=","LlB1dA==","cGVyc2lzdA==","LmNvb2tpZQ==","IEF1ZGk=","YHM=","c3VwcGxpZXI=","KEZvcm0=","wqE=","X3Nv","jIA=","IExlZ2lvbg==","dHRl","TmQ=","TG9zcw==","KGF0dHJz","LnNjYXR0ZXI=","IGdyb29t","IGdsaW1wc2U=","IG5haWxz","IGN1bXVsYXRpdmU=","IGZhemVy","X3NlcnZpY2Vz","Lk51bQ==","aWJpbGl0","X3Jlc29sdXRpb24=","IFR4","dW1pbml1bQ==","b3Bh","LnNjaGVkdWxl","c210cA==","4LiV","dXJyeQ==","w7xr","Z29vZw==","X3NpZ25hdHVyZQ==","LmludG8=","IFN0ZXBz","IGhvbWVvd25lcnM=","IE5TVVJM","IFBBQw==","ICAgICAgICAgICAgCgo=","PicpCg==","ZW5o","IGluY2Fw","JE1FU1M=","IG1vaW5z","IEZp","IG9mZnNlYXNvbg==","cHJlc3Npb25z","Pi48Lw==","IE1hcmtlcg==","IG9uQ2xvc2U=","TEVWRUw=","IGludGVyZmVyZQ==","IENvbGlu","IFJlc2lzdGFuY2U=","RGlzY291bnQ=","IFdlYkVsZW1lbnQ=","IGJhdGhyb29tcw==","bGVnYWN5","IENhcHR1cmU=","IGFyaXNpbmc=","ICIpOwoK","0YjQuNCx","IEluZmluaXR5","QWR2ZXJ0aXNlbWVudHM=","IENvbWluZw==","IFBST0pFQ1Q=","X1BST1RPQ09M","IHVzZURpc3BhdGNo","LmNoYW5uZWxz","IENpdGl6ZW5z","ZW50cmU=","X21w","LkNvbnN0YW50cw==","IFNlcmlhbGl6ZQ==","X0lOQw==","KGx1YQ==","IGNsYXNo","X3dpdGhvdXQ=","LmtleVNldA==","IHJlY2VpdmVycw==","5pa55rOV","KG1lbQ==","IEhvcml6b250YWw=","IGNvY2t0YWls","IGNob29zZXM=","LklubmVy","IHJlbGllZA==","b3VudGVy","ICJe","IHRlbmFudHM=","ImA=","X1BN","ZXJzZWQ=","IH19Ij48Lw==","IHByb3ZpbmNlcw==","X1JBVw==","XEFwcA==","IHByb3N0aXR1ZXI=","X2dhaW4=","LnRlbmNlbnQ=","ZmZlY3Rz","KHBr","c2t1","IHVzYWJsZQ==","RVJWRUQ=","IGFudGVubmE=","aGVh","cGxpc3Q=","X1BMVUdJTg==","0YHQuw==","Lmxvb2t1cA==","4buB","IGVubGFyZw==","IHBpc3M=","SGFt","aW1hcA==","IGludmFsaWRhdGU=","IHNpbGs=","PSIjIj4K","IEdyYXNz","IEdvYWw=","X3BkZg==","SGFuZGxlcnM=","IHN0YWNrcw==","LmdldEZ1bGxZZWFy","PVtdOwo=","6L2m","LFY=","KHNwbGl0","0YPQvdC6","IGJha2VjYQ==","IH4vLg==","cGV6","dGFpbHM=","IEdsZW4=","IHNldEltYWdl","IENvbWlj","QkxPQ0s=","CVRo
aXM=","b2FkZXI=","IGNhcGl0YWxpc3Q=","X1NURVA=","KEJvb2xlYW4=","IENvcnJlY3Q=","cmluYQ==","IGNvbmNhdGVu","5a6e","KCk6Cgo=","IHVuYW5pbQ==","bGxp","YWxhcnM=","LW5l","IGRpdm9y","IEtpY2tzdGFydGVy","XS5f","PG51bWJlcg==","L21lbnU=","R1JBUEg=","dmlzaXRvcg==","IGltcHJvcGVy","X05FWFQ=","IGJpc2E=","YmFja2dyb3VuZENvbG9y","L2lucHV0","IG1vaQ==","R29hbA==","bGlxdQ==","IG1pc2NvbmR1Y3Q=","IGNvbXByaXNlcw==","YXducw==","IFBpZQ==","cmFpcw==","cm9sZXVt","IGN1cnNl","eXU=","X3BvbGw=","LmN1cnJlbnRVc2Vy","RVNI","XSlb","IHN0b3J5dA==","KT87Cg==","Kj0=","IEJ1cmc=","L2xheW91dA==","X2JhY2tlbmQ=","Oz8+PC8=","IFdoYXRzQXBw","IE1vdW50YWlucw==","dmlzaW9ucw==","Zmx1ZW5jZQ==","LmNyZWF0ZUNvbXBvbmVudA==","IFBzeQ==","Zm9yZ2V0","c3J2","X0NPTVBPTkVOVA==","IE5leHVz","ICl7","ZW5kaQ==","SU1VTQ==","IEdG","57uE","4oCUdGhhdA==","Yms=","TW96aWxsYQ==","IGRlZmVuZGVycw==","LXNldHRpbmdz","aW1taW5n","IE9QVA==","IENX","IHRoYXRz","IE9wZW5pbmc=","UmVsZWFzZWQ=","bnBt","IGhycw==","IGdyb3VwZWQ=","LyIuJA==","IEhpc3RvcmljYWw=","KCQiew==","b3ZpYw==","KHNpZ24=","IFBob3RvZ3JhcGh5","IHNpZ251cA==","X0FSQ0g=","LnRlc3RuZw==","L2FuZ3VsYXI=","UmVzdENvbnRyb2xsZXI=","c2hpdA==","dWxsZQ==","LnBhdXNl","KFtdLA==","KHF1ZXN0aW9u","aWxvZ3k=","IEV1Zw==","LWxvY2Fs","IGt2aW4=","IHJlc2VydmF0aW9ucw==","b2JpYQ==","IHN1YnNpZGlhcnk=","IGFjY3VtdWxhdGVk","IFFWYXJpYW50","IEJKUA==","IE5vcm1hbg==","IEludGVncmF0aW9u","LlZhcmlhYmxl","KFJlc291cmNl","KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg==","RXhwb3Nl","ICd9","LkNPTE9S","INGH0LjRgQ==","QWpheA==","IHRocnU=","TW92aWVz","IHByb3Bvc2l0aW9u","L3RoZW1l","TW9kZWxQcm9wZXJ0eQ==","IEF3cw==","IEFuZHJlYQ==","IE1lcmdl","LmZpbmlzaA==","KHJlcXVpcmVk","IFByZWw=","ZWxlZA==","5pON5L2c","LlRSQQ==","TUFT","IHJlYWxpc2Vk","cm9pZHM=","CWZu","cmg=","LiI8Lw==","dmlkaWE=","IGRlcHVpcw==","IEJW","TG4=","IGx1c3Q=","QXNj","CQkJCQkJCSA=","aXNsZQ==","LWNhcmU=","X0lOVg==","IERyZXc=","IHdoYXRz","IENhcGFjaXR5","UGFybQ==","X21vbml0b3I=","LnN0dWRlbnQ=","IFJOQQ==","LmVuZHN3aXRo","Ymlo","IE1MQg==","L3Byb2plY3Q=","IHJlc3Rpbmc=","c2VwYXJ
hdG9y","eWQ=","ZXJ0aWE=","IG1vbml0b3JlZA==","Ij4qPC8=","LkZD","IE5FV1M=","IENhbGxz","IGFkZXF1","Q2hlY2tpbmc=","ZXN0aW1hdGU=","IHJlY2FsbHM=","X2ZyZXF1ZW5jeQ==","IHVzZVJlZg==","IEdyb3Zl","IFhpYQ==","IMOt","ZXNzZW5nZXI=","LWNvc3Q=","LmZj","IEt1bWFy","LkZvY3Vz","ZWxsYW5lb3Vz","LkFsZXJ0","ZWF4","IG9yY2g=","LnBt","IGxhbmRsb3Jk","KHBvcA==","X2FjdHVhbA==","IExC","R3JhbmQ=","LnJlbmRlcmVy","IGxvYg==","Y3VzdG9tZXJz","IGNhcHR1cmVz","V0lORE9X","IGRvY2g=","IGFwb2xvZ3k=","IEphbWE=","QFs=","LnRha2U=","bm9vcA==","IGx1bQ==","IGRpZmZlcmVudGlhbA==","IGVmZmljYWN5","CUlO","X0JPWA==","X3Nk","X3J0","Y29kZXI=","b3VuY2VtZW50","aGFzQ2xhc3M=","IHJpc2t5","IEVzdGFkbw==","LURE","IENhcnNvbg==","U3VmZml4","IHRvZGE=","IFRyYWNrZXI=","IERlbGVnYXRl","YCxg","IFBhcmtpbmc=","IG5lcg==","YXpv","IEZpbGVJbnB1dFN0cmVhbQ==","IHJlY291bnQ=","cWk=","Y2tlbg==","IHNvY2lhbGlzdA==","IEludm9pY2U=","INC/0YDQvg==","JSIs","ZW5uZW4=","IHZpdm8=","IG9yZ2FuaXphdGlvbmFs","IHVuY29tbW9u","dXRhcg==","IGh1bGw=","VHVlc2RheQ==","IGFzc2Vzc21lbnRz","KGFwcGxpY2F0aW9u","IHByZW1pc2U=","U3RhcnRUaW1l","IGRr","IGludGVyZmVy","IFF1ZWVuc2xhbmQ=","IGNyZWRlbnRpYWw=","IGxlaXN1cmU=","WVo=","IENtZA==","QlVT","dXNhbg==","CXZlYw==","aW9sb2dpY2Fs","IExvdHM=","IGVubGlnaHQ=","IGZyZXNobWFu","IENPTU1BTkQ=","IEFjdGlvbkxpc3RlbmVy","dXRt","YXJpdXM=","VHdpZw==","IHN3ZXB0","LXRvb2w=","xJA=","Y2hhcHRlcg==","LWdyYWRl","IGN1cmlvc2l0eQ==","IHN1c3RhaW5hYmlsaXR5","IE1pbmVjcmFmdA==","d2VuZA==","SWZFeGlzdHM=","IEN1bHR1cmFs","IFNhY3JhbWVudG8=","TGF5ZXJz","U3Vic2NyaWJlcg==","LkdyYXBo","IGxt","ZXN0eQ==","YWR2ZXJ0","JHA=","IEhvY2tleQ==","IERFVA==","c2V0VGl0bGU=","eWFuZw==","IGJhYmU=","ZWxzaXVz","VHJhdmVs","IG1lc21v","KG1hcFN0YXRlVG9Qcm9wcw==","X1NFTA==","LXBvcA==","IGVtaXNzaW9u","4oCZLgoK","LnN3aXRjaA==","b3Rpb25z","LnBob3Rv","TFY=","YW1vZGVs","IHdvcmR0","SUdHRVI=","IFRPREFZ","T0xT","X0lERU5U","IGNvbW1lbnRpbmc=","RGF0b3M=","IGhpbGFyaW91cw==","KGFueQ==","IGRhbXA=","LWNvbnRyb2xsZWQ=","ICI8Pw==","X2JsYWNr","TmV0QmFy","LnNldFNlbGVjdGVk","Q3Nz","IHF1YXJ0","IG93bmluZw==","IEZJR
UxE","LnJlbHU=","IGxpcw==","7Jqw","LlJFTEFURUQ=","IGxvaw==","IEZsaXA=","IHByZXN0aWdpb3Vz","IGRn","IElucHV0U3RyZWFtUmVhZGVy","IHVzdQ==","IGdpcg==","IGFuYQ==","X3B5","dW5uZWw=","CXN5c3RlbQ==","IGNvYXRpbmc=","IEdlbnJl","ZXJybw==","IENMSUVOVA==","IHN0cmV0Y2hlZA==","Lkhhc1ZhbHVl","Ozs7Ozs7Ozs=","54mI","IGZpbmFscw==","LmdldENoaWxkcmVu","IC0tfX0K","IENvd2JveXM=","IEVkaW5idXJnaA==","IFBsYXph","YWJlbg==","QXJ0aXN0","VVJB","IEh1Z2hlcw==","b2JiaWVz","X25vaXNl","Lk9iamVjdHM=","RXhwcmVzc2lvbnM=","IGFudGhyb3A=","JykpDQo=","KS4i","Y3JpcHRpdmU=","IHNhbG1vbg==","IHdhc3Q=","cmhv","LnRpY2s=","IGV4cGxvcmVz","IEFsZ29yaXRobQ==","Q2hhckFycmF5","4LiE","X1BBQ0tFVA==","SkU=","Il1dOwo=","Lm5vdGU=","QmFja2luZw==","IEhvbGRlcg==","cmVpY2g=","IFppb24=","L2dy","ICAgICAgICAgICAgICAgICAgIAo=","TW90aW9u","IFRyaWJ1bmU=","IGNyaXRpY2FsbHk=","IENSTQ==","IGJsb3dpbmc=","IGNvbW1pc3Npb25lcg==","Sm9l","IFRlbGV2aXNpb24=","CXByZQ==","IFRSQU4=","IFZpa2luZ3M=","IEJFVA==","d291bGQ=","LkNhcHRpb24=","IGJhY29u","aG1h","bWVyZ2Vk","IHN1YnNjcmlwdGlvbnM=","b2NjdXBpZWQ=","TGl2ZURhdGE=","IGFsbG93YW5jZQ==","cmlnZXNpbWFs","ZGRk","LmxvZ291dA==","IFRhbmc=","IHdhcm10aA==","TW9kZWxJbmRleA==","IFByYQ==","IHNjZW50","IGhhY2tlcnM=","IGlsbHVzdHJhdGU=","SWNo","IGRpYXM=","Q0FTRQ==","IFNjaQ==","JHVybA==","IE1PRFVMRQ==","dXNob3J0","bGllcnM=","IERldmljZXM=","bWluc3Rlcg==","dW5hbWU=","IHVucg==","RXhhbXBsZXM=","IHJpc2Vu","LmFp","Y2hyb20=","X3dvcmtlcg==","IGFsaWFzZXM=","TW91c2VFdmVudA==","IHNldHRlcg==","IFB1cnBsZQ==","Sm9pbkNvbHVtbg==","PWU=","VEhPT0s=","IFRvdw==","IENydXNoaW5n","IEplZGk=","IEdyaWZmaW4=","IGtvcw==","X0ZT","aW5nZXM=","c29sZXM=","KG5hbWVz","IEJpZA==","LXBvd2VyZWQ=","TXVsdA==","YW1pbGlhcg==","LmNsZWFuZWQ=","IFppbW1lcg==","CWNsZWFy","IHVuc3VwcG9ydGVk","Q2FsbGFibGU=","IHJlcHM=","YWx0ZXJu","X1JFUE9SVA==","LmdldENvbHVtbkluZGV4","X1NUT1JF","IHN1Y2h0","c3VidGl0bGU=","IHBlcmQ=","q5g=","Lk5PVA==","fT48Lw==","OmQ=","bWRp","YmluZFZhbHVl","IERlY2lzaW9u","UmV0dXJuVmFsdWU=","LGluZGV4","eGZj","IHNlcnVt","Z2V0RmllbGQ=","Q29ubmVjdGlvblN0cmluZw
==","LW9iamVjdA==","LnJlY3Y=","IHVuZGVyZ3JhZHVhdGU=","LkluZnJhc3RydWN0dXJl","IEthYg==","IGFkdmlzb3J5","LXRyZWU=","IG11ZQ==","aW5mb3Jt","LmVtYmVk","IGVycm9yQ29kZQ==","bWljcm8=","IHNwYXJrZWQ=","IGltYWdlcnk=","Y29uYw==","X21pc3Npbmc=","IHN1cnBsdXM=","S1M=","CVJUSE9PSw==","VGVsbA==","cml1bQ==","IFJhZGl1cw==","cmlrYQ==","bG9zaW9u","IEhlcm4=","R2FtbWE=","IEZlZQ==","IE5hbWVk","IENhbnlvbg==","IEpTT05BcnJheQ==","IHp3ZWk=","IFNTSA==","IHNlcnZhbnQ=","Y29hbA==","IGRlbnlpbmc=","IHNwbGl0cw==","SW5jb3JyZWN0","IHRveA==","IEFuYWx5c3Q=","IGFjY3JlZA==","dWJsZQ==","IHd0","IFRyaWFs","LmV4dGVuc2lvbg==","IENhcmVlcg==","IHNlY3VyaW5n","IExpbA==","IHByb2plY3Rpb25z","IHllYXN0","TWFkZQ==","IGZvdW5kYXRpb25z","YWNpZmlj","LnZvbHVtZQ==","IG1pcnJvcnM=","IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyM=","IHZpb2xhdGU=","YXJzZXJz","IHNvY2lv","IHRraW50ZXI=","IExJTks=","LmdldFNpemU=","IFdob2xl","KXZpZXdEaWRMb2Fk","CWRvbmU=","dWRlYXU=","XCI+PC8=","QW5kcmV3","ZXJi","IGbDtg==","LmNsdXN0ZXI=","IGRpc2NvdXJzZQ==","X0RFRklO","IHB1ZWRlbg==","IExPVw==","LmF2","IHByZWNh","IHF1bw==","IHZlbG9j","LCcn","IHh5eg==","CXBhZGRpbmc=","IHRvbWF0b2Vz","IEJlbnQ=","X2N1cnI=","TlNEYXRl","IGdldEN1cnJlbnQ=","IFtg","V2VkbmVzZGF5","LkJhcg==","IFZvdXM=","aW56","IFF1aW5u","ZXhjZWw=","ZG9z","IG91dGRhdGVk","T1VUSA==","IE1ha2Vy","ZXBlbmRlbmN5","IGR1bGw=","IFdpbm4=","b2dl","Y2xhdmU=","IG5vdmE=","IGF2YWw=","Q2FwdA==","IFNwb3RpZnk=","IGp1bA==","KXRhYmxlVmlldw==","IGZpbGVuYW1lcw==","IGVza29ydA==","5ZGo","IHNrZXc=","dGVyaW9y","IGZpbmFuYw==","IHRhYmxh","IFVJQg==","ICgpOg==","IERvY2tlcg==","cGVyY2VudGFnZQ==","TWVldA==","aWNoaQ==","IGludGVyaW0=","ICc9Jw==","LkpTT05PYmplY3Q=","KGZpZA==","IGRvd250","IHRyYW5zaWVudA==","IFN0ZXBo","IGlnbm9yYW5jZQ==","IENvZGVz","PScnLA==","IElDRQ==","IHRyYW5xdQ==","IEV4dGVuZGVk","IG11bmQ=","IEhPTUU=","IGtpbG9tZXRlcnM=","IGltYWdlbg==","b3V4","KHN6","WW91bmc=","dWZmZWQ=","IFdha2U=","IGFpZGU=","UFJPQw==","IFJhdA==","IExpdGg=","YmFydA==","IEFycmFuZ2U=","cHJvbXB0
","0KM=","KGN0","IEludGVydmFs","ZGVwdA==","RGFuaWVs","IGZpbGxz","LnRlbnNvcg==","KHRyaW0=","IGplYWxvdXM=","RmVi","XENvbW1vbg==","IGFtZW5kbWVudHM=","X29wZXJhdG9y","X2N1c3RvbWl6ZQ==","IF1d","IGJu","IGRpc2FwcG9pbnRtZW50","IG1pbGxlbm4=","LndoZW4=","IG9iZXk=","IG9mZmVuZGVycw==","V2lsZA==","IGNlbGxGb3I=","IGFwcGFyYXR1cw==","LmFmdGVy","IEVQUw==","IGFkb3JhYmxl","b3BlcmFuZA==","KGxpc3RlbmVy","dmVhbA==","ICko","IGNhcmRpb3Zhc2N1bGFy","dXBsaWNhdGVz","cmlzdG9s","IHJlZnVzZXM=","KFFXaWRnZXQ=","IGVsZW1lbnRv","TnVtYmVyT2Y=","LmRlbGF5","Lmdyb3Vwcw==","Ij4nKw==","5Z2A","YWNlbmN5","KFVSTA==","X2hhbGY=","PWw=","IGxpc3RWaWV3","KHNlY3Rpb24=","LnRvQXJyYXk=","Ky8=","IFJvZHJpZ3Vleg==","aXN0cmVhbQ==","IGVsaWdpYmlsaXR5","Ojot","Lm5ld0luc3RhbmNl","UEI=","IEFzc2V0cw==","IENvbXBvc2l0ZQ==","IExhYnM=","IEhhbWFz","KyspOwo=","IGJsaw==","IE5lbw==","THVj","QGxvZ2lu","IHVuYXdhcmU=","Lm1ldA==","X1JFTEVBU0U=","KFNU","QU1JTA==","cmlrZQ==","ICgpewo=","KHNwcmludGY=","IEFjY291bnRz","IFZJRVc=","IEFq","44Kw","IHdoaXNr","IGlkaQ==","IHJvZGU=","IGlobg==","IEVsZW1lbnRhcnk=","UXR5","IGludHJpZ3Vpbmc=","IOWk","Sm9icw==","CW9mZnNldA==","IEFobWVk","IFRhbGliYW4=","IOiOt+WPlg==","IGluamVjdGVk","LkF1dGhlbnRpY2F0aW9u","X2xpbmVhcg==","LkRlY2ltYWw=","IGFwcGxlcw==","IHNoYXJlaG9sZGVycw==","IGJha2Vk","LmRpZmY=","IEVkZGll","b2tlcnM=","IGNvbmZyb250ZWQ=","dm9pY2Vz","IHR1cw==","IFNwaW4=","Tk9ERQ==","X1Vu","Q1RY","L2dvb2dsZQ==","VGVtcGVyYXR1cmU=","ICcnKS4=","IG1hZ25pZmljZW50","IHN0YXJ0SW5kZXg=","c2VtYmxlcw==","QW55b25l","ems=","ZWhlbg==","IERhbWU=","LnN0cmljdA==","IHJlcGxhY2Vz","IGxpbmViYWNr","IHB1c2hlcw==","IGNoZWVr","IFNoaQ==","X0JZVEVT","UkVB","4bqjbg==","X0NPTk5FQ1RJT04=","R2F0ZXdheQ==","IFRyYXZpcw==","IEFY","IEJhc2ljYWxseQ==","IFVwZ3JhZGU=","4Ko=","dGhlbWVz","ZXJtbw==","a29y","RmVtYWxl","X2F0dGFjaA==","IOyCrOyaqQ==","IHBveg==","PT09PT09PT09PT09PT0K","KHN5bWJvbA==","IFNlY3Rvcg==","X18pCgo=","X3BhZGRpbmc=","77yaIg==","IGZhYnM=","IHJhbmdlZA==","c2V0TmFtZQ==","IHBlcnJvcg==","4pc=","IEZpbGVSZWFkZXI=","IGZ1bGZpbGxlZA==","X0N1cnJlbnQ=",
"IGRvbWluYXRl","IHNtdWdn","UG9zdE1hcHBpbmc=","X2ZvcmNl","IGJsb2M=","IEdpYW50","KHZpZGVv","IENV","U3lzdGVtU2VydmljZQ==","IGVsZg==","IGtvbnRha3Q=","66o=","a2Vlcw==","Z3Rr","IHBhcmFtSW50","IG1hcmt1cA==","dWFsZXM=","IGFjY291bnRlZA==","IGdhbmdiYW5n","UllQVA==","IFdyb25n","IGNyZWRpdGVk","IE1FU1NBR0U=","IGZsYXdz","IGJidw==","IG1ldGFib2xpYw==","IE9FTQ==","L2V2ZW50","KENvbGxlY3RvcnM=","bW9udG9u","YXBwZWFy","IG9wdGVk","IGNoZWF0","IGRhdg==","IFByb2NlZWQ=","IOq4","YW5rZWQ=","0LjQtw==","YW5zaw==","IEhhbmc=","IENsZXI=","IGRpc2d1","IGNtYXA=","LmNsanM=","IGF1bWVudA==","bGV6","IEpvaW5lZA==","X3JlY2VpdmVk","IGFlcmlhbA==","b3RlbA==","IGdyZWV0","InM=","IEdlbmVzaXM=","IENhbGlm","cGFuaW9u","IHRhaWxvcmVk","bWFwcGluZw==","YW5kRXhwZWN0","LnRyYWNr","YXRvbXk=","IE93","dWxsYWg=","Llllcw==","IFNpbXBsZU5hbWU=","ZGJo","J2Vu","IG5vbnNlbnNl","IHBoaWxvc29waGljYWw=","KGdldENvbnRleHQ=","IGlzc28=","IEFDRQ==","c3RhcnREYXRl","IGLEmWQ=","IEFVVEhPUg==","IEdsb2Jl","IGluc2VjdHM=","X0Fs","dXNoaW5n","6K6w","L0hvbWU=","IExvY2FsRGF0ZQ==","bmVlZGVk","aGVzaXZl","IGlsbHVzaW9u","5LqM","IHRyYXQ=","eG8=","L2RldGFpbA==","X01BVENI","IGJyb2FkYmFuZA==","IHdhbA==","IElsbGVnYWxTdGF0ZUV4Y2VwdGlvbg==","SVJFQ1RJT04=","IG5vcnRoZWFzdA==","ZXNpdW0=","IENsaWVudGU=","dWxhbmNl","bnR5","IHRlY24=","RGV2aWNlcw==","IGdyYWlucw==","IE9n","IFNFTA==","dWRpYW50","ICsrOwo=","IGV4cGxhbmF0aW9ucw==","b2Njbw==","IGRpZXRz","IGNvaG9ydA==","KGNvbnRyb2xsZXI=","Lkl0ZXJhdG9y","LXJpY2g=","cm9jZXNz","R0Q=","IGNhcmJvaHlkcg==","IGZyaWVk","IEVtcGxveW1lbnQ=","7J6l","IExlb25hcmQ=","XyR7","cXVhcmVz","IGNvbXBhbmlvbnM=","IHBhcmlz","IHN0aW11bGF0aW9u","IFpvbw==","IHJlbGV2YW5jZQ==","IENvbG91cg==","IHNwZWFy","b3Rpb25hbA==","IExpdGU=","IEtvc3Rlbg==","IMOz","X2F0dGFjaG1lbnQ=","b3JwaGlj","IGRhbWl0","IGRsZw==","IHRocml2ZQ==","Q0hBTkdF","IEFwcGFyZW50bHk=","IGF0dWFs","IHJvb3RlZA==","KGltYWdlcw==","YXdp","YXJpYXQ=","IGNoZXJyeQ==","U1RBVElD","bW50","IFVzZXJJZA==","aWxsZXQ=","IEhpc3Bhbmlj","IG5haw==","IGNlbnRybw==","IGRpbXM=","X2luaXRpYWxpemU=","xLFr","IENlbnRlcnM=","UkVO","
IGV2b2x1dGlvbmFyeQ==","IFRvcGljcw==","X2RhbWFnZQ==","ZW1lcg==","IHJ1bmQ=","IHB1bmlzaGVk","IGN1Ymlj","ZmFpcg==","W107Cgo=","IGluc3RhbnRpYXRl","IG92ZXJzZWU=","LWRlbGV0ZQ==","dW50ZWVy","c3RhcnRUaW1l","IFBpcGVsaW5l","X0dBTUU=","IENpcg==","CU51bGw=","LkZvcm1hdHRpbmc=","dWN1bWJlcg==","IFJpZGU=","IHpvbw==","IGNoZWNrZXI=","5ZCM","PUM=","IGdyaXQ=","Iik7Ly8=","X3h5","IERlY2xhcmF0aW9u","IGNhbGxhYmxl","Rm9v","IExpc3RJdGVt","IGluYWNjdXI=","bWxpbg==","CURhdGE=","IGV2b2x2aW5n","YXdhbg==","IGNhZmU=","Zm9saw==","X0lEWA==","IEFueXRoaW5n","IFBhbGVzdGluZQ==","IEdyaWRWaWV3","IGNvbG9ueQ==","IEdlcm1hbnM=","KCs=","LnBpZA==","LmpzeA==","IFN1cGVyaW9y","Q2hyaXN0aWFu","IExlY3Q=","CUdhbWU=","IGluc3RydW1lbnRhbA==","QW5pbWF0aW9ucw==","0LTQsNC7","IE1vc2Vz","CQkNCgkJDQo=","enM=","a3Rl","5Lia","X0RJU1Q=","Yml0bWFw","ZEI=","IHBlcnNpc3RlbmNl","0YDQvtGB","JGw=","QnJvbg==","IHt8","X2NoYXJ0","IENvbnN1bQ==","IGhlbXA=","ICIpKQo=","IGF0dGFja2Vycw==","IGtub3dsZWRnZWFibGU=","IGNldA==","IHZpcnVzZXM=","J0k=","IHBpdGNoZXI=","IHN3ZWVwaW5n","PWxpc3Q=","YXB0b3Bz","LmRlcHRo","IGluc3RydWN0ZWQ=","IFJ1cw==","YmVuaGF2bg==","INC40L0=","U3BvcnRz","IG9uc2V0","5p2D","LlJFRA==","X3Np","IFBTVA==","Lm9uQ2hhbmdl","PnRhZw==","IFJvaA==","X2NoYXJhY3Rlcg==","IExhd3M=","IEJhY2hlbG9y","X3N3YXA=","LnJlYWN0aXZleA==","IHJld2FyZGluZw==","TWVkaXVt","LVs=","IFJlY2VudGx5","Sm9pbnQ=","cGFydGl0aW9u","IE1pbnV0ZXM=","IGluZG8=","IGFic29yYmVk","IEdO","X0lORA==","IHNhYmVy","U3Bhd24=","b3V0cHV0cw==","IEplZmZyZXk=","IG1lZGlldmFs","aGVk","R3VpZGU=","IHBzeWNobw==","IGdsYW0=","RWxpbQ==","w6RkY2hlbg==","X3BsYWlu","IFNhdQ==","LWZvdXI=","IGFuYWx5emluZw==","UVVFUlk=","IHRvbWF0bw==","X2J1dHRvbnM=","VkVO","LnNldFN0YXR1cw==","LlVybA==","KwoK","IGNvbXBsYWluaW5n","ZGVncmVl","Y29uZmlybWVk","IHN1YnQ=","cGFyc2Vk","IHRvcnF1ZQ==","IHRyb3VibGVk","IFRBUkdFVA==","IHRyYWRlbWFya3M=","IENvb3JkaW5hdGU=","IFZpdg==","IC8vfQoK","IGFwcsOocw==","LmdldFBvc2l0aW9u","KEtleUNvZGU=","IFNpbHZh","IG1ldGVvcg==","IGVuZG9yc2VtZW50","T3ZlcnZpZXc=","IFBvc3M=","LkluamVjdA==","IGV2ZW5seQ==",
"IHZpc3VhbGl6YXRpb24=","IHdjaGFy","IEhETUk=","IGZ1bmN0","aWNrbmFtZQ==","JywnJywn","IGZvcndhcmRz","TWFuYWdlZE9iamVjdA==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","CXNlcnZlcg==","IE91dGxvb2s=","IENocm9uaWNsZQ==","IGR1YmJlZA==","IGRvaw==","IFdlYXI=","LkFM","cGFyZW4=","LkludGVyZmFjZQ==","SW50ZXJmYWNlcw==","LmNvZA==","IGRpYg==","Lkdsb2JhbGl6YXRpb24=","IEFjYWRlbWlj","IGFzc21z","QXV0b20=","IGx3","IE5X","ICYmDQo=","IHByb2JsZW1h","IE1hbnVmYWN0dXJpbmc=","bGltaXRz","LW1vYmlsZQ==","IGZpbG1l","L21hcA==","IGRvaXQ=","IEluaw==","IHN1ZWQ=","LmFycg==","IHVuZGVybWlu","IFByb2M=","Y3JvbGxWaWV3","X18k","IHNpZGV3YWxr","KHRoYXQ=","4Li3","W3E=","Z3JhbW1hcg==","IHTDqw==","cXVpdG8=","IHNwaXJhbA==","ZXh0ZW5kZWQ=","IGZvY2Fs","IGRpZ2dpbmc=","cGFz","IFRhbGw=","LnByb3h5","aXR1cmVz","VFJBQ1Q=","IFJlYWxt","IGZlZGVy","IG9yaWVudGVk","IEFsdGVybmF0aXZl","IG93ZQ==","IHNvdXJjZWQ=","aW5rZXI=","LmRldA==","U2Vw","IFF1aQ==","IFBhbG1lcg==","KF8s","c2FtcGxlcw==","b3llcg==","dWxsYW4=","cXVleg==","RWRnZXM=","IHNob3V0","IEFjaGll","IGhhYXI=","X0NvbnN0cnVjdA==","IHByZW1hdHVyZQ==","IHJldmVydA==","JykuCg==","IHNjaG4=","ZmlsdGVyZWQ=","bnVsbHB0cg==","U2F2ZWQ=","aXRlY3R1cmU=","Q0xB","IHZs","c3RlbGw=","CU1l","IExpcA==","bmF0aW9uYWw=","IHdob2xseQ==","IHNwcmluZ3M=","LlRpbWVy","CXNyYw==","ZWxzZW4=","5YW2","IGNvbW11bmljYXRpbmc=","IFF1aXo=","IHRlbmc=","IGdleg==","IE91dHNpZGU=","LlNpZ24=","KGNz","IGRpc3B1dGVz","IFdlaXNz","YW5uZXM=","Pk5v","IEJhY2g=","LnJlbW92ZUFsbA==","cmVmZXI=","L2Rhc2hib2FyZA==","IEFqYXg=","SW5kZXhDaGFuZ2Vk","IFdlYWs=","JyIK","IHNpZ2h0cw==","YWNjZXNzVG9rZW4=","IEpvaQ==","KGRvbWFpbg==","CWN2","IGNvbnRpbnVhdGlvbg==","IHBsdW0=","YWRpcg==","LnNldE1lc3NhZ2U=","IO+8jA==","IHN3YWxsb3c=","IExhbXA=","IHF3","IHV1","Q29pbg==","dWJpYw==","IERlYWxz","cmFjZQ==","IGRpY3RhdG9y","IG1lbWU=","dHVybmVk","IEp1bGll","LmdyaWRDb2x1bW4=","IHB1cHB5","IHBhbQ==","ICl7DQo=","IGludml0aW5n","IGZyZW5jaA==","dmlt","IHdyYXBwaW5n","ICMtfQo=","KFst","RWFybHk=","IHNoaW55","LmZhY2Vz","
IHJlYmVsbA==","YWJjZGVm","w6RsdA==","IGVzdGltYXRpb24=","cGh5cw==","bG9zdXJlcw==","X1JFTA==","IGV4Y2x1c2lvbg==","IFNreXBl","d2Vpc2U=","LXN0b3A=","bm90aGluZw==","IEVnZw==","aXNvcnM=","UmljaGFyZA==","IGNvdW5zZWxpbmc=","IGNvbW1lbQ==","IFFNZXNzYWdlQm94","IFN5bmQ=","IEZyb3N0","IENvbXBldGl0aW9u","IEF3YWtl","IHRlZA==","aWNpb25lcw==","IERldkNvbXBvbmVudHM=","VkVSVElTRU1FTlQ=","b3R0aQ==","LnJ1bm5lcg==","IHVuaXF1ZWx5","LmZsYWc=","CXJz","X2dlbmVyaWM=","IGBgYAo=","QUNISU5F","IG1laW4=","KEFwcGxpY2F0aW9u","KGJy","IHJhdGlvcw==","Oiw=","IFhDVGVzdA==","dXN0YWluYWJsZQ==","LXd3dw==","aXRsZXM=","X1RFTVA=","IHN5c3Q=","dW1lcmljVXBEb3du","CWFzc2VydFRydWU=","IHdm","LnBlZWs=","IEJ1bGc=","IHRlcnJpZnlpbmc=","Lk1PREU=","IEdX","w6Fy","IGZpYw==","IGNvbW1pdG1lbnRz","LXRlY2g=","IExpcXVpZA==","b3Bleg==","emhlaW1lcg==","YcOxYQ==","LW1lZGlh","KGFuaW1hdGVk","X2dvYWw=","IGd1bQ==","eXN0b25l","LlNFVA==","IFdlbmQ=","c2V0Q2VsbFZhbHVl","IG1zZ3M=","Y2FzaA==","QUxMT0M=","L2F3cw==","IG1pY3Jvd2F2ZQ==","LlBvaW50ZXI=","CUNvbnNvbGU=","X3NvcnRlZA==","IEZpbGlw","UHJvZA==","IC8vITw=","aW5ncm91cA==","IGtz","X1RSSQ==","IHRlYXNwb29u","IEFUVA==","IHJlY292ZXJpbmc=","IEdMT0JBTA==","LlBhcg==","IC8+Owo=","IG1hcmJsZQ==","dWxhdG9ycw==","IEN5Y2xl","IGhlcmJz","X21ldHJpYw==","KSE=","X0NMT0NL","X0J1dHRvbg==","SGFycnk=","6L+b","IHN0cmFpbnM=","IEFwcEJhcg==","IENoYW4=","L3ZpZGVv","IGJhbQ==","LlByb2dyZXNz","JGY=","bGVtZW4=","IGlycmVndWxhcg==","IER1bmNhbg==","IE1pbnQ=","LXZpZGVv","4Ka+","w7N3bg==","IEVNUFRZ","IHN0YWNrZWQ=","IEhB","X2N1dA==","IHdoZXJlaW4=","IFdheXM=","KGNvdW50ZXI=","6K+V","Rm9ybUdyb3Vw","IGJsZXc=","Y291cnNlcw==","IHByb2R1Y3Rvcw==","cnlz","IFJlc3Ry","IHN0eWxpbmc=","PnM=","IHBpdg==","IGl0ZXJ0b29scw==","Z2V0UmVwb3NpdG9yeQ==","IElr","X2RldmljZXM=","bGF5dWk=","IGhhbGZ3YXk=","IGZyYW7Dpw==","IHR1bmluZw==","T0E=","X05vZGU=","YXJkZQ==","IGZpZXJjZQ==","bGljdGVk","Iw0K","IGJyZWFrdGhyb3VnaA==","IEVyaWs=","IGJyaWRl","IC4i","Y3VsdXM=","aW5zaWRl","IEluZGlhbmFwb2xpcw==","IEVF","IHlvZw==","dXJyZXQ=","LmZz","LmdyYWQ=","X2NhcmRz","X2FjY3VyY
WN5","X2VwaQ==","cXVlZGE=","L29yZw==","6aqM","IGNvbXB0ZQ==","KSlb","T3V0c2lkZQ==","R3JlYXRlcg==","IFJlbmRlcmVy","LmFjdG9y","QWNjb3VudHM=","SWRsZQ==","X2hvdXJz","ZXJuZXI=","Sm9pbmVk","IG1lbmo=","cmVxdWlyZXM=","IE9QRVI=","LnJlbW92ZUNoaWxk","CXNw","IGVzc2U=","cmlmdA==","eEZF","IFNoYWtlc3BlYXJl","X19fX19fX19fX19f","IGJ1ZGdldHM=","TW9kZWxTdGF0ZQ==","ZmlsbGFibGU=","LWNvbXBvbmVudA==","b2Nvcw==","IEJVVFRPTg==","L2lv","LG91dA==","c21z","VGhvbWFz","IEFybWVk","cmVzdW1l","IHJvdGF0aW5n","IFZhdWx0","IHNldXM=","Ligq","IGFtaW5v","IFtdKTsKCg==","IHByb3ZvYw==","bm94","LkdldEVudW1lcmF0b3I=","PT09PT09PQo=","5paZ","X3Njcm9sbA==","IGZpbG1lZA==","IFNvY2k=","Z2Fw","Z3Jv","Vm90ZQ==","IkJ1dA==","X1JD","QW5pbWFs","woA=","aWJpbGU=","IGF3YWtlbg==","b3Jlc3Q=","aW5qYQ==","IEl2YW4=","KENvbW1hbmQ=","ICoqKioq","zrc=","IGt2aW5kZXI=","L2hlbHBlcnM=","X2Nhc2Vz","dGc=","7IS4","UmVnaXN0ZXJlZA==","CXBhc3M=","X2RpZ2l0cw==","IGNvbnRvdXI=","IGluZmFudHM=","IGp1c3RpZmljYXRpb24=","IEZvcnR1bmF0ZWx5","Q29udHI=","IG9uQ3JlYXRlVmlldw==","X1NBTVBMRQ==","IGFsbG93TnVsbA==","IG51ZA==","IGZldGNoZWQ=","X2VxdQ==","IFVuYWJsZQ==","PVwiIg==","PnsK","IGNvbW1pdHRlZXM=","aXN0ZW1h","KyIu","w61hbg==","bWFudA==","IHNvdXRoZWFzdA==","77yMCg==","ZGlhbG9ncw==","UFJPSkVDVA==","Y2hhcmdlcg==","LXBvcnQ=","KHV1aWQ=","LmV4cG9ydA==","U2l4","IFJQ","UHJlbQ==","IGNvbnNjaWVuY2U=","IG1hcmdpblJpZ2h0","X2Rpc3RyaWJ1dGlvbg==","eWFtbA==","cmVzaXppbmc=","RG9jaw==","IExvY2F0aW9ucw==","R1k=","U2VlZA==","QlVGRkVS","b3NzaXA=","dWxsZW4=","VGhpbmdz","LXNlbGY=","LnBvbGw=","UExBWUVS","IOWu","R1JPVVA=","IEF3YXk=","IGdvc3BlbA==","eGZk","TWFyeQ==","IFBvcnRhYmxl","VFVSRQ==","IHV0aWxpcw==","IHNlaXQ=","IHN0cmFuZA==","IHRyYW5zYw==","IChe","IEFsZnJlZA==","Lm1lbQ==","LmNpcmNsZQ==","IH4v","Zm9yY2luZw==","IHJpb3Q=","cHJveA==","VEhPTg==","aXphY2nDs24=","IE5J","cm9zdA==","IGRpc3Bybw==","X2luc3RhbmNlcw==","77yM4oCc","b2dyYXBoZXI=","ZW5kYXM=","IElzYWFj","IFBpbmU=","L2Rpcw==","IGNvbG9yV2l0aA==","aXRlcmF0ZQ==","X3N0cmlkZQ==","IHB1bnRv","LkV2ZW50QXJncw==","KGNlbnRlcg==","IG5laWdo
Ym9yaW5n","IFByaXNvbg==","IE1lc3Nlbmdlcg==","IGVwaWRlbWlj","ZGFv","X2NvbXBsZXg=","IGdyYXZlbA==","X0RJUA==","w6ltZW50","IEFyaQ==","X2JpdG1hcA==","LnF1aXQ=","KHZhbGlk","IHBlbmQ=","IHJlc3BpcmF0b3J5","IHJlYm91bmQ=","RGVmYXVsdFZhbHVl","44Ot","IGNvbW1pdHM=","LnRlc3Rz","X2Zy","aXRldA==","LnNm","IHNwYWNlY3JhZnQ=","Y3JpdGljYWw=","IGRlcHJlc3NlZA==","IEFueU9iamVjdA==","IHVuYg==","IGRpc2Nlcm4=","KG15c3Fs","TGF0aW4=","IEJvZw==","IFdpbGRsaWZl","VG9GaWxl","aW94aWQ=","QFJlc3RDb250cm9sbGVy","ICIkKA==","IDw8Ig==","IGRlZmVjdHM=","IGRhdHVt","aGlu","IHJlYWxpemFy","YW55YWh1","IFNpZw==","QERhdGE=","YWRhcHRpdmU=","IENhdGhlcmluZQ==","LmNy","IENPT0tJRQ==","IHBpY3R1cmVk","IEZpZ2h0ZXI=","UXVlcnlhYmxl","IEFueXdheQ==","IEdMRlc=","X25hbWVzcGFjZQ==","X2Z0","IF0p","T3JnYW5pemF0aW9u","IGNvbnN0aXR1dGVz","IHF1YW5k","KGNodW5r","Ii8+DQo=","IExha2Vz","bWFpbndpbmRvdw==","Q2FydGh5","c3Bpbg==","KGNzdg==","OnJlZA==","LWNvbW1lcmNl","4Li5","IGRpc2NvdmVyaW5n","IGVjbw==","X2ZhYw==","aW5jZXRvbg==","IEdyZWVucw==","and0","2LU=","IEJyb25jb3M=","IEdvb2Rz","KEdUSw==","IHJldHVyblZhbHVl","IHNpZW1wcmU=","IG5ldXRy","d2VudA==","IE5hdGFs","IGVudGh1c2lhc3RpYw==","4buN","Rk4=","L2RhdGFiYXNl","Q2F0YWxvZw==","IGJydW4=","IEthc2g=","X1Bs","aXNjcmlt","LHdpZHRo","IGlubWF0ZXM=","QXNzaWdubWVudA==","IEhhdmVu","IHBsYXlncm91bmQ=","ZXhhbQ==","QENvbnRyb2xsZXI=","dWxpYXI=","LmdldFBhcmVudA==","ICI7Cgo=","OnNpemU=","aXNzb3Jz","IGZpcw==","IGFsYw==","ZW5zYXRpb24=","IE5peG9u","IG1pZ2h0eQ==","LXN0cg==","X3NwZWNpYWw=","X0FEQw==","IFR3aWc=","dW1ibGluZw==","LWFkZHJlc3M=","IGhlcm9pbg==","WVRF","ICAgICAgICAgICAgICAgICAK","RnJpZW5k","IGF2ZQ==","IFBORw==","IEt1cmRpc2g=","RGF0YVNldENoYW5nZWQ=","IGJsYWRlcw==","YnJhbA==","U3RlYW0=","IHNpZ3U=","SVJUVUFM","YWNvcw==","VURQ","KGRhdGFiYXNl","aGVj","IFN0cmluZ3M=","X3NjYWxhcg==","CWRlc2M=","IFRMUw==","OyIK","IENvcmJ5bg==","U2ltcGxlTmFtZQ==","dWVsbA==","IEVudHJl","ZWxsaXRlcw==","LXBsYWNl","IGZyYW5rbHk=","IEVyZg==","Q0VM","IHBhw61z","IGhlZGdl","IGxhdGVudA==","IElSUQ==","IEhlcmFsZA==","IFByZWM=","67O0","LlRFWFQ=","
U2FsYXJ5","IGF1dHVtbg==","IHRyYXZhaWw=","LlN1bQ==","IGNhcmVk","TW9y","IGludHVpdGl2ZQ==","IGpvdXJuYWxz","X0lU","IFRyb3U=","5Lyg","SGFzQ29sdW1uTmFtZQ==","Q29tcG9zaXRl","IHNwaWNl","X2Rpc2s=","X0NPREVT","IEludHJvZHVjZWQ=","aW9uYQ==","IG51ZXN0cmE=","b2N0","ICAgIAogICAgCiAgICAK","KHBhcmFtZXRlcg==","IHN0dWRpb3M=","IHByb2plY3RJZA==","IGJkc20=","LlNxbENsaWVudA==","aW1pemVy","IENBUkQ=","K3Q=","YWFu","LnNvbA==","X0FkanVzdA==","IHJpZ2h0ZW91cw==","IExvZ2dpbmc=","LmZpbHRlcnM=","X1RBQg==","CXN5cw==","cm9waGlj","b3RoZXJhcHk=","IEJyb3dzZQ==","a2V5Ym9hcmQ=","Uk9O","K1w=","cm9wcGVk","IGV4dGVuc2l2ZWx5","Zms=","IGxpbWU=","eWVhcnM=","RXhj","IHNwaA==","IGNoZWF0aW5n","YW5kcm8=","w61v","IHByaW5jZQ==","b2lyZQ==","IERlc3RpbmF0aW9u","IENvbnZlcnRz","IHVwc3RyZWFt","b2xlZA==","IHNlcnZhbnRz","IHNlbWFudGlj","IGNydW5jaA==","IGV2ZW50dWFs","cnVubmVy","L2Vycm9y","U3Bpbg==","IHNlY3JldGx5","IGFzc2VtYmxl","LlBlcnNvbg==","ZW5kZXJyb3I=","Xzw=","IHBlbmRhbnQ=","U2xlZXA=","IENoZW1pc3RyeQ==","IGJvc3Nlcw==","bGs=","KSkpLAo=","QmxvY2tseQ==","REVWSUNF","IHJlZmxlY3Rpbmc=","IGFtcGxl","TWlsbGlzZWNvbmRz","IFByZXNpZGVudGlhbA==","IHVzdWFyaW9z","IE5a","IFNhbGFyeQ==","IEFtYW5kYQ==","X25w","anVyeQ==","IGvDtm4=","IHRoZXJhcGlzdA==","IGhvbW9zZXh1YWw=","IERyYWtl","LXdpbmRvdw==","IExvY2F0ZWQ=","LkRyaXZlcg==","IFZJREVP","IG1lcmNoYW50cw==","IENoZXN0","LWxvY2s=","L3BocA==","IG1pbGFubw==","X1NUWUxF","YXJnZXI=","aWRlYQ==","R1VJRA==","YWR2YW5jZWQ=","bWVhbA==","T3B0aW9uc0l0ZW1TZWxlY3RlZA==","PScl","IENoYW0=","OmRhdGE=","KHN0YXQ=","V2lsbEFwcGVhcg==","IGluZm9ybWFs","YWpp","IHJlcHJvZHVjdGl2ZQ==","IENBUw==","44Gj","RlVOQw==","IFJ1dGg=","KSso","Q09OU1Q=","IEZhbnM=","IGdyb3VwSWQ=","eGZmZmZmZmZm","IHNhbXBsZXI=","IH19Ij4=","LnRoZQ==","IGhvbGxvdw==","V0FZ","IEZhY3VsdHk=","QXR0cmlidXRlZFN0cmluZw==","IExvb2tz","IFJleA==","ams=","IE1JTA==","IGJhcmQ=","Lkxvbmc=","IGxpdmVzdA==","IHNrYWw=","aWNpc20=","TUFJTg==","IG11Y2hv","Qk9EWQ==","IGVzZQ==","CXVzZQ==","Rm9vdA==","LlNRTEV4Y2VwdGlvbg==","IGluaGVyaXRhbmNl","cmVjZWl2ZWQ=","IHB1dGFz","ZWRpcw==","YWxzY
Q==","IEVycm9yTWVzc2FnZQ==","Qm9va2luZw==","IHRyYWN0","YWN6","IENhbnQ=","X3JlZ2V4","IGlkZW9sb2dpY2Fs","IGppaGFk","aG9z","L3N5cw==","Y29sbQ==","KHBvb2w=","IGVzdMOhbg==","IFBlbmRpbmc=","ZW3DoXM=","IGt0w7NyeQ==","KSk7CgoK","dHJhbnNhY3Rpb25z","IHdpZWxk","aXRlcmU=","ZXJ0dXJl","X3Nz","IHN0cmV0Y2hpbmc=","IHByaXNvbmVy","LlJlYWRBbGw=","IGJlc2No","LS07DQo=","IGNyaXNw","X1NDQU4=","IGFl","U3RyaWN0","IE1pbm5lYXBvbGlz","IEJvZWluZw==","YXJpcw==","cmVr","X3BpcGU=","IHByaWVzdHM=","KEVJRg==","ZWhpY2xlcw==","IEludGVyYWN0aXZl","YmV0d2Vlbg==","CU51bGxDaGVjaw==","IEJsYWly","IEx0","X2lubGluZQ==","ZXRoeWw=","wrw=","X3BhY2thZ2Vz","IGJhcnJlbHM=","X2hl","IHJlZ2V4cA==","X3B0cw==","X0hhbmRsZXI=","aW5ndWxhcg==","IE5pc3Nhbg==","IFJhbmNo","IHBlcmNo","VW5zdXBwb3J0ZWQ=","U21pdGg=","IExlZ2VuZHM=","TWk=","IGdm","c3RlZGVy","IGFjcXVpcmluZw==","IHNpbXVsYXRvcg==","KCksIg==","cmVjZWl2ZQ==","IGlucGxhY2U=","QUNUSU9O","IFdlYkRyaXZlcg==","ZmlsZXN5c3RlbQ==","PE9yZGVy","bG9wZW4=","IEhFSUdIVA==","LnNldEJvcmRlcg==","jbA=","X19bIg==","IGNsYW1w","U2Vnb2U=","YmFuZHM=","dG9MaXN0","YW1iYQ==","PicrCg==","IGNyZWRpYmxl","YW1hdA==","cGxheWluZw==","LnNldEltYWdlUmVzb3VyY2U=","cXVlbA==","IHBvZHI=","Z2VvbQ==","RWs=","IFFhdGFy","IGdlbGQ=","PycsCg==","IGN5bA==","KGF4","IFdJ","dXJhbGx5","IEJyYXNpbA==","IHNlbnph","YWxleQ==","b25lbg==","IGJhaA==","IG1vbGVjdWxl","UmFk","6L+w","QU5DSA==","LWJhY2tncm91bmQ=","LWFnZW50","IHByb2xpZmVy","OmJvb2xlYW4=","IHRpZGU=","ZXJpYWxpemVy","XzsNCg==","RmVl","Kiop","ZXJneQ==","IEhvbm9y","LkxvZ2dpbmc=","aXJpcw==","IHVuZGVybWluZQ==","IER5","IHR5cg==","IGRlcXVl","IGRhbWVy","KFtdKQo=","LmxheW91dENvbnRyb2xJdGVt","cGVhdGVk","Q0FO","cmFnbWVudHM=","TGFuZA==","KV0pOwo=","IFNhaA==","IERFQ0w=","V2l0aGlu","IE5hbWVzcGFjZQ==","YW5vdGhlcg==","c2VtYmxpbmc=","LmRlc2NyaWJl","Q29uc3Vt","IEZlYXI=","Z2l2ZW4=","T3Jhbmdl","PGJvb2xlYW4=","IHN0ZWFkaWx5","cGFSZXBvc2l0b3J5","IHJlc3VsdFNldA==","X0VOVEVS","X3JlcGVhdA==","IHRvbmVz","IFBST1A=","bmFs","cGFydGljbGU=","IHNpZ25hbGluZw==","IGFjY2Vzc29yeQ==","CQkJCQkJICA=","IHZpZWxl","IE5
vYWg=","LWFn","IG11cmRlcnM=","IGFpcmVk","IFBMQVk=","IFN1bGxpdmFu","X0NvcmU=","IHVsb25n","IGJsb2dnaW5n","PlRoaXM=","IGRhdGFJbmRleA==","IHByaW50YWJsZQ==","IEV5ZXM=","X3RhcmdldHM=","KFB5","Lm92ZXI=","IGJydQ==","YW1wdG9u","IHBsYWludGlmZg==","PEtleQ==","YnVsbA==","IOKfqA==","SXNzdWU=","LmNvcm5lclJhZGl1cw==","Q3JpdGljYWw=","X3BoaQ==","LmFuZ2xl","IGR5bmFtaWNhbGx5","ISIpOw0K","Pik7Cg==","aW52ZXN0","LioKCg==","IHTDqWzDqQ==","IHN1cGVyZg==","IGNhc2NhZGU=","RFRE","IHZpdmlk","IHN1YnNpZGllcw==","IEhhc3M=","IGNvbGxhcHM=","IGNlcmFtaWM=","e30iLg==","IExlYWthZ2U=","LXRyYXNo","Y29sbGFwc2Vk","LXNvY2lhbA==","IENoYWQ=","IGluY2xpbmVk","IHN0bw==","IHN0b3J5Ym9hcmQ=","LnBheW1lbnQ=","c3RhY2tvdmVyZmxvdw==","IFJhaWRlcnM=","ICMn","b2xpY2llcw==","7Jy866Gc","ZW1hcA==","IGtq","IHF1b3Rh","IEdhcmRlbnM=","67KI","IEFuZ2Vscw==","IG9mdA==","IGxvd2VyY2FzZQ==","IGlQYXJhbQ==","IGNoZWFwZXN0","dW50YQ==","X3BrdA==","aWNhdG9ycw==","IGxldXJz","IGRlY3JlYXNlcw==","CWRlZmluZQ==","UFJFQw==","YW1tZXJz","IFByZXBhcmVkU3RhdGVtZW50","KGRpcmVjdGlvbg==","IGNyZXdz","YXJrZWQ=","IE1lbXBoaXM=","IFNlbGw=","R1RL","IG1haWQ=","OmRpc2FibGU=","6ZuG","IFBm","IGFsYmVpdA==","b3Blbmg=","Pz4iPgo=","LmdldFNvdXJjZQ==","KHNjYWxl","RHU=","IFBJTA==","X3JlZnJlc2g=","IGJldHM=","KGNhcg==","IFZvbg==","fC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tCg==","IEdyYXQ=","TXVjaA==","KERpYWxvZw==","LnN0b3BQcm9wYWdhdGlvbg==","IHRlaw==","IGV4aXRz","J10sJA==","IHBob25lTnVtYmVy","dWNz","ZWNpbWFs","LS0tLS0tLS0tLS0tLS0=","aW5w","LnBvam8=","IGNvcnB1cw==","IHByYWN0aXRpb25lcnM=","LnBpYw==","InRlc3Rpbmc=","IHN0cmluZ0J5","Lk5vdE51bGw=","IHJhbmc=","LkR5bmFtaWM=","X1JlbmRlcg==","0LDRgtCw","V2FpdGluZw==","IFdpaw==","IG92ZXJ3aGVsbWVk","JSI+","IEFF","fX0+Cg==","dXc=","X3R5cA==","IGJ1Y2tldHM=","IGdyZWV0aW5n","IGxhdWdodGVy","IGFudGFnb24=","dWdnZXN0aW9u","LWVtYWls","CXRvcA==","IGVyb3M=","X3RyaQ==","IGlzc3Vpbmc=","IGjDoQ==","IGlzb2xhdGU=","T3ZlcmZsb3c=","LEU=","IG51dHJpdGlvbmFs","IEFiYm90dA==","IG5m","LnRvdWNo","LmZldGNoY
Wxs","X3ppcA==","Iil9Cg==","IGFtYXQ=","IENpc2Nv","IG7DpQ==","UExFWA==","IHNlaQ==","Zm90bw==","LnRvSnNvbg==","5aSa","IEtsZWlu","IGxpYmM=","IG1pbmVycw==","5aI=","LXByaW50","IFByaWRl","VG9kb3M=","IG1hc2tlZA==","IHNldERhdGE=","IHRlbGVmb24=","IHVuaGFwcHk=","IFRhYmxlcw==","Z2Vi","KGRlYnVn","X2FsbG93ZWQ=","LWFjY2Vzcw==","IGxvZ2lzdGljcw==","IGdlbXM=","IE1hdHVyZQ==","IHJzcA==","IEFsbGU=","LmdldEJ5dGVz","XHdlYg==","eW5jaHJvbml6ZWQ=","UGFyYWdyYXBo","IHRocm90dGxl","LnNxbGl0ZQ==","Y29uc3VsdGE=","IFNlYWg=","Q2U=","IHN1Ym1hcg==","RVJF","Vm91cw==","IHJlZGRpdA==","IHNxbGFsY2hlbXk=","LW1pbGU=","b2NpZGU=","UG91cg==","fX0iPgo=","c3RlYWQ=","IEAo","IFtdKQ==","IEFkcw==","IG92ZXJsb2Fk","cmlkZGVu","IERlc2VydA==","IFdyYXA=","IFBvcnR1Z3Vlc2U=","ZXR6","CWZpcnN0","IG1pbGVzdG9uZQ==","5peg","0YPRiQ==","KHN1Y2Nlc3M=","PFZlY3Rvcg==","Y29vbA==","IFtdKTsK","ZXJ2YWxz","IGludmVydA==","Imlv","Y3Vyc28=","ZnJhZ21lbnQ=","IGZlYXNpYmxl","LnNldFBvc2l0aW9u","IGVsbQ==","IGltYWdpbg==","QFNwcmluZw==","IGJhdHM=","cHXDqXM=","Z2FsZW1lbnQ=","bnNpYw==","Z2llbmU=","ZWxsYXRpb24=","IEJhaWxleQ==","U2hhcg==","IFR1bA==","IEhL","IGZyZWV6aW5n","Z2xt","Y2VhbnM=","LWN1dA==","X2NpcmNsZQ==","5ZGY","bmVnYXRpdmU=","IGluZGlhbg==","c2FsdA==","IHRpbmc=","CW1vZA==","IHNpbnQ=","YWtpbg==","dW1s","IFRleHRJbnB1dA==","IHBvcHBlZA==","VE1Q","IHBhcmtlZA==","15nX","IEZ1c2lvbg==","IGhlYXRlcg==","RVRG","cm96ZW4=","aGFsbA==","IE1paw==","bGV2YXJk","LWhlYXJ0","CW9yZGVy","TWFraW5n","IHBsZWRnZWQ=","IGRpcnM=","JHBvc3Q=","IEhlcnI=","c3RhbnRpYXRl","LCIK","LmdldENvbG9y","IFNBVA==","IHRpbWVkZWx0YQ==","IE1haQ==","CW1ldGhvZA==","IGlkaW90","IFRyYXY=","aWRlbnRpZmllZA==","IERpdmluZQ==","LmdldFBhdGg=","RGFzaA==","IGluZmlsdHI=","IGhhbmRsZVN1Ym1pdA==","YnJvb2s=","LmdlbmVyaWM=","LnNob3J0Y3V0cw==","Li4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLg==","IGRhdGluZ3M=","IE1W","77u/Iw==","fSIKCg==","IGltcHJpc29ubWVudA==","YXNvbmlj","cm91ZA==","dWNpb24=","5oql","IGRpYWxlY3Q=","IG9uTW91c2U=","Y29uc3RleHBy","LmxhYmVsQ29udHJvbA==","IHdlYW
tlcg==","IG1hbmtpbmQ=","IFJFQ0U=","IGRpeg==","IGFwcEJhcg==","IHF1w6k=","ZnJh","X2RlZmF1bHRz","IGFsaXF1","X2F0b20=","OmluZGV4UGF0aA==","IG1pc3Nlcw==","IHZpc3VhbGx5","IEhhbmRz","U1RSVQ==","aWF0ZXM=","X2Fzc2V0","RmluZGVy","bWlkdA==","IHNuYWNrcw==","KF9fKCc=","LnVyaQ==","IEluc3RydW1lbnQ=","dmVuaXI=","KCRfXw==","LkRvdE5ldEJhcg==","IGNvbmZpZ3M=","IGd1ZXNzZWQ=","4KS/4KQ=","IGluaXRpYWxpemVy","ID8iLA==","IFZlcml6b24=","bWFuaWZlc3Q=","Z2ViZW4=","LmRldGFpbHM=","R2F0ZQ==","cG9uc2libGU=","IEVsaW0=","LHN0cg==","IHdyaXRpbmdz","IERlcmVr","IENvb3JkaW5hdG9y","IHBpbGxvdw==","IG5vdGljZWFibGU=","UnM=","IGR1cGxpY2F0ZXM=","ZXJuZWxz","a0o=","Lnp6","b2xsYW5k","IFNFQ1RJT04=","X2ZuYW1l","dWZmbGVk","J10uJzwv","X0NN","IHly","cGxhdA==","b2JvZHk=","bmRl","KEVsZW1lbnQ=","IEF0bGFz","IO+8iA==","IG5pdmVs","IGluc2lzdHM=","W1A=","IGVudGh1c2lhc3Rz","IOyeheugpQ==","IGJldmVyYWdl","e30iLA==","OnJpZ2h0","IG5vdXZlYXU=","IENvbXBsZQ==","IFBhZw==","b3ducw==","IHJlbWVtYmVycw==","IFByYWRlc2g=","IGNoYWxr","IExhdXJlbg==","XFNlcnZpY2U=","X0dFTg==","PiIpCg==","IERvbGxhcg==","IGVtb2pp","Q2Fyb3VzZWw=","LXBsYXllcg==","IGFkanVzdGluZw==","IGp1Z2E=","YWxsZW5nZXM=","Z2VuZQ==","KGJvZHlQYXJzZXI=","bG9wZWRpYQ==","IEJlaGluZA==","IHNsZWV2ZXM=","IGRyYWdnaW5n","IENoZXZyb2xldA==","IGJpeg==","aXZpdGllcw==","IEZyZXF1ZW5jeQ==","LGNoYXI=","LldISVRF","X3ByZXZpZXc=","KSc7Cg==","X2F4","SU9OUw==","LmNwdQ==","LmlucHV0cw==","VUJF","X2ZlZWQ=","IFN1cHBsZW1lbnQ=","ISku","ZXN1cw==","IFVEUA==","IG1pY3JvcGhvbmU=","IGNvbmZpcm1z","LmlzTm90RW1wdHk=","IjoiIiwK","X1NDUkVFTg==","CWV4cGVjdGVk","Ky0rLSstKy0=","IEhhaXQ=","ZmFzdGNhbGw=","IGRlcGljdA==","dmI=","X3BpY3R1cmU=","CWRlc2NyaXB0aW9u","IFdpZmU=","dWNp","IHZpY2lvdXM=","5LuW","dWViYQ==","IHNldFVzZXI=","44Gh","IGRpdmluZw==","IG9wZXJh","dXNlcmNvbnRlbnQ=","YXJhaA==","KX0s","eXVu","dmVsdA==","IHVuY292ZXJlZA==","IGhpcHM=","IG9zY2lsbA==","IGFzc2VydGluZw==","IFhp","LnJlc3RvcmU=","a2Vh","IHNwZWxsaW5n","IGRlcml2ZQ==","YWJ3ZQ==","IERvdw==","LnNldFR5cGU=","X3Zz","IGNvenk=","LmNhdGVnb3JpZXM=","T3Jn","X21ncg==","IG
R1bmdlb24=","Y29sbGVjdGlvblZpZXc=","IEJsYW5r","YWNpYXM=","w6TDpA==","X2NsZWFudXA=","X0FDVElWSVRZ","IHRyaWFuZ2xlcw==","Lk1lbnVJdGVt","IGlwaG9uZQ==","IFdvbg==","XV0KCg==","IENvbXBhcmlzb24=","LkRvYw==","IGNhbm9uaWNhbA==","IFN1ZGFu","Jyl7","VXBJbnNpZGU=","YnVpbHRpbg==","RU5DWQ==","eGJl","IGNodWNr","IGNvbnRyYWRpY3Q=","IG51ZXN0cm8=","IGFyY2hpdGVjdHVyYWw=","IEZpYg==","IGNvbXBhcmVz","Kms=","Q2Zn","54Sh","bnRlbg==","TWF0Y2hlcw==","IERPV05MT0FE","X0hBTkRMRVI=","bWFuYWdlbWVudA==","W1M=","RU5H","woDC","ZmFuZw==","IHNsaXBwZWQ=","IExhbmth","ZXNjYXBpbmc=","IHRhY2tsZXM=","IFBlZHJv","LlByb3A=","Licn","LkdlbmVyYXRlZA==","Lk5ld0d1aWQ=","YXRyaWdlc2ltYWw=","aWxsb24=","IHN0YXRpc3RpYw==","c3BlY2llcw==","aG9sZGluZw==","RHJ1cGFs","IGZ1bmRhbWVudGFsbHk=","IGJvbmRhZ2U=","IHJlc29sdXRpb25z","SW5saW5lRGF0YQ==","XFR5cGU=","ZXN0aW9u","LndyYXA=","IHdhcnJpb3Jz","IExPQ0FM","QXJjaGl2ZQ==","IGVtYnJhY2Vk","4bun","LlZlcg==","IEFmZm9yZGFibGU=","b2xlc2FsZQ==","IEFwcGxpZWQ=","IENvbnZlcnNpb24=","bWVnYQ==","X2NhbQ==","IGNlcmVtb24=","YXVydXM=","IFZvbGs=","Lm9wZW5z","L2Fib3V0","IFN0ZA==","am91cm5hbA==","KCkpew0K","LCJc","KEFycmF5cw==","IERlbnNl","YXNlw7Fh","w6RubmVy","L3N0YXQ=","dXNlckRhdGE=","IGdlcm1hbg==","IHR6","d29ydGh5","Rm9ybWF0RXhjZXB0aW9u","cGhlcmQ=","IHNtaWxlcw==","IFdoZW5ldmVy","KGFkYXB0ZXI=","LmJhZGxvZ2lj","IGJyaWVmaW5n","LkdyaWRDb2x1bW4=","LWNoYXI=","ZGltZW5zaW9u","IENvcHBlcg==","IG5pbnRo","ICd7ew==","IHJhdg==","X1RhYmxl","IGRlcml2YXRpdmVz","IFJhaXNl","IEZ1dA==","YXJtb3I=","LXBhZGRpbmc=","IHJlbWlu","CXN0eWxl","IE1lbWJlcnNoaXA=","IHNwcmVhZHM=","IGdhbGxlcmllcw==","IENsYXJrZQ==","IGNvbmNlcHRpb24=","bWludXRl","IGFidXNpdmU=","X2Fkag==","IHRlcnJpZmlj","IG92ZXJ0","b3VyY2luZw==","IGVudHJhZGE=","bGV2ZWxz","IGNyaXRpcXVl","IHJlc3BlY3Rz","IE1NQQ==","aWVuZQ==","IGVuY2Fwcw==","IFJheW1vbmQ=","RGl2aWRlcg==","aXZhYmxl","YmF6","IEBfOwo=","IENsYWlyZQ==","IHVyZ2luZw==","Q0VF","IHRyYW5zZm9ybWVy","ZGlzY29yZA==","IEpvdXJuZXk=","dG9z","IGNvbXBldGl0aW9ucw==","IE9CSg==","IEJpcw==","IHJlbGF4YXRpb24=","aWR5","X0lOU1RBTkNF","IFB
yZWY=","ZGFkb3M=","aWNpZW5jaWVz","IE1lZGlhUXVlcnk=","IEN1YmU=","IFN0cmFuZ2U=","Z3B1","KGRheXM=","X0luaXRTdHJ1Y3Q=","IGZpbmdlcnByaW50","ZW1hdA==","IEdlY2tv","IHJhaWxz","IEx1bQ==","c3RyYWN0aW9u","aWd1bmc=","KG1vdmll","X2RpY3Rpb25hcnk=","X2ludGVycnVwdA==","IFFD","aWtlZA==","YXBwZW5kQ2hpbGQ=","cmVjaXBpZW50","csOp","VmU=","IHRvd2Vs","Lmxhc3RJbmRleE9m","IHBsYWNlYm8=","IFdpZQ==","LmVzcA==","KERlYnVn","b3BlcmF0aXZl","IGRlY2Vhc2Vk","Jmlk","CW11dGV4","ZWxpYw==","IGJhcHQ=","CQ0KDQo=","IGZhcnRoZXI=","SGFsZg==","LmRpc2FibGU=","Lm1lbnVTdHJpcA==","bGVjY2lvbg==","IHJlc3VsdENvZGU=","IGNhbnM=","LWVsZWN0aW9u","ZmVtYWxl","X0ZJWA==","YXVzaWJsZQ==","IFBPV0VS","IHJlY29uc3RydWN0aW9u","IHNjYW5z","Llh0cmFCYXJz","4oCYcw==","UmVtb3ZlZA==","IHBhcmFncmFwaHM=","X21hcmdpbg==","IGx5bXBo","IGJvcw==","bGluZ3Rvbg==","IEJhcHRpc3Q=","IGFkdmVydGlzZW1lbnRz","IE1hbmFnZQ==","L3l5eXk=","SU9VUw==","RU5DRVM=","IEZpY3Rpb24=","CW1lbnU=","IEZpbGVPdXRwdXRTdHJlYW0=","b3Zhbg==","IEZlbmc=","IHNraXBwaW5n","Z2V0Q2xhc3M=","YW5uaQ==","IHJlYm91bmRz","IHB1YmxpY2l0eQ==","IGluZ3Jlcw==","dXNlbWVudA==","IHRob3VnaHRmdWw=","LkNoYXJ0","IGhhdHRl","cGFzc3BvcnQ=","IGhvb2tlZA==","IExlbnM=","IGZsYWdzaGlw","IHN0aXA=","IEdFTg==","IGNsdWVz","aXB2","IFJpc2U=","IEdldw==","dGFibGVuYW1l","IGZvcmVtb3N0","X3ZhbGlkYXRl","X2FuYWx5c2lz","b2xsYQ==","IHF1YWxpZmljYXRpb25z","IGRpc3RyaWJ1dGlvbnM=","IEZsb3dlcg==","IHRlbnNl","IHRoYW5rZnVs","IGNsdXRjaA==","IHVuaWZpZWQ=","cm9hZHM=","IHNpdGk=","IHN0YWxs","X1BSSU9SSVRZ","Y3N0ZGxpYg==","X1VTRVJOQU1F","LmJ5dGVz","P3BhZ2U=","ZXJtYWxpbms=","IFZlZ2V0","L3ZuZA==","LWF1dGhvcg==","Lk5PTkU=","IENvbmN1cnJlbnQ=","IENyeQ==","IHN0YXJ0ZXJz","IEludGVyYWN0aW9u","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","IExFVkVM","RWxs","IGNvbWJvQm94","IFRoZXJlc2E=","dGVr","X0hhbmRsZQ==","IGFieQ==","LmdkeA==","LGVuZA==","KExvY2Fs","T2w=","a25pZmU=","YXJpYWw=","IEhvZmY=","IHByb3N0aXR1ZXJhZGU=","RG9jdG9y","SW5zdGFuY2Vz","LlNldFZhbHVl","CWZyb20=","IGx1eHVyaW91cw==","SW5kZW50","QWxsb2NhdG9y"
,"X0RSQVc=","KCIsIiw=","IEZyYW5jZXM=","IGdyb3VwQm94","KHNjaGVtYQ==","UHJpbnRm","T1JJRVM=","LWdyYWRpZW50","IHJlcHV0","YXJpbg==","X0RPTkU=","aW5jcmU=","aWdudHk=","IGV4ZXJ0","IC0u","L0FwcA==","LXRocm91Z2g=","IGRlY2xpbmluZw==","IGRlc3NlcnQ=","IGluY3VtYg==","IGRlc2lnbmF0aW9u","LlBPUlQ=","LHN0cm9uZw==","IHNhbmRib3g=","IHdpbmVz","IFBhdg==","JHN0cg==","YXNrZWxs","IGjDtg==","IFBZ","R2V0SW5zdGFuY2U=","VGV4dElucHV0","Z2FtZU9iamVjdA==","L2V2ZW50cw==","Y3JlYXRlZEF0","IGxvY2FsVmFy","IFdISVRF","cGVyZWQ=","aWxlZ2U=","ZWZmaWNpZW50","LGNvbG9y","Y2F0ZQ==","IENhZmU=","IHNpbWlsYXJpdGllcw==","IHB1bXBz","IEh1bmdhcnk=","LlVzZXJuYW1l","IHNrYXRl","IHRvdWNoZG93bnM=","IGFjY2VsZXJhdGU=","IEhlbGVu","T01FTQ==","IEt1bg==","X3ZvbA==","IGZpbmRBbGw=","IE1lbnNjaGVu","YWhlYWQ=","KTsi","a29tbWVu","IHBvc3Nlc3NlZA==","LmFyZ21heA==","LnRyYW5zaXRpb24=","QVJQ","T0xVTUU=","KHNjcmlwdA==","INCY","IEZpbmRpbmc=","b25jZXM=","SW8=","Qm9sZA==","IHJlbmV3YWw=","X0RJQUxPRw==","IGRpc3JlZw==","SU5URVJO","IHRvdXRl","IGVsZWN0cg==","IEdyb3Nz","CXRydWU=","LkZpZWxkcw==","IFdJRFRI","IERlbnQ=","IMOB","TlNOb3RpZmljYXRpb24=","IGFvcw==","IG1lbGVl","LlZhbGlkYXRpb24=","IERFQw==","LWRlcGVuZGVudA==","IHN1aWM=","VHJhaXRz","JG1lc3NhZ2U=","IERlYXI=","CUZJTEU=","bGFuZ3VhZ2Vz","LlByb3Q=","LmFkZHI=","LWdlbmVyYXRpb24=","SUNPTg==","IHRyYW5zcGxhbnQ=","LWRlc2NyaXB0aW9u","IGNoYXNpbmc=","IGNoZWVz","IH0qLwo=","VHJhZA==","cXVlcmllcw==","L3dpZGdldHM=","c3VicGFja2FnZQ==","IGVzcGVj","IGNyYWNrZWQ=","IGNvbXBldGl0b3I=","UHVyY2hhc2U=","LXRlYW0=","b2xlY3VsYXI=","b3JUaHVuaw==","JlA=","IHJlbGVudA==","LyN7","IHByb2R1Y3RJZA==","IOi+","IExhdg==","IEFsdGVy","Lk1vZGU=","QURJTw==","Z3Jw","5re75Yqg","UXVpdA==","IGRlcHRocw==","LWNhdGVnb3J5","IERBVEFCQVNF","U1BFTEw=","IEZhbGNvbg==","IFFTdHJpbmdMaXN0","ICcnLg==","IEluc3RpdHV0aW9u","ZGFtYWdl","YXpvcg==","YmVsb25nc1Rv","dmVyYWdlcw==","IE5PTkU=","aXBwZXRz","LFwK","IGZvb3RwcmludA==","X2FyY2hpdmU=","bmFr","LmdldEZpZWxk","IFJlZmxlY3Rpb24=","ICdd","IEhCTw==","X2Rpc2NvdW50","IGluY2VzdA==","IERvZGdl","IFdhZGU=","Lk5P","ImVuY29
kaW5n","IEJsb2NrY2hhaW4=","IGxhd3N1aXRz","IE1haW50","Y2h0ZW4=","IMOpdGFpdA==","IGt0w7NyZQ==","X2N0bA==","KHRpbWVy","QmF0dGxl","aXpv","YXllZA==","SU9S","IEdsYXNnb3c=","IHN5bnRo","X2xvZ3M=","LnBvc2U=","X0FkanVzdG9yVGh1bms=","KCgm","IHVuc3VyZQ==","eXN0YXRl","7ZWY64qU","T1VMRA==","Lm5n","IGRlZmF1bHRkaWN0","d29ya3NwYWNl","IHNlbGVjdGl2ZQ==","UGlja2VyQ29udHJvbGxlcg==","WU5BTUlD","Lm1ldGhvZHM=","IHBhdGh3YXlz","IEZldw==","S0c=","Q1JZUFQ=","Zm9sbG93aW5n","IERMQw==","IFNhcmE=","IHByZXNldA==","ZXN0cnVjdG9y","IEt1cnQ=","IGFpcnBsYW5l","IG9tcA==","IFBhcmVudHM=","IE1hcnRpbmV6","LmNvbXBsZXRl","IGJyb2FkbHk=","IHNjYXJl","IE3DqQ==","IGVsaW1pbmF0aW9u","IHBvdXJlZA==","L3N3","IGNvbXVu","IG1hc2M=","IE9yZ2FuaWM=","IFN0cmluZ1V0aWxz","aWxhdGVyYWw=","IHJlbHVjdGFudA==","LWFnZQ==","IG56","LiJc","IHBhc3Rvcg==","YWxleg==","IGVmZWN0","cHJvdg==","L2luaXQ=","IHBlbm4=","dW5kcw==","IHNzaXpl","IFByb2o=","YmFzZW5hbWU=","IHNoZWxscw==","IE5lY2s=","IEVuZm9yY2VtZW50","dmlkZWQ=","c3Rvd24=","U3BoZXJl","JHI=","dXNzZW4=","YWZpbA==","IFRlbGVncmFt","IGFuYWx5dGljYWw=","0L3Ri9C1","dXN1YWxseQ==","eG4=","IGhpc3Rvcmlhbg==","IEdyZWdvcnk=","b2xwaA==","IFVuYQ==","IGNvbnRyaWJ1dGVz","JS0=","YW50aWFnbw==","0YDQtdC0","LnJlZ2lvbg==","IGFicnVwdA==","IFVuc3VwcG9ydGVkT3BlcmF0aW9uRXhjZXB0aW9u","IFRBU0s=","X2ZpbmlzaA==","IG5vdG9yaW91cw==","IFZz","IE1R","IHN1bnNldA==","IHVuYWNjZXB0YWJsZQ==","YXJjZXI=","IGlsbHVtaW4=","IE9yYg==","IGJo","RXN0ZQ==","X2Rpc3BhdGNo","IHJpcHBlZA==","IHRvdWpvdXJz","IFBhcmNlbA==","X2xs","LnVzZXJOYW1l","LmNsYXNzZXM=","U09VUkNF","KE51bWJlcg==","0LXQu9GP","IGhlYWRwaG9uZXM=","KHNpZGU=","Y29uc3RpdHV0aW9u","YW5uYWg=","DQogICAgICAgIA0K","IGNsaWZm","LXJlZg==","IG1vc3RyYXI=","IFBvd2VsbA==","K3k=","IEJH","X2ZyYWdtZW50","LlBvcnQ=","IHJlYWxpemluZw==","cGFyYW1yZWY=","IGhvbWV0b3du","QFRhYmxl","KyI8Lw==","b21pZA==","IGR1Zw==","CWJ0bg==","IHN1YmplY3RpdmU=","L2Jyb3dzZXI=","IHVzaG9ydA==","IE1vbnRnb21lcnk=","LXJhdGU=","CXB1dHM=","bGV0aWNz","b3Jucw==","4oCcV2hhdA==","ZWVwZXI=","LkludmFyaWFudA==","IGNvbmNlYWxlZA==","X251bXB5","PT
09PT09PT09","KHBz","TG9jYXRpb25z","LmFzdHlwZQ==","IENIQU5HRQ==","Lk9yZGVyQnk=","O2hlaWdodA==","IGdlbnRl","IGdydW50","IFBsYW5l","IHNhZGx5","IExvZ2Fu","X3VzZWM=","LmRndg==","IHNpbmNlcg==","IHBu","CWd0aw==","IGluc3RhbGxlcg==","IGRpc3BsYWNlbWVudA==","IGJ1cm5z","0YPRgQ==","aXZlcmVk","Ol0pCg==","c2VhdA==","YW5pbmc=","fSkKCgo=","X3JvbGVz","YXRpY2Fu","IGdlbmVyYXRvcnM=","IGh1cnRz","IHNuaXBwZXQ=","IGdzb24=","IHNlZ3JlZw==","IGRpc3RyaWJ1dG9y","IGFkdmFuY2luZw==","cG9zdGdyZXM=","IHVzcg==","IExpcw==","LmFzc2VydElz","X2Nk","IGh5ZHJhdWxpYw==","LmNvdW50ZXI=","IEluZGVwZW5kZW5jZQ==","IGRpZmbDqQ==","VW5saWtl","IHRvbWI=","dmlr","cG9zdGVk","d2Y=","IGRlc2NlbmRpbmc=","ZHlu","YW1lbnRhbA==","IEZydWl0","IFlv","LmRvdWJsZQ==","IElB","aWV2","aWJyYXRl","IFJlbGlnaW9u","TWFueVRvT25l","LVRh","IGJhbmFuYQ==","IEF2ZW5nZXJz","IEhvbG9jYXVzdA==","IGdldEM=","IGNvbmRv","IEdvdGhpYw==","IHByb3NwZXJpdHk=","VFJBTlM=","IGRvZXNudA==","IENoYW9z","SVRU","IENVUlJFTlQ=","XGhlbHBlcnM=","X1NBVkU=","YXZpdA==","Y29tcHV0ZXI=","X3NoZWV0","IEJyZXdpbmc=","IHJvYmJlcnk=","IOqyvQ==","INC60L7QvA==","IG7DpA==","LnJlZ2V4","IGRpc3J1cHRpb24=","IFNpbXVsYXRpb24=","YXBpZA==","IHN1cHJlbWU=","zrw=","IGNvbW1pc3Npb25lZA==","IGFic29ycHRpb24=","IE5ld2Nhc3RsZQ==","CWNvbnN0cnVjdG9y","VGVybXM=","IHJpdg==","IHJlbGlnaW9ucw==","V2l0aFRhZw==","Lkh0bWw=","bGlua2Vk","Q29tcG91bmQ=","IE1hbnM=","IGxha2Vz","aXp6bGU=","LnNldFNpemU=","YWJlcg==","IE5lZWRz","cGFja2FnZXM=","LlRhYlBhZ2U=","IHJlZnM=","IGlvdXRpbA==","IERvaW5n","ICJcKA==","IHBoZW5vbWVuYQ==","LkdldEludA==","QUxUSA==","IHBhcmxpYW1lbnRhcnk=","IHJlZnVzYWw=","IGluZXhwZW5zaXZl","IH0KCgoKCg==","IHNvbGlkYXJpdHk=","CXB1c2g=","aGF1bA==","IEJlcmU=","U2l6ZXI=","SW5kaXZpZHVhbA==","IGFuY2U=","IGRpbGU=","IFBlYWs=","KGhy","RWRpdGluZ0NvbnRyb2xsZXI=","SE4=","X1BFUklPRA==","RVRT","QmFubmVy","ZXJyb3JNZXNzYWdl","LkNBU0NBREU=","LWlnbm9yZQ==","IFNJR04=","IE9C","X2Rk","KERFRkFVTFQ=","IHNvbw==","IFZpY3Rvcmlhbg==","IGN1cnQ=","IGRpc2NyZXRl","cnlsaWM=","aW1iYWJ3ZQ==","LnRvRml4ZWQ=","bMOk","LnN0ZGlu","IHF0eQ==","Uk9MTEVS","bW
VkaWF0ZWx5","IHBsdW1iaW5n","IFByb3BlcnR5Q2hhbmdlZA==","YXJyYW50eQ==","IEJyZWFrZmFzdA==","LnNldEhlYWRlcg==","LnB5dGhvbg==","Y29tbWVyY2U=","b3BlbmN2","Pi0tfX0K","RnJlbmNo","RW50aXR5TWFuYWdlcg==","IFBsYWlu","Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8=","wrM=","KFJF","Y2FwdA==","IG9yZ2FuaXNtcw==","IGpldHM=","b2xvY2F0aW9u","IEFwcFJvdXRpbmdNb2R1bGU=","IGdsb3Jpb3Vz","5pyN","IGRpc2NhcmRlZA==","CQkJCSAgICAg","IEFybm9sZA==","bHVn","IHBhcmw=","IGhvcm1vbmVz","IG1haA==","IFNvbmlj","IG9yZ2FuaXplcnM=","X1BMQVRGT1JN","Lmludg==","IGNob3Jk","dmVudGlvbmFs","CW9m","RXBpc29kZQ==","LkVudW0=","dW5rdA==","IERo","IEphcmVk","IE5haw==","IGludGVuZHM=","RW5kaWFu","IGF1c3RyYWxpYQ==","X2N2","KHJlc29sdmU=","IGNsaW5pY3M=","bGlrZWQ=","QVNISU5HVE9O","aW5oYQ==","Jyo=","IE5Q","X2JlaA==","IGhm","IHfDvHI=","Y2F0ZWdvcmlh","JGZvcm0=","IHN1YndheQ==","IGlzQWN0aXZl","cG9wdWxhcg==","Q291cg==","IGNvb2xkb3du","IGFpbnNp","IEdMdWludA==","ZXJlYWw=","IGFycmF5T2Y=","IGhhdGNo","PT09PT09PT09PQ==","cmVzc2Vz","X1BQ","Ll4=","X2RlY2F5","IEJsZXNz","bWV0cmljcw==","IENPUFlJTkc=","IER1bXBzdGVy","IEpvc8Op","IERlc2lnbnM=","PFZvaWQ=","57q/","ID8+PA==","ICJ9Cg==","dGltZXpvbmU=","IGVlcg==","bWF4Y2Ru","IEVTQw==","aWdhcmV0","X2Nvbm5lY3RlZA==","X3JldmVyc2U=","IHF1ZXN0aW9uYWJsZQ==","IFVTQw==","IHR1dHRp","IGRyb3BvdXQ=","IEFjdGl2aXRpZXM=","IFdpbmRz","JykpKTsK","IGNvbmdlc3Q=","xJ/EsQ==","IHByb2xvbmdlZA==","6L+Z","IENyb3NzQXhpc0FsaWdubWVudA==","TEVFUA==","IFZBTElE","IEdheg==","IGRlcGVuZGVuY2U=","IFByaXg=","LkNvbXBpbGVyU2VydmljZXM=","anVtcA==","IHN0cmF0","Y2lyYw==","IENVU1RPTQ==","eGFh","IGJtcA==","IGJ1cmVhdQ==","IHdhcmVu","Tlg=","KFdpbmRvdw==","IENocmlzdGll","X0ZF","IHRu","IE9tZWdh","Y29tbXVuaWNhdGlvbnM=","SG9tZVBhZ2U=","Y29tcGxldGlvbg==","IHN1cHBseWluZw==","WVBFUw==","w6F2ZWw=","5Yi2","KGNsaWNr","XENvbnRyYWN0cw==","L3F1ZXN0aW9ucw==","IGV6","QU1T","Lm1lc2g=","ICc8Pw==","asOg","SW5p","LiM=","IENhcmRpbmFscw==","cGNpw7Nu","Q3ViZQ==","IFBhdGllbnRz","X3ByZWY=","QWN0aW9uQnV0dG9u","KGJ1aWxk","IFZpc
2E=","b3ZlbA==","KEFycmF5TGlzdA==","SWdu","IHJlaGFiaWxpdGF0aW9u","IHBhbGFjZQ==","IHNwZWVjaGVz","fScK","SHR0cFJlc3BvbnNl","CWNvZGU=","RHVtbXk=","IGFjYWRlbXk=","Lm1vdmll","IGluY29ycmVjdGx5","IGN5Yw==","KFVuaXR5RW5naW5l","CWNhbGxiYWNr","IFNhdGFu","IEZVTkM=","IGNoYW50","IEhlYWx0aHk=","OicsCg==","U2hpcHBpbmc=","X21j","IER5bGFu","IFByb2R1Y2Vy","IHJlc3B1ZXN0YQ==","IHBvbGlzaGVk","QnJvYWRjYXN0","IGJhbGFuY2luZw==","IFNsaWRl","IENhcHM=","c3RpbGw=","IGhhcHBpZXI=","IEdvc3BlbA==","dHJhbg==","LnBhdGhuYW1l","QWN0aXZlU2hlZXQ=","IENoYW5n","PlwK","Um9ib3Q=","SnNvbk9iamVjdA==","IERG","IFByb2Nlc3Nvcg==","X3Nob3VsZA==","LnByb3RvYnVm","LXVzZXJz","IGVtYnJ5","Rk9OVA==","IHN0YXJ0dXBz","IERhdGFTb3VyY2U=","KSM=","dXJvcw==","X0NvbG9y","IHN0YW5kYWxvbmU=","fVs=","amQ=","IGZvcmdpdmU=","IG5neA==","IEdlbmVyYWxseQ==","IGNvbmZpZ3VyYWJsZQ==","L29yZGVy","IHZhcw==","JykiOwo=","IFJS","IFRyb3k=","IGNvbXByb21pc2Vk","IFN3YW4=","aW50ZW5kZW50","Q2VudHJhbA==","X2tlZXBlcg==","IGFycXVpdm8=","IFJlYWRPbmx5","X2N1cnZl","a3Y=","ZW50aW4=","6LE=","IEV5","LmltcmVhZA==","IFBhbQ==","aWZmZQ==","YXRpdml0eQ==","eGJj","IGdyaW0=","LWZpbGxlZA==","bmFtZXNl","J106","IGF1cg==","IEdpYnNvbg==","Lk1vdXNlRXZlbnQ=","IGxhZG8=","YXZhZG9j","IGZhbWls","IE1vZGVy","ZnBz","44CA44CA","LWV4YW1wbGU=","IEFsemhlaW1lcg==","IFV0Zg==","X2FyZ3VtZW50cw==","Q29uY2x1c2lvbg==","dGV4dENvbnRlbnQ=","cmVtYWluaW5n","IGludGVycnVwdHM=","IEJhY2t1cA==","IE1vbmc=","IHJlY2VwdG9ycw==","aGlzdG9y","LmNvcm91dGluZXM=","IHNob3V0ZWQ=","QWxhcm0=","IGNvbWJ1c3Q=","IGdyb3Rl","dWx0dXJhbA==","KGlkcw==","LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0=","aXBsaW5hcnk=","T3B0cw==","IFlhbGU=","bG9jYWxTdG9yYWdl","IGVxdWl2YWw=","IEZsZWV0","XGI=","KnBp","IFFMYWJlbA==","5qE=","IHZ4","IEFDTA==","IHN1Y2Vzc28=","IHBlcmM=","IE5vdHJl","IGFuYXJjaA==","UmluZw==","c3Bi","IHN0cnBvcw==","c3RvcmVz","IE1hcGxl","KE1haW5BY3Rpdml0eQ==","KCIiKSk=","IHZpZXdIb2xkZXI=","UXVhZA==","IGlndWFs","b3JzY2hl","Lm1hcmdpbg==","IGluZGll","IGZyYW5j","IEZvcm1
CdWlsZGVy","IFBhcnRpY2lw","LmZsYXNo","IHN0b3Jtcw==","VWx0","IGZlbg==","W25ldw==","RXZlcg==","PSIK","IGxvY2FsaXplZA==","X2ZvbGxvdw==","IG5hdmU=","IGRvbWluYW5jZQ==","KHRpbGU=","Sm91cm5hbA==","IFZD","IHBlbmV0cmF0aW9u","77yV","IGNvbXBhcnRtZW50","IGJpZHM=","Rm9ybWF0dGVk","KioqKioqLwoK","KGNpdHk=","4oCUaXQ=","W0M=","IHVzZUNhbGxiYWNr","YXVi","KT8u","IFZBUg==","IFNlYmFzdGlhbg==","IE1vc3M=","IGFidW5kYW50","R3JlZw==","0YLQsA==","X2Np","IGJpYmxp","Q1JN","IEF0dGVtcHQ=","aXNtZQ==","ZGFzaA==","44CO","X211","LkZvcm1hdHRpbmdFbmFibGVk","SW5kZWVk","LWRpcmVjdA==","IHN1Y2tpbmc=","IHBuZQ==","b2NhYnVsYXJ5","IFBhY2tlcnM=","Lk5hdmlnYXRpb24=","IHBpZWQ=","Y3JpYmluZw==","IFN0dWFydA==","LlRvRG91Ymxl","IFNlY29uZGFyeQ==","U2F2aW5n","IER1dA==","IE1hZGQ=","TWFnaWM=","LEg=","LmRvY3VtZW50RWxlbWVudA==","IEJTVA==","IGRpZmZlcnM=","IG1vcmVvdmVy","X25k","U0VBUkNI","0L/RgNCw0LI=","5rQ=","dG9NYXRjaA==","IGRlY3JlYXNpbmc=","LW1lbWJlcg==","YW1wdXM=","KGJvb3N0","RGFpbHk=","RGF0YUdyaWRWaWV3","IEh0dHBDb250ZXh0","IGhpcHA=","X3dvcmtlcnM=","LWxhbmd1YWdl","6ZM=","IGNvbnNpc3RlZA==","YXRoaW5n","IE1lcmN1cnk=","JGNvbnRlbnQ=","IHByYWN0aWNlZA==","IE1vZHVsZXM=","X0RBWQ==","IHdlYWtuZXNzZXM=","IExvZGdl","IG5hcg==","IE1hdGU=","IGpw","IEh0dHBIZWFkZXJz","IHNtbw==","IFRPS0VO","XSko","IGFxdWk=","c3dhZ2Vu","IHNydg==","CWFucw==","QXJvdW5k","IE1hbnVlbA==","IGZpY3Rpb25hbA==","IElNRw==","IC4n","IEJlcnJ5","IHdhbGxwYXBlcg==","c2V4dWFs","aWVybw==","IOeahA==","7IaM","QmFja2luZ0ZpZWxk","IEFkcmlhbg==","QkFTRVBBVEg=","IHJlcGVhdHM=","IGJsdWVz","IHVucHJlZGljdA==","X2NvbGw=","c3RhY2xl","IFR1bWJscg==","IEVsZg==","IGFzc3VyYW5jZQ==","IGNlbnN1cw==","IElNUE9SVA==","RU5ERVI=","YW5vcw==","ID0o","IEVsbGlz","IgoKCgo=","Lndpbg==","IEFib3Zl","YWxvbg==","X3RpY2s=","IHJlcHJlc2VudGF0aW9ucw==","IOaV","d2lk","IEFybXM=","TGlzdGE=","X2ZhaWx1cmU=","X2Nt","LkZsYXRBcHBlYXJhbmNl","IHRocm9uZQ==","UGF0Y2g=","IFZveQ==","ZW5nbA==","IG5lZ290aWF0aW5n","PmA=","IHNob290cw==","IEZQUw==","LlllYXI=","IEtpc3M=","ZW5jacOzbg==","cmVldGluZw==","RnJvbUZpbGU=","IHJlc2lnbmF0aW9u","2L
c=","IHR3aW5z","xrDhu6M=","IGdlYnJ1","LmdldENvbnRlbnQ=","LlRyZWU=","IEVtcGxveWVlcw==","IEZJRkE=","IGNlcnRhaW50eQ==","KENs","IHRvdGFscw==","ZWRpdGFibGU=","4KWA","LlJlcG9ydGluZw==","TWFz","cXVpZXQ=","LnJ1bGVz","IFZP","Y29uZXhpb24=","LEs=","IGFsbG9jYXRvcg==","IFBvd2Rlcg==","XFJlcG9zaXRvcnk=","QmVhdA==","X3RpcG8=","IFsnJyw=","X0lOVFI=","IDw8PA==","PGhy","Iik9PQ==","dWdnYWdl","IENyYXc=","IMOpZ2FsZW1lbnQ=","IGdpbmdlcg==","IHByaW1lcmE=","IHByb2R1dG8=","bHRr","LlVzZXJOYW1l","IHN0cmVycm9y","bWl0aA==","X25i","IGRpc2NvbWZvcnQ=","J107Pz48Lw==","UVQ=","IGVydXB0","IERhbmlzaA==","XEFjdGl2ZQ==","X2FkYXB0ZXI=","IGJ1YmJsZXM=","cm9sbG8=","b3Jnb3Q=","0L3Ri9GF","VkVDVE9S","b2NvZGU=","IEJ1bGxz","IGJvaWw=","PiIpOw0K","ZHJvcElmRXhpc3Rz","IEJlZw==","X0hBTA==","IGNyb3NzQXhpc0FsaWdubWVudA==","IEV2aWRlbmNl","IHBlY3VsaWFy","IGluc3RpdHV0ZQ==","dmVpcw==","IGZmdA==","w4E=","IHpvZWt0","YW5hbHk=","IEhvbWVsYW5k","IHBlbmV0cg==","dWRkZW5seQ==","CWVsZW1lbnQ=","IEJyZW4=","IFRydWRlYXU=","IEN1YmFu","amFt","dXNsaW0=","X2V2","IHN0ZW1z","fSU=","neWniw==","IGJyYW5kaW5n","IGNvcnJlc3BvbmRlbmNl","LmpxdWVyeQ==","ouWNlQ==","IFJlYWRz","KEh0dHBTdGF0dXNDb2Rl","YXNzaW4=","KHNsb3Q=","IEdyYWR1YXRl","Ly8vPA==","IGluZm9ybWF0aW9ucw==","RU5BQkxF","IHB1aXM=","IGZpbmRlcg==","IEJyaXM=","IG5ldHRzdGVkZXI=","X21pZA==","IG9ncw==","IFN0ZXJsaW5n","IGFycm9n","c3RyZnRpbWU=","fAoK","IHZveA==","IFJlZ2FyZGxlc3M=","IGVzbw==","IENvbWZvcnQ=","LkJvb2xlYW5GaWVsZA==","IHVo","QUNZ","IHNxdWVleg==","IFZpYw==","Y29udHJv","Lmxv","IGlyZQ==","IENvbWVkeQ==","67Y=","IG9yaWdpbmF0ZWQ=","IHNoaXBtZW50","fG1heA==","X2d1aWQ=","bGV2YXRpb24=","0L3QsNGP","KHVuZGVmaW5lZA==","IEREUg==","IHNob290aW5ncw==","IExhdGlubw==","RU5ET1I=","IGF2ZXJhZ2luZw==","IGdyZWV0ZWQ=","IHRoZWF0ZXJz","0L7QtQ==","IGRC","IGdzdA==","IGRlZmluaXRl","LlN0b3JhZ2U=","Lmhlcg==","IGFmb3Jl","IFJlYWxpdHk=","IEdvZHM=","dmVyc2Vk","IGhhbmRzb21l","IGV4Y2x1ZGluZw==","KGFk","UXVvdGVz","IFNjaGVtZQ==","P3E=","IFRhbWls","VGlja3M=","IHBlc3Q=","J24=","IHBvcm5vZ3JhcGh5","X21vZGFs","IC0tLS0tLS0tLS0=","IGRpc3Bvc
2FibGU=","RlJFRQ==","IHNoYXJr","Q0hF","IGRlcGljdGVk","IGRlbW9uc3RyYXRpb25z","IEtpbGxlZA==","IFJVTEU=","IG9ic2Vzc2Vk","IHNpbXBsaWZpZWQ=","UG9zdGFs","IGNvbmNlcHR1YWw=","IHBzdA==","TGFz","X1BST0pFQ1Q=","dWNjZWVkZWQ=","b2x1","xJ9p","IHBlcnNvbmFsaXRpZXM=","IHJlc2hhcGU=","IGVuY2xvc2Vk","CXB0cg==","IHR1dG9yaWFscw==","IGV4cGxvZGVk","X0RJUkVDVE9SWQ==","5YaF5a65","IGNhbm9u","IHJlY29nbmlzZQ==","UEFE","IEFwcHJveA==","IFJlc3RvcmU=","IEltcG9ydGFudA==","IGhlYXZpZXI=","LlNlcXVlbnRpYWw=","RWFydGg=","IE1pbGs=","LnNldFJlcXVlc3Q=","LnRlbQ==","IHJlY29uc3RydWN0","IHNrZXB0aWNhbA==","X1ByaXZhdGU=","QlVG","cXVh","OmE=","IHNlaw==","IGR3ZWxs","b3NzYQ==","IHJld2FyZGVk","0LjQuQ==","KHRvcGlj","X3BhcnRpdGlvbg==","IF9fX19fX19fX19fX19fX19fXw==","S2V5d29yZHM=","IEZyYW5jbw==","TGl0ZQ==","IG5ha2Vu","INC30LA=","T0JKRUNU","IGNyYWZ0cw==","IFN3YXA=","LlhuYQ==","LkNvbm5lY3Q=","IGJhbGNvbnk=","KHJlYWw=","IEJhcm5lcw==","Ymly","IFR3ZW50eQ==","YXlhbg==","YXRhcnM=","IFByb3BlbA==","IElobmVu","VXBncmFkZQ==","IGN1cmI=","LXNlY29uZA==","IG5lcGg=","LnByZXM=","7J6F","LnNlcQ==","IHBhZGRlZA==","Ij8=","amw=","44Os","Jyk8Lw==","IGNpdmlj","Z29ucw==","PmE=","Q29vcmRpbmF0ZXM=","IGVuYWN0ZWQ=","RU5UUw==","IGxhYw==","LmZpbmFs","IFBocFN0b3Jt","Y2FsbGVk","IGlucXVpcmllcw==","Lm1pZGRsZXdhcmU=","IERvd250b3du","Lyc7Cg==","IGtpbG9tZXQ=","YWNjZWw=","IHF1aWVu","d3N0cmluZw==","c2V0RGF0YQ==","IG1hbmVyYQ==","IG1vZHVsYXI=","cmltcA==","IHRhcmlmZnM=","4oCZaWw=","X1RIUk9X","L2NvbG9y","IEhUTUxFbGVtZW50","IGNhcnJv","IHByZXJl","IHBsb3R0aW5n","IFBvc2l0aXZl","IE1hY2hpbmVz","T1RFUw==","4bub","cGxlYXNhbnQ=","IGFsdGU=","IGFpbmRh","dGhlc2U=","IGNvcnM=","aXBheQ==","IEFkdmlzb3J5","IFJ1Ymlv","anE=","IGxpbWVzdG9uZQ==","IGRldGFjaGVk","6K6+572u","dGVuYW50","IERlcHRo","YWxvcmU=","INGB0YLRgNC+0Lo=","IEZPUkU=","IExheQ==","cHJlc2VudGF0aW9u","KScpOwo=","LnN1YnBsb3Rz","z4M=","Tk9X","R2Fy","aGFuZGxlcw==","YWJyYQ==","cHV0aWVz","IEVsZWN0cmljYWw=","TWlkZGxl","cm9waWM=","IEpE","IER5bg==","IEJyaXN0b2w=","IE1jQ2FydGh5","IHN0cmlrZXI=","IGVudW1lcmFibGU=","IEV2YW4=","LmRlZmF
1bHRz","cXVlbmNlcw==","KXx8","CXRva2Vu","4peP","LWRyb3Bkb3du","U1RPUkU=","IEdyYXBoaWM=","KHBw","RXhwbA==","IHVwd2FyZHM=","IERpc3RyaWJ1dGVk","IFdFQg==","SmVy","aXNOYU4=","55Sf5oiQ","PlI=","w7xzc2Vu","ZWZz","IHVuY292ZXI=","IGx1ZA==","LmNhbGN1bGF0ZQ==","IGludHB0cg==","IG1pZGZpZWxkZXI=","LkhlYWRlcnM=","IG1m","ZXJlZg==","Lk1ldHJv","IFNwZWFraW5n","OmI=","IGNyeXB0b2N1cnJlbmNpZXM=","IGRlbW9ucw==","CUVYUEVDVA==","IHdpY2tlZA==","eW91dHViZQ==","OkludA==","IEhpbmRp","IENBVA==","INi5","cmFy","b21vcmU=","L3Blcg==","L2xpY2Vuc2U=","IHJlaW0=","IGF3YWl0aW5n","IGxldGhhbA==","IEVG","cm91bmRlZA==","IFBsYXRpbnVt","INCy0YHQtQ==","LmNvb3Jkcw==","LkRldmljZQ==","L2l0ZW0=","IFdlbm4=","Y29tcGlsZUNvbXBvbmVudHM=","IEtpbmRlcg==","LnJlbW92ZUl0ZW0=","IGFuZGE=","Ym5i","IHByYQ==","KHRyYW5zYWN0aW9u","IGVtYmFycmFzc2luZw==","CUJPT0w=","LmNvbnRlbnRWaWV3","IGV2ZW50ZGF0YQ==","YXRvcmU=","IHByb3ZpZGVkSW4=","aXJtYQ==","IHpvbmE=","X0hX","5pk=","IHN0b3Zl","IGNvdW50ZXJwYXJ0","X1Byb2R1Y3Q=","X01BTkFHRVI=","IGluZnJpbmc=","IEVSQQ==","X3BhcnR5","0ZE=","IGluaWNp","X1JlcXVlc3Q=","IG1pcmFjbGU=","IGNhbmNlbEJ1dHRvbg==","U3B5","YXTDsw==","IHBvbGlzaA==","IE5pY29sZQ==","LmRpc3BsYXlOYW1l","XFJlcXVlc3Rz","IHVzZUhpc3Rvcnk=","Um91dGVyTW9kdWxl","IHN0YXJlZA==","SURFUg==","0YPQvdC60YbQuA==","IG5vdGE=","JGFycg==","cGVjaWZpZWQ=","IHRvcHA=","X0RSSVZFUg==","L25n","5aA=","X3Rt","JXRpbWVvdXQ=","PHM=","ICgqKQ==","IEh0dHBSZXF1ZXN0","X1RSQUNL","KG5vdGU=","IEV4cGxvcmU=","X3NlcnY=","IOe7","QmluZGVy","KyIs","LmF0dA==","IEV0aGk=","IGPDs2RpZ28=","PSdc","LmxpbmVz","KE9m","5bCG","bWlzc2libGU=","IHbDqQ==","IGFjb3VzdGlj","IGNyYWZ0aW5n","bml0","LmJh","IEx1Y3k=","IGlQb2Q=","IHB1cGlscw==","LW1heA==","X3dy","KGNw","IFJFUE9SVA==","IGRucw==","IFJlZmVyZW5jZXM=","IHVuZGVydGFrZW4=","IGvDuGJlbmhhdm4=","IGNoYWk=","IENyb2F0","X0xvZw==","cm93bmVk","X21lZA==","CWRhdGU=","I19f","IGNvc3R1bWVz","IFJlcXVpcmVz","YWZmbGU=","54q25oCB","LVNlbWl0","ZWxhaWRl","0LXRgtC+0LQ=","IHBlc3RpYw==","IGRyYQ==","RE9DVU1FTlQ=","IC4uLg0K","fWB9Cg==","IEF1Y3Rpb24=","IERvY2s=","eHh4eHh4e
Hg=","KGdldFN0cmluZw==","hY0=","IGJvcmRlcldpZHRo","IE1hY2hpbmVyeQ==","IHByZWRpY3RhYmxl","LlNI","IGFtcGxpdHVkZQ==","LmZvclJvb3Q=","SU5hdmlnYXRpb24=","VGFibGVNb2RlbA==","YXR0cmli","IG1hbmV1dmVy","IGV4Y2F2","QkVSUw==","IGRhcGF0","IGluc3RhbGxhdGlvbnM=","LkFzeW5j","IHJheXM=","PeKAnQ==","Ow0NCg==","LmNyeXB0bw==","X2RiZw==","IEVudW1lcmFibGU=","T2ZTaXpl","X2Vwb2Nocw==","bXc=","TUVOVQ==","b3V0bGluZQ==","IFBhcGVycw==","PT09PT09PT09PT09Cg==","IHVuaWZvcm1z","IEdpZw==","LXBhY2thZ2U=","IEplbmtpbnM=","IEhvbWVQYWdl","LmlzU2VsZWN0ZWQ=","IG1lY2hhbmlj","TUs=","IFNvdW5kcw==","Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQo=","IHJlc2VhcmNoaW5n","IGluZm9z","b2dyYXBoaWNz","ZXJzZXQ=","KFsnLw==","IFRpbWJlcg==","LmFnZW50","LnRvSlNPTg==","X2NvbW1hbmRz","cGFyaW5n","X2FkanVzdA==","Lm5vbWU=","KGdsbQ==","U3RhdHVzQmFy","ZmlsZXBhdGg=","P+KAmQ==","IGRldGVjdGl2ZQ==","IHVuc2VyZXI=","IFRpYmV0","RU5ERUQ=","KHNlZWQ=","IHNuZWFr","IGFtb3I=","PSIvLw==","IFBhbnRoZXJz","YWxsYXg=","IExJVkU=","CURXT1JE","XT0t","IHRvcm5hZG8=","L21pbg==","IGx1bmdz","LWN1cnJlbnQ=","IEJvb2tpbmc=","5YiX6KGo","IGVuam95bWVudA==","4KSw","SkE=","dHlwZWQ=","LkJ0bg==","ZmF0","dWdhbA==","IFNoYXJlcw==","IGRpc2dy","IEJBUg==","IEZPWA==","T3Bjb2Rl","IFN6","a2V5ZG93bg==","aWN0aW9uYXJpZXM=","IGRldGFpbGluZw==","fSkpCg==","IHBvaw==","IGRlbW9uc3RyYXRpbmc=","IG5vdGF0aW9u","bGF5ZXJz","QGlm","IE5QUg==","LnN0cmljdEVxdWFs","IFJlY2lwZXM=","LlRlbnNvcg==","IGxpcXVvcg==","IGRlYnRz","LmVuZHNXaXRo","V2hlZWw=","LlBvcw==","Q1NW","JGFyaXR5","IHVuc3RhYmxl","KGxvc3M=","RU5TT1I=","IGVsZXZlbg==","IExvcGV6","IEhvcGtpbnM=","Y29ub20=","IFNldGg=","IHBvZW1z","UXVhbnQ=","IGdzbA==","IHN5cnVw","IHNpYmxpbmc=","IGNhc3M=","LXZvdXM=","w7Z0","X1BBVFRFUk4=","X1NFQ1RJT04=","ZXN0aW1hdGVk","dXBncmFkZQ==","Lm1vbmdvZGI=","IEJvYXQ=","X0NUWA==","IGZldGNoaW5n","dXN0aW4=","cGllbA==","TWFyZw==","UmVmbGVjdGlvbg==","IGR1Y3Q=","IE11bmljaXBhbA==","IGJ4","LkdldEN1cnJlbnQ=","bWxpbms=","IEFjY291bnRpbmc=","IEdlbmV2YQ==","X1Bvcw==","IHBhc3
Nlcg==","IGhlYXJpbmdz","Y29tcGFu","IGZyYWdpbGU=","SW5pdGlhbGl6ZXI=","d2Fsa2Vy","Lk1hdGVyaWFs","IEh1bnRpbmc=","dHJ5c2lkZQ==","IGthdA==","IGNsZXJr","4Z8=","ZG9pbmc=","CWdyb3Vw","IHNhbmN0aW9u","Lmxi","IExhenk=","IENvbnN0cmFpbnQ=","UGFnaW5hdGlvbg==","IHBvdXZleg==","IEluZGljYXRlcw==","TUVS","IGNvdXJz","IHllYXJseQ==","IGdyb3NzZQ==","YWJicmV2","IERPTg==","IHByb2NlZWRlZA==","ZW50bGljaA==","IHByb3BlcnR5TmFtZQ==","IFRlYWNoaW5n","c3RhZHQ=","IGN1dG9mZg==","b3JuZXJz","IGFmcmljYQ==","IHJlbmRlcnM=","IFlhbmtlZXM=","IFRvb2xiYXI=","c3BhY2Vz","LmZpbGxTdHlsZQ==","IHNlZ3VuZG8=","X3N0cmxlbg==","LkZpcmViYXNl","5aSE","IG1lbnRpb25pbmc=","XCg=","IFZhbHZl","U2V0dGVy","IHNwYW5z","IEFsY29ob2w=","IExldHRlcnM=","XHhl","IFRL","X0JMRQ==","LmdldFJlc3VsdA==","PFBsYXllcg==","IFBhdHQ=","IGVhc2luZw==","IHR1cmtleQ==","IEZlbg==","Jyki","IGNvbmZpbmVk","IGluY2x1cw==","U3VwZXJ2aWV3","KHdpdGhJZGVudGlmaWVy","ZW5jaWFs","IHN0dWZmZWQ=","VGhldGE=","IGVjb25vbWlzdHM=","fSkpOwoK","Y29va2llcw==","IFJvb3Nl","IENoZWVzZQ==","IGZpY2hpZXI=","IGVuZm9yY2Vk","QUJC","bm/Fm2Np","X0FMTE9X","IHJlY3J1aXRlZA==","IGV4cGVuZGl0dXJl","LW5pZ2h0","IGFzc2VydE5vdE51bGw=","X2V4ZWN1dGU=","INiv","SU5ERVg=","X0ZNVA==","IHJlc2N1ZWQ=","IE1vbnRobHk=","IENvbnNlcnZhdGlvbg==","IEdlYg==","T2JhbWE=","RXBvY2g=","aWNpZXM=","IE9ydA==","IHNvaXQ=","KGljb24=","RnJpZW5kcw==","bW9s","IGdyb3VuZGVk","IENhdXNl","YWRlbmE=","V0VFTg==","IEx1bg==","SVRJVkU=","Lmxvb3A=","X3VudGls","IGNvcnI=","LmVkZ2Vz","IGh5cG90aA==","Y2hlZHVsaW5n","dHJhbnNsYXRvcg==","INCc","Um9t","44CRCgo=","IFhhbWFyaW4=","IHZpb2xhdGluZw==","LmFuY2hvcg==","LS0tCgo=","IHRyYWRlcg==","QURWRVJUSVNFTUVOVA==","IHVuc2VyZQ==","IERBTw==","IGJsb25k","IFBBVA==","Lmdsb2I=","IOi+kw==","IHNwbGl0dGluZw==","IHVuc3Vic2NyaWJl","IGF0bW9zcGhlcmlj","IFRyaW0=","IGNpdGF0aW9u","IGluZmVyZW5jZQ==","IEZ0","IERhcndpbg==","ZmluZE9uZQ==","IEdlbA==","KENvbnZlcnQ=","IGFjY2Vzc29y","O3RleHQ=","KHNvcnRlZA==","IGp1ZGdlZA==","KTtc","OnA=","IG1laW5l","IFNsaW0=","LkNvbW1hbmRz","IHBlcmNlaXZl","Y29ob2xpYw==","PERhdGE=","LmVudHJ5U2V0","IGFz
c2VydEZhbHNl","IFBhdHJvbA==","ZW5zZW0=","xYLEhQ==","qKE=","V0lEVEg=","IFJlc2N1ZQ==","IFVJRg==","X1RIUkVTSE9MRA==","IE1pY2hlbA==","QVRFUklBTA==","b3BlbnNvdXJjZQ==","IERpYW5h","IGludml0ZXM=","X0JPRFk=","IHJlc2Vydm9pcg==","IHJvaQ==","Y3VzdA==","KHRj","77yBIik7Cg==","IGZlc3RpdmFscw==","IHBlcmZvcm1lcnM=","IGNsaW1iZWQ=","IGp1bmdsZQ==","U3RyaW5nTGVuZ3Ro","IHVubGF3ZnVs","aWVycmU=","dmVydGlzZW1lbnQ=","IHN0YWtlcw==","IGhhdHM=","TW9kaWZ5","IExFVFRFUg==","LkhpZGU=","IHN0YXR1dG9yeQ==","X3doaXRl","IFBlcmw=","dXRlbmJlcmc=","ZW1wbGU=","Lldvcmxk","IG92ZXJsb29rZWQ=","IGNvbmNsdWRlcw==","Lyo9PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09","LXdpc2U=","CXN0cmVhbQ==","cG9wdWxhdGlvbg==","IGV2ZW50bw==","IGlsbHVzdHJhdGlvbnM=","ZnRz","IGF1dG9m","IFByb2NlZHVyZQ==","IGRlc2VydmVk","LXRpbWVz","IGdvbA==","TlNFcnJvcg==","Y3Jlc3Q=","IFBha2lzdGFuaQ==","YW55Y2g=","Z2V0Q3VycmVudA==","IGxhcg==","bnRs","IFJlYmVjY2E=","IG1hdGVyaWE=","IGZpbmRCeQ==","L2Fk","Q2FsbGJhY2tz","IEFscw==","IEthdGll","IE9ic2VydmFibGVDb2xsZWN0aW9u","IERvY3VtZW50YXRpb24=","VHlwZWQ=","IEN1bHR1cmVJbmZv","IFRpbW90aHk=","IGxhdGVyYWw=","InR5cGU=","IHVuYXV0aG9yaXplZA==","IHRlYWNoaW5ncw==","IGRlYnVnZ2Vy","W3ZhbHVl","IGFsb3Jz","IHV6","IHNjYXR0ZXI=","IGRvd253YXJk","IG1pZ2xp","c3RhdHVzQ29kZQ==","ICgpKQ==","IE1X","INC80L7Qtg==","Uk9TUw==","LmJ1Zg==","IGZhaXJ5","IEluZnJhc3RydWN0dXJl","PT4i","dGxlbWVudA==","JCgi","RnJvbVN0cmluZw==","IEJpbGQ=","IGNvbnZlbnRpb25z","X25hdGl2ZQ==","IEluc3BlY3Rvcg==","IFBpc3Q=","dWJhcg==","IHJlZ3M=","IFBpbG90","VGh1cw==","Picr","IGNlbGE=","Lm5ld3M=","KFByb2R1Y3Q=","TGl2aW5n","UnVzc2lh","IGZhY2V0","ZXRpY2Fs","IFsnJA==","L1s=","IERpcmU=","IGdhc2Vz","IElORk9STUFUSU9O","IEVhdA==","IEZvcnVtcw==","IENoYXJhY3RlcnM=","X21ldA==","IOyLnA==","IGtpbmdz","YWNoaWU=","IExhbWJkYQ==","IHRpbWVycw==","IExpZ2h0aW5n","IENhc2V5","YWRkaXI=","YW5kZXg=","LmFuc3dlcg==","IEhpcA==","IFByaW5jaXA=","U3RhcnREYXRl","IOOAjA==","dHJlcw==","ICYj","Lk1heFZhbHVl","IFByb2JsZW1z","IGxhdGV4","T2ZDbGFzcw==","IEx5bm4=","Ly
8n","IHZveWFnZQ==","IHNodXR0bGU=","IFJvbGxlcg==","IFJ1bnRpbWVFcnJvcg==","dXlh","RGlj","CWJ1aWxkZXI=","IGJ1bGx5aW5n","IHNpbXBsZXN0","LmNhbGxlZA==","IExS","IG1vcmFsaXR5","IHN0dXJkeQ==","dHJhY2tpbmc=","LnN3YWdnZXI=","X0JJTkQ=","SVRPUg==","LXVybGVuY29kZWQ=","INGF","IFRyaW5pdHk=","IHRyYXBz","IHwt","IHNldFRleHQ=","IGJhcmdhaW4=","IGJyYWtlcw==","LmdldENvZGU=","IG1pZ3JhdGU=","IHJpYmJvbg==","KXJldHVybg==","IGNoYXJnZXI=","YWNvbQ==","QURJVVM=","IEFtYmFzc2Fkb3I=","LWFmdGVy","IGFubmk=","CXNwaW4=","Q29uY2VwdA==","IEhlbmRlcnNvbg==","IEhPU1Q=","LnJhbms=","IE5vcnRoZWFzdA==","IGJlcmxpbg==","IHJlcXVpcw==","LmZlZWQ=","IHNvdXJjZU1hcHBpbmc=","IFJlbmNvbnRyZQ==","LmFqYXg=","bmVzdGpz","IHRyZWs=","IE5hY2lvbmFs","ICZb","IHBheWFibGU=","b3J0ZXg=","IGRlcHQ=","ZmllbGROYW1l","IGNvbXBsZXRlcw==","IFJWQQ==","IG9uaW9ucw==","YWxpZ25tZW50","Rm9ybWF0cw==","ICd7JA==","SGFzaFNldA==","IEJvZA==","LkludmFyaWFudEN1bHR1cmU=","IHNldHRsZW1lbnRz","IGh5ZHI=","LnVwZGF0ZWQ=","dmVudGg=","KHNlY29uZHM=","PSIvIg==","IHdlYnBhZ2U=","KAoK","IHRpcg==","IHRvZXM=","IEJyaWNr","IGFtYml0aW9u","UG90","PW1heA==","RVRJTUU=","IGRlcG90","Y2FsbHM=","IE5vcndlZ2lhbg==","YDo=","IGJ1cmdlcg==","IHByb2Zlc3NvcnM=","IEFsbG9jYXRl","LXRoaXJkcw==","LWNoYXJ0","IGZvcmQ=","Kk4=","LmtvdGxpbg==","IHBhcGVyd29yaw==","IERFVklDRQ==","JUAiLA==","cmVzcGVjdA==","KG1w","6auY","LWlm","IGN1c2hpb24=","b2JvdA==","IHBhcmM=","U1BBQ0U=","IE5ldGFueWFodQ==","IHNlbGZpc2g=","ZmVhdA==","IGNsaWVudGVz","LXRvb2xz","IHBvcmNo","IGpx","LnZlcmJvc2U=","IGxpYmVyYWxz","XSkKCgo=","cGllcw==","Tm90Qmxhbms=","KHRlcm0=","yJtp","X1BhcmFtcw==","Lm5vcm1hbGl6ZQ==","QnVsbGV0","QVNJQw==","KGhleA==","X2NsaWVudGU=","Kyw=","X0RJ","IGZvcnRoY29taW5n","fSIpXQo=","c2Vv","VW0=","Pk5hbWU=","IGNvbWZvcnRhYmx5","aXJlY3Rpb25hbA==","V0lUSA==","L3By","IFBvb3I=","IFZpdGFtaW4=","dmlj","R0g=","IHByaW9yaXQ=","IE5O","IENsb3NlZA==","pO0=","IGlzT3Blbg==","XENvbnNvbGU=","QW5kRmVlbA==","LlNVQ0NFU1M=","X09QRVJBVElPTg==","cG9sYXRpb24=","IFRhcw==","cHN6","Picu","Q1VSUkVOVA==","VmVuZG9y","aG9zdHM=","IEVyZA==","PnRhZ2dlc
g==","IHNvdXJjZU1hcHBpbmdVUkw=","IG1hcmF0aG9u","X2Nsb3NlZA==","IGV4ZW1wdGlvbg==","IHJlY29nbml6ZXM=","aWRlc2hvdw==","JyQ=","KCcvJyk7Cg==","bWl0cw==","d2Fyeg==","IENoZXJyeQ==","taw=","bm9y","cG9ydGU=","IHds","X2JhY2t1cA==","LmdldEJvb2xlYW4=","LmdldFJlc291cmNl","IGRlZmluaXRpdmU=","LkVkaXRUZXh0","IHPDrQ==","LkNPTlQ=","IFBMQVlFUg==","LmNhcmRz","IFNob3Jl","KCcvJykK","Y2x1aXI=","V2ViRHJpdmVy","KG1vbnRo","LXJlbGVhc2U=","IGluc3BlY3Rvcg==","5aM=","IE5G","X2NsaXA=","5a2Q","IGludGVyYWN0aW5n","LnRtcA==","ICcnJwoK","IGRlZQ==","IGZyb3N0","Il0pKQo=","IFBsYWNlcw==","VGhyb3dz","Zm9yaw==","L2RheQ==","aVBob25l","IE1JQw==","IGZvbGRpbmc=","IGNyb3Jl","IENoaWVmcw==","cGhlcmljYWw=","KHByaWNl","LldyaXRlU3RyaW5n","IGV4aXRpbmc=","XScsCg==","aWdodGluZw==","SW5ncmVkaWVudA==","KHZlcnRleA==","IHNjcm9sbFZpZXc=","aGY=","Om5ldw==","U0VO","c2VjdG9y","IHNwaW5z","IFNjaGVkdWxlcg==","b3RlY2hu","c2VtaWNvbG9u","Rm9udE9mU2l6ZQ==","IFNwZWNpZmljYWxseQ==","ZmxhbW0=","Lk9iamVjdElk","IGNvbnRh","X3Blcm1pc3Npb25z","CUZST00=","SUNPREU=","L2tn","IEhvdGVscw==","LW1lZA==","IERpbg==","IG5hdnk=","Z2V0UGFyYW0=","IG1lbmQ=","IHBvcnRyYXllZA==","IE1ldHJvcG9saXRhbg==","UGFpbnRlcg==","IHJlZmVycmFs","X2dvb2Q=","IG1hcnZlbA==","b3NhaWM=","Pigm","LnVy","IGVzdG9z","V2lsbGlhbQ==","IHRpbWJlcg==","IHF1ZWxxdWVz","IERvY3VtZW50cw==","LlhhbWw=","IGJhdGNoZXM=","6YGT","IFJlbGVhc2Vk","VGFpbA==","Q09PS0lF","aGVpZA==","X3N0YXRpb24=","IFZpYQ==","U2FsZQ==","IFJlcGVhdA==","IHByb21pbg==","IFpv","LWZvcndhcmQ=","IElvbg==","aXRhcnk=","IGp1cw==","LXJlcXVlc3Q=","IHByb3VkbHk=","IFN0cmVhbWluZw==","KE1vdXNlRXZlbnQ=","IFNwcmludA==","X3JvdGF0aW9u","UmVwb3NpdG9yaWVz","IHRhcnQ=","INGB0LI=","IG1hcHBpbmdz","6Ko=","Q3U=","Q3ljbGU=","IGJ1bg==","CWx1YQ==","44OJ","ICgoIQ==","IGNvbGxlY3RpdmVseQ==","IENvbmQ=","IHdzenlzdA==","KGxpYg==","b3BlbmhhZ2Vu","X3NraXA=","LkNvbHVtbkhlYWRlcg==","6YI=","cGVyaWVuY2Vk","j+i/sA==","X3Byb3Bz","IGNvbnRyYWNl","IG1hdGNodXA=","YWJldGlj","Lm1lbWJlcnM=","UkVDVA==","KGRhdA==","IHNvZw==","cmVub20=","X01ldGhvZA==","Q3VzdG9tZXJz","ZnVsbG5hbWU="
,"Wk4=","cmV0cnk=","IGthcA==","IE5ldQ==","6Io=","YWRkQ2hpbGQ=","d2lsbFJldHVybg==","X3Blcm1hbGluaw==","IGVuZXJnZXRpYw==","IFdldA==","IE1vcnI=","IGdjZA==","Y291bnRz","LHR5cGU=","ZGln","KExvZ2lu","IGNyYWNrcw==","IGJhY3RlcmlhbA==","IE1lYXQ=","IEFybXN0cm9uZw==","IEJyb256ZQ==","IGFwcHJveGltYXRl","X2RpcnM=","bGlnYQ==","xYJhZA==","IGtpbmRuZXNz","IGNvbnRyZQ==","IEVWRVJZ","TUVU","IGFubm91bmNlbWVudHM=","Z3Bpbw==","IFdhaXRGb3JTZWNvbmRz","IFBob3Rvc2hvcA==","IGRpc2NvbnRpbg==","L2Rk","IHRvcG9sb2d5","YW5pY2Fs","LmludGVyZmFjZQ==","YXVjb3Vw","Lkhhc2hTZXQ=","QVJJQU5U","KHJvdXRlcw==","IFRlaA==","IGh5cGU=","XSIpLg==","IHNsYW0=","IGJyb3Ro","LWludGVy","IFJpZA==","LW1hbmFnZXI=","Q2FuY2VsYXI=","IFBhZ2luYXRpb24=","IHNvdW5kdHJhY2s=","IHBvc3Rlcmlvcg==","IHNjcnVi","Y3JlYXRpbmc=","LSo=","aXJ0ZWVu","LmR5","LnN5bW1ldHJpYw==","ICIiLg==","PT09PT09PT09PT09PT09","IGNoYXNzaXM=","IG51bWJlck9mUm93cw==","RGV2ZWxvcGVy","X2JpbnM=","IE9VUg==","cmllYg==","UHJvcw==","IHdpxJk=","ImQ=","IGFzeW5jaW8=","emVpZ2Vu","X3NwaQ==","LkFMTA==","IHNjcmV3cw==","Q2hpbmVzZQ==","IGFwaUtleQ==","IHVuc3VjY2Vzc2Z1bA==","IFNlYWhhd2tz","T1JH","56ug","IHByb2Zlc3Npb25hbGx5","IENvdXBvbg==","5a2X5q61","Q29udmVudGlvbg==","IHBvbHlt","5omL","IHNhbHZhdGlvbg==","IGVuZ2luZWVyZWQ=","IFdyZXN0","IEdDQw==","IHdhcm1lcg==","TGF5b3V0Q29uc3RyYWludA==","IGFnZ3Jhdg==","U2NyaXB0cw==","dmVudHVyZQ==","IHJlZnJpZ2VyYXRvcg==","IGlubm92YXRpb25z","IFJ1bm5lcg==","TklD","IFJvbGxpbmc=","Q29udHJvbEV2ZW50cw==","IGxvb3M=","cGFj","CXBhbmVs","ZWZl","IEJ1ZGRoYQ==","LS0tLS0tLS0tLS0tLS0K","5bqT","KGZvcktleQ==","IGx1bWlu","ICg/","IEFJRFM=","LHVzZXI=","aW1pZW50b3M=","Y29udGVudFR5cGU=","YW50bHI=","6aY=","IFdlbHQ=","UHJvZHVjdGlvbg==","bWlnaHQ=","IFZJSQ==","Iiwo","IG9ic2VydmluZw==","IGRlbGliZXJhdGU=","KGNvbnRyb2w=","IHdpdGhk","IHNlbWFuYQ==","U1RBQ0s=","dWNoZW4=","TmljZQ==","IERldXRzY2hsYW5k","IFNwZWNpZmllcw==","ZG1h","aXppbw==","IEZhY3Rz","X3BvcHVw","IERpcmVjdG9ycw==","ezo=","W1I=","INGN0LvQtdC80LXQvdGC","IHBsYXQ=","IGRpcmVjdGluZw==","5LiJ","IEdpbGJlcnQ=","4oCmLgoK","LnFtbA=="
,"IHRoZXJlYWZ0ZXI=","IGRpc3Bvc2l0aW9u","ZHJhZnQ=","IHN1cmdlb24=","IEluc2lkZXI=","QmxlbmQ=","IFRyZXY=","dHJpbnNpYw==","VG9waWNz","cmlldmU=","X0ZJTEVOQU1F","IGF1dHJlcw==","Sm9zZQ==","UHJvZHVjZXI=","ZXJ1cw==","IHBldGl0","IE5FWFQ=","IEZpbHRlcnM=","IHJlcGxpY2F0ZQ==","Il0pLg==","IGxlbmRlcnM=","XSIsCg==","O2NoYXJzZXQ=","Q3BwT2JqZWN0","IGZsb3JhbA==","IFRpcG8=","IGNpcmN1aXRz","ZWFzeQ==","KCYk","aXR0YQ==","ZXJ5bA==","X0NPTU1PTg==","J319Pgo=","LWJhY2tlZA==","KHZhcmlhYmxl","KEluZGV4","IHZvaXI=","X2xvY2F0aW9ucw==","Kyspew==","IExvdWlzdmlsbGU=","IGdyYXRpdHVkZQ==","Lk1vY2tpdG8=","IFBvd2Vycw==","aWV1cnM=","IGdlb2dyYXBoaWM=","cmFsZQ==","IGNyYQ==","IFNwdXJz","aXBoZXJ0ZXh0","QUNJT04=","LWNvbW1vbg==","IHZpY3Rvcmllcw==","IEZpbmFscw==","LnNodWZmbGU=","LW1pbGxpb24=","X1BST0M=","YXNzdW1l","IGlscw==","REJD","Qm9vdFRlc3Q=","IGxhdm9y","LnRlc3Rpbmc=","LmFzdA==","Il0v","bW9pZA==","IHF1YWxpZmljYXRpb24=","Z2VzY2g=","CXB1dA==","IGFpcnBvcnRz","Skk=","VGVhY2hlcg==","X3VuaWZvcm0=","IG5hbWE=","IEJhc3Q=","ZXJ0eXBl","Y2FwdHVyZQ==","Z2V0QWxs","IFJleW5vbGRz","b29sZWQ=","LmNvbW1lbnRz","IGNoaW4=","KS4q","INC40LvQuA==","dGds","dWRvcw==","IGTDrWFz","Y2hhaQ==","LnByb2dyYW0=","IHBzeg==","CWljb24=","cGhpbA==","ZW50cmFs","X1dSQVA=","b3Zp","IG5vc3RhbGc=","SW5maW5pdHk=","CXlpZWxk","IHZpdGFtaW5z","UXVhdGVybmlvbg==","U2luaw==","X2dvb2Rz","IC4uLi4uLi4u","IFdpbmdz","dXJpZGFk","LXN0b3J5","Il0pCgo=","aWRlbGl0eQ==","VHlwZURlZg==","R3Rr","IO2M","X01haW4=","IGNoZXo=","IFJhdmVu","IHBheXJvbGw=","IGZyZWVsYW5jZQ==","TExV","IE1lbmQ=","ZWRheQ==","QXBpTW9kZWxQcm9wZXJ0eQ==","LkZvcm1Cb3JkZXJTdHlsZQ==","IGVjb25vbWlzdA==","c3RhbmJ1bA==","IGZyZWlnaHQ=","LUFnZW50","KG1ldGE=","IHN5bW1ldHJ5","ICcuLg==","LkNhbGVuZGFy","LWF1dA==","Z2Y=","cGVudA==","eWNsb3BlZGlh","IHdpc2hpbmc=","CgoKCgoKCgoKCgoK","IGdlbnRsZW1hbg==","IOqz","PSM=","IGxlY3R1cmVz","4oCcSW4=","ICFf","IGhi","IFZlbmRvcg==","UmVjZW50bHk=","X25vdGVz","5o+Q56S6","Ik15","SGVhZGVyc0hlaWdodA==","X1NP","IHVud2lsbGluZw==","IHN1cGVyaGVybw==","Z2lv","cHN5","IFBlZXI=","amF2YXg=","JmFwb3M=","I
ENyaXNpcw==","b3JkaW5hbA==","TWVtY3B5","KysrKysrKysrKysrKysrKw==","LXZhbA==","IHdvcmtib29r","LWFw","PWs=","IG1ldGFsbGlj","X3BlZXI=","QnlQcmltYXJ5S2V5","X1NE","dWF0b3I=","X1NIQURFUg==","KU1hdGg=","LlRyYW5zZm9ybQ==","IGNvd3M=","UGhp","IENsZW0=","KF8oIg==","IEx1ZA==","LWRlbGF5","IFNlY3VyaXRpZXM=","IE9ydGhvZG94","U3ltZm9ueQ==","KHJlcG9ydA==","IGVudGVydGFpbg==","RVBT","aXpvcGg=","ZXh1YWw=","SVJE","5LuO","IGxpdGg=","IHNhbml0aXpl","IGZlbWluaW5l","SVNCTg==","LmF1dGhlbnRpY2F0aW9u","X3BpcGVsaW5l","L2NvbnN0YW50cw==","IENPTkY=","IGx1Y3I=","cmljaWE=","LnR0Zg==","LnNldENvbnRlbnQ=","IHN0YW4=","b3JlYW4=","IExsb3lk","LnJhd1ZhbHVl","IGdvcg==","IEJyb3ducw==","UmVncmVzc2lvbg==","IGxvd2VyaW5n","bmFpc3NhbmNl","IGJsb3dz","IGFtYXplZA==","IHVucmVsYXRlZA==","UmV2aWV3cw==","IHJ1Ynk=","IE1vZGlmaWVy","IGdpYW50cw==","LnRocmVhZA==","IGNvbnRhaW5tZW50","IFN0YXJ0Q29yb3V0aW5l","dW1hdA==","b3JlbGVhc2U=","IFJhbmR5","QGVuZGlm","RGlnZXN0","IHN1YnVyYmFu","PSIpOwo=","IGFubm9uY2U=","LnZhcmlhYmxl","XEZvdW5kYXRpb24=","IGFjcmU=","VmFu","IHR1cGxlcw==","ZG5z","IFN0YW5kaW5n","X2xhcmdl","IGJveGluZw==","U3VwcG9ydEFjdGlvbkJhcg==","IEZvcnR1bmU=","IFJ1bQ==","X211bHRpcGxl","YXJjaGljYWw=","IGZ3cml0ZQ==","X3F1b3Rl","IGZvb2xpc2g=","IGNvbXByaXNpbmc=","INC+0L8=","LXNlbGVjdGVk","dmY=","bWFpZA==","TmFtYQ==","KGRhdGV0aW1l","IGluZGlyZWN0bHk=","Z2FydA==","Zml4dHVyZXM=","Y2hvcw==","IEhhbG8=","IHJlY3VycmluZw==","LW5ld3M=","dmls","IE51cnNpbmc=","LXByb2R1","IEhR","XEh0dHBGb3VuZGF0aW9u","ZW5jaQ==","YXVlbg==","IHZ5","b2NyYWN5","IGRlbGVnYXRpb24=","IGFzcGhhbHQ=","IHNldFNlbGVjdGVk","a29r","L3Jlc3Q=","bWV0aWNz","IE5TRGF0ZQ==","IHRyYXZlbGxlZA==","IHJlY2li","IG1pbWU=","Q0xJRU5U","IEdV","IEhBTkRMRQ==","L1E=","W3o=","IGJvdGhlcmVk","IEJCUQ==","w6dhcw==","X2V4YW1wbGVz","X0ZJTg==","IHdoaXRlQ29sb3I=","IGFzdHJvbm9t","LWRpcg==","IHNvdmVyZWlnbg==","IGJyZWV6ZQ==","IGlubmluZw==","IEVkbW9udG9u","Z2xp","LmJsb2dzcG90","anN4","IHZlcnNh","IE1vaGFtbWVk","LkpvYg==","LXRvZ2dsZXI=","INC/0L7Qu9GM0LfQvtCy0LDRgg==","YXJkb24=","IG5ld2Jvcm4=","IG5hdmFs","bm90ZXE=",
"IHR1bWJscg==","IGhlbnRhaQ==","IFR5cGljYWxseQ==","IGxvb3Q=","LlNwcml0ZQ==","RmxpZ2h0","IHdhdmVsZW5ndGg=","LXNr","IEVsbGU=","X2V4cG9ydHM=","INGP","IElI","aXpvcGhyZW4=","IO2B","X3ByaW1hcnk=","IG1vaXM=","IEJO","IHN5c3RlbWlj","IGRpZmVyZW50ZXM=","SU5DVA==","ICcnCgo=","JHE=","V2lkZ2V0SXRlbQ==","Y2xpZGU=","JGZpbGU=","TGVtbWE=","L3RhYmxl","YWdyaWQ=","IE1vbmdvREI=","aW50ZQ==","IGFwcHJlbnQ=","wq1pbmc=","LkRi","IMOC","aGFtbWVy","PScnOwo=","IGJyb2tlcnM=","aXRsZW1lbnQ=","c2VtYmxpZXM=","RWxl","e3g=","IGxhc3RuYW1l","PC0=","IGZsYXR0ZW4=","X2JhbmQ=","LlJvb3Q=","LnJlYWRGaWxlU3luYw==","PT09PT09","LnJ4","Pw0K","IG1ldGFwaG9y","VGk=","Y29udGU=","IGRlYml0","IGNvbnRlbXB0","Q3BwVHlwZQ==","5pSv","Rm9ybUZpZWxk","cmF0aW8=","b3NvcGhlcg==","IGltcGxhbnQ=","UFVSRQ==","IGFsdGE=","X21hbmFnZW1lbnQ=","IHJlZmluZQ==","IENoZWNrQm94","IENoYXJs","LXZlcnNpb24=","Y29uZGl0aW9uYWw=","dmVudWVz","IHJpZmxlcw==","IG9mZnNwcmluZw==","IG1pbGxpbmc=","IHNoYXJwbHk=","IHVuZGVyd2F0ZXI=","KG9yaWdpbg==","X0NvbnRyb2w=","IC4k","UGx1Z2lucw==","IGRyeWluZw==","IGlsbHVzdHJhdGVz","LXU=","IHZlZ2V0YXJpYW4=","bnBj","SGVhcnQ=","OycsCg==","Y29tbWE=","dGVlbnRo","YXNhbg==","L3NwZWM=","X21vdmVz","LW1hcmdpbg==","IGluZ2Vu","wqDCoMKg","IHByb2pldA==","IG90cmE=","IGJyYXM=","LnV0Yw==","IHNsZXB0","PXN1Yg==","YWJpbGl0","cG9zdGVy","IHNkaw==","b3VuY2lsbA==","IHdk","UHJlcGFyZWRTdGF0ZW1lbnQ=","IERydW0=","KGF0dHJpYnV0ZQ==","IEV0aGVybmV0","CURC","Q2FsaWZvcm5pYQ==","Y3ViZQ==","W0k=","LkNyZWF0ZWQ=","IEhN","IHRyYWNpbmc=","Rm9ybXNNb2R1bGU=","LXlvdQ==","LmN1cnJlbmN5","ZmVlZGluZw==","IHRib2R5","TGk=","YWNjaW9u","bmFz","IHRyb3V2ZXI=","Tk9ORQ==","In0sDQo=","IGZ0cA==","V2l0aElkZW50aWZpZXI=","cG9sYXRl","RmlsZUluZm8=","IHB1cnN1ZWQ=","ICAgIA0KICAgIA0K","REVTQ1JJUFRJT04=","fSovCg==","RnJvbU5pYg==","IGRlY29yYXRpdmU=","X1NTTA==","KGNoYXQ=","VExT","IHN1cnByaXNlcw==","YWxjdWxhdGU=","IFNwbGFzaA==","KENvbmZpZ3VyYXRpb24=","IFNFTQ==","aW1zb24=","L2xpYnJhcnk=","PERvdWJsZQ==","LnJvYm90","wqDCoMKgwqDCoMKgwqDCoA==","IENQRg==","IFVuZGVyc3RhbmRpbmc=","IGNvc21ldGlj","IFh0","dGlwc
w==","K2s=","KCIn","IFBEVA==","V0FS","LmdldE9iamVjdA==","IFRyYWRpdGlvbmFs","LnNsdWc=","IERpcGw=","PSIiLA==","IEZpbG1z","IEFuaW0=","LmhlbHA=","IGVtYmFzc3k=","IEJvb3Rz","IGJ1bms=","LXJpc2s=","IHBjaQ==","IC9cLg==","IElQVA==","IGNyYXNoaW5n","IGlwdg==","X2tl","IFJFU1A=","LkxvZ0Vycm9y","IGluYWRlcXVhdGU=","SW9u","IEbDvHI=","cmljdWxh","IHNob3VsZEJl","YWxyZWFkeQ==","J10uIjwv","IFN0dWZm","RGlnaXRl","IHRyYW5zbGF0b3I=","X3Nwcml0ZQ==","bGV0YWw=","IG1haW9y","IFNleGU=","dGhhbmtz","IENvbXBsZXRlZA==","IGdhc29saW5l","LmF0dHJz","YmFnYWk=","IE9yaWc=","Ol0s","LmxvY2FsZQ==","IFJvbWE=","w61m","IGZhdm9yZWQ=","IHZhaW4=","IHNwb29u","IEphaHJlbg==","IG5pbmc=","V1dX","LGZsb2F0","X0RBVEFCQVNF","Qm9vdHN0cmFw","IENCQw==","IENodW5r","X2ludG8=","IEtvbA==","IGRlZmVuc2Vz","b3JlZFByb2NlZHVyZQ==","YmFsbHM=","VGV4dENoYW5nZWQ=","IHNoYXBpbmc=","IH19Pg==","R0VE","ZmFx","IG9wdGlvbmFsbHk=","X0Rpcw==","IFN1Y2Nlc3NmdWw=","IENlbnN1cw==","IGluY2FyY2Vy","X0NBUkQ=","IGF2aWF0aW9u","IEd5bQ==","QXV0aG9yaXR5","LkJlYW4=","c2hhZGVy","Tm90RXhpc3Q=","X1RleHRDaGFuZ2Vk","IFNUT1A=","KHRlYW0=","Ikg=","d2c=","IGdyaW5kZXI=","IHN0cmlwZQ==","IHByZXNlcnZhdGlvbg==","Q2xhaW0=","YXZlcnNhbA==","d2FyZWhvdXNl","dGFyZ2V0cw==","VHJ1c3Q=","IGFsbGV2","LHd3dw==","b3Vzc2U=","X2NoYW4=","X1NpemU=","c3lzdGVtcw==","IG9iamVjdGlvbg==","IEthbmU=","IGNvcnJvcw==","IERTTA==","IHVh","IE1I","IFN0cmF0ZWdpYw==","X3RjcA==","IOqwkg==","IGJvcnJvd2Vk","IEFjaA==","CWNvbW1hbmQ=","IGdwcw==","bGVzdG9u","aWNoZXZlcg==","IFVB","IGFzc2F1bHRlZA==","IHNwZWNpYWxpemVz","CXNlYXJjaA==","SG90ZWw=","ICAgICAgICAgICAgICAgICAgICANCg==","IFBpdGNo","INmB","UkVBRFk=","IHBhcmVudGFs","IGfDqW7DqQ==","IGRvbm7DqWVz","IGRldGFpbg==","VEFSR0VU","IHByb3RhZ29uaXN0","IGNsZWFySW50ZXJ2YWw=","IEljb25CdXR0b24=","IEdldEFsbA==","VHlwZUluZm8=","RUg=","4oCcVGhleQ==","IHtb","IGdhZw==","INqp","IERyb3Bkb3du","LmZyZWU=","Z29uZQ==","aW1lbnM=","IGluc3RhbA==","CWN1cmw=","X0NBTg==","IEJvbmU=","77yU","b255bXM=","LWdvdmVybm1lbnQ=","LmJpbmRpbmdOYXZpZ2F0b3I=","IERhbnM=","IE1jTA==","KGVu","Pihf","0JLRiw==","Lio7DQ
o=","PWo=","LWNvcg==","U29u","LlRvb2xTdHJpcEl0ZW0=","LWFyb3VuZA==","X1hNTA==","ZW5kRGF0ZQ==","IHNsYWNr","IHJvdGF0ZWQ=","IG5vcWE=","IGNvdHRhZ2U=","IGVuY29udHJhcg==","X3NraWxs","aG91ZXR0ZQ==","IQ0K","LndlYXRoZXI=","IGVtcGhhc2l6ZWQ=","5a62","INGB0L/QuNGB","IENvbXBpbGVy","KGFuZHJvaWQ=","IOKAug==","LnR1cm4=","IHN1cHByZXNzaW9u","X2NhbGxz","ICpA","KHN0cmxlbg==","LmhleA==","IEJpbGxz","IFJTQQ==","z4I=","IEVzY2FwZQ==","ZW1lbnRpYQ==","IGZyb250ZW5k","IHBpbnQ=","X2V4Yw==","enpv","W10sCg==","ICInLCci","LkVudmlyb25tZW50","IGFmb3JlbWVudGlvbmVk","IGVuZHVyZQ==","cHJvdG90eXBl","dGhlcmFweQ==","c3Np","RGVn","X3BsdWdpbnM=","LnVzZXJJbmZv","UHJpbnRlcg==","IFBST0dSQU0=","IHJ1aW5z","IGVtcGlyaWNhbA==","IGNyYXds","IEJvaWxlcg==","LWNvbW1lbnQ=","LnN1YnBsb3Q=","X2V0","ICcuJyw=","bWlub3I=","IEN1c3RvbXM=","IHlhdw==","dW5kZXJsaW5l","IENvbW8=","KCgn","KG1lYW4=","IGNoYXF1ZQ==","IEJsb2Nrcw==","LnJhZA==","aWxpYnJpdW0=","IHdlYmRyaXZlcg==","IG1lbGhvcg==","ZGFuYQ==","IEFidXNl","IFNvdXRod2VzdA==","IFBhcmVu","UEVSVElFUw==","CUlM","IHNjcmVhbQ==","dnU=","IGluY29tZXM=","IG5pbQ==","IGxhY2U=","IGNvbXBlbnNhdGU=","UmV2ZXJzZQ==","RGF0","X2F0dGFjaw==","IG5vdXI=","YWNoZW4=","Y2Vr","PEZ1bmM=","d2ll","Y29tcHJlc3NlZA==","LW1hdGNo","KCIiKV0K","aW1pemVk","Lm9yaWVudGF0aW9u","LmNvbXBhcmVUbw==","IG1hc3NhZ2dp","IOychA==","IGVsYm93","IGFudGlveGlk","dW5kcmVkcw==","L3Rvb2xz","IFJPVw==","YW5tYXI=","IFdvdw==","X3RpY2tldA==","UHJvZ3JhbW1pbmc=","IHRoZW9y","LXJldmlldw==","KCkpKSk7Cg==","IFJpY2hhcmRzb24=","IFBvY2tldA==","XVtd","YW1wcA==","X2hlYWx0aA==","IFBPUA==","IE5hdmFs","R3Vlc3M=","IGFuY2VzdG9y","LkdldEFsbA==","LmxvY2FsU2NhbGU=","IE1hcHBlcg==","IGFjY3VtdWxhdGlvbg==","IHNpbXVsYXRlZA==","IERyaXZlcnM=","IGTDqXM=","Y3VycmluZw==","IGVsZXBoYW50","IGFkdmVydGlzZWQ=","IG1haWxib3g=","U0hJRlQ=","IE1vbmljYQ==","IGFuYw==","IHdhcmRyb2Jl","SW5ncmVkaWVudHM=","IHx8DQo=","aXBweQ==","IGFudGliaW90aWNz","YXZpbmdz","KGN4","IEZlcnJhcmk=","IEFuaW1hdG9y","LmR0eXBl","cmVtb3ZlZA==","b3JkZXJieQ==","IGNyZXM=","b2PDqg==","IHB5bQ==","IENpcmN1bGFy","QGluZGV4","IF
dhcm0=","U2F5","IEFzc2lzdGFuY2U=","IGN1cnRhaW4=","IE1vbnRl","SUxFUg==","IENWRQ==","IER1Y2s=","IEFsbG93cw==","X2ZpcmU=","IERlcmJ5","IHJlcG9z","IGh0dHBDbGllbnQ=","IHBzeWNoaWF0","IG5vd2FkYXlz","IGNhdXRpb3Vz","IENvbXB1dGluZw==","IGNvbXBsZXRpb25IYW5kbGVy","IFdlbHNo","IEJFU1Q=","IHN0cmVzc2Z1bA==","X1BF","5pel5pyf","IERhdGFGcmFtZQ==","CUludGVnZXI=","X1ByaW50","TW92ZXM=","IHRyYW5zZm9ybWluZw==","LkJhdGNo","eWFob28=","UG9zaXRpb25z","emVq","IG5vb2Q=","aW9yZXM=","Xyo=","IGNsaw==","IEZsb3lk","IGhhcA==","Zm9udHNpemU=","IG5heg==","Lm5vdGlmaWNhdGlvbg==","IERlcHJlc3Npb24=","IGFjbmU=","KioqCgo=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCg==","LmNvbnRlbnRz","eW50aA==","IFN0cmFpZ2h0","Jyl9fSI+PC8=","IGJ1bGI=","Ulg=","Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0K","IGNvbXVuaWM=","IFJO","LW1lZGl1bQ==","TEVBTg==","PWxlbg==","UGhvbmVOdW1iZXI=","ZXJ2YXRpb25z","QWNjdXJhY3k=","IEFubm90YXRpb24=","X2tleXdvcmQ=","X2hpbnQ=","IEF0aGVucw==","IGFzc2lzdGluZw==","IEhD","LkluaXRpYWxpemU=","JykpKQo=","dXBh","IHN1aXY=","IElQQw==","PFRFbnRpdHk=","IGJyYW5kZWQ=","b29tbGE=","bGFyxLE=","IFhNTEh0dHBSZXF1ZXN0","IGTDqWrDoA==","IHRyYW5zY3JpcHRpb24=","IHByZXZhbGVudA==","LnBsYW4=","IHN0YXJl","IHdvcmtvdXRz","IEVkdWNhdGlvbmFs","IG1lc3N5","IE1PVA==","LkNvbW1hbmRUeXBl","UWVk","KGdjYQ==","IExpbmVhckxheW91dE1hbmFnZXI=","IEJsb3c=","IEFsdW1pbnVt","IHN3aW5nZXJjbHVi","IFRyYW5zaXQ=","IGV4cG9z","dmly","KHNlY29uZA==","IGJlbG9uZ2Vk","U3RvbmU=","6ZW/","IFN1bA==","IGdpZA==","IGFsbG95","ZXJ2YQ==","aXNlY29uZA==","X1JFTkRFUg==","IGFuZ2Vscw==","IFBoaWxvc29waHk=","b3B1cw==","IG1vbw==","ZW5ndWlu","X1ZBUklBQkxF","X0RFU1Q=","KGF1eA==","IGhvZQ==","IGRvYg==","YXR0YWNobWVudHM=","IGNvcnJpZG9y","IGRpdmlkZW5k","nbw=","IFRocm91Z2hvdXQ=","Lm9wdGlt","JG5ldw==","IGJlcmc=","IHNwcmVhZHNoZWV0","LlRyeUdldFZhbHVl","IHBheW91dA==","IE9uRGVzdHJveQ==","YXV0aGVudGljYXRpb24=","IE1pZ3VlbA==","cnRj","IENocmlzdGluZQ==","IEFJUg==","IGp1cmlz","IGRlc3BhaXI=","IHBhdGVudHM=","LWhhcw==","JV4=","5Lu
Y","X3N0cmR1cA==","IFJlYXI=","ZXR0ZXM=","KHByb3BlcnRpZXM=","IHdyaXRhYmxl","LmlzTnVsbA==","b2xpY3M=","X2Jsb2I=","IGN1YWxxdWllcg==","YWZp","b3d5Y2g=","6I635Y+W","w4c=","IENhcmRpbmFs","IHRlbWE=","IkFuZA==","UGFnZVNpemU=","56eS","LlNpbXBsZURhdGVGb3JtYXQ=","IFdpbm5lcg==","IGNvcnJlbw==","X3dl","LmFkZE9iamVjdA==","KGNvdXJzZQ==","IGhvZw==","b3Bybw==","IHByb2JhdGlvbg==","dW5hYmxl","KGFjdGl2ZQ==","5Zu+54mH","IHBlcnRhaW5pbmc=","IGVtcGhhc2l6ZQ==","IFByaW50ZXI=","PS4=","IHVwZ3JhZGluZw==","L2NvbnRhY3Q=","PVtb","LXNhbg==","CXZhbHVlcw==","IGRvc2FnZQ==","U29saWQ=","IFJvb3NldmVsdA==","5ZWG5ZOB","IHJlY3JlYXRpb24=","IFRlcm1pbg==","LkJhZA==","IEJvbHQ=","U2t5","X0ltYWdl","IHNxdWly","IENvYg==","T1JO","IGF1Yw==","LkxFRlQ=","J0I=","LXJlc2lzdGFudA==","PiIr","IHRva2VuaXplcg==","IHNvdmVyZWlnbnR5","IFBlbmNl","KCkiKTsK","IHBlc3NvYXM=","Lkdl","IEluY2x1ZGVk","IHBhZ2luYQ==","IGV4cG9zaW5n","0LXRiA==","X1NDUklQVA==","LyQnLA==","VGh1bWJuYWls","15Q=","d2ViRWxlbWVudFg=","d2ViRWxlbWVudFhwYXRocw==","cHJlc3N1cmU=","IEN1cnJ5","X0NQ","T0xVVElPTg==","SUxFUw==","cHJvdGVjdA==","b29sYQ==","V29ya3NwYWNl","e307Cg==","IFVOUw==","IHN5bXBhdGh5","cm9rZXI=","IHJlbW9kZWw=","CWNlbGw=","IGF0b3A=","LkZ1bGxOYW1l","IGZhdXQ=","IEVhc2lseQ==","X2R5bmFtaWM=","IGZyYW1lZA==","IG1vdGl2ZQ==","6Lev","c2Ft","IG1hcmNh","IFRleHRFZGl0aW5nQ29udHJvbGxlcg==","IGRlc3RydWN0b3I=","Y3JlYW0=","IHJ1ZGU=","IEJvbGQ=","IEluZGlnZW5vdXM=","IGdlbnM=","IHJlbGFjaW9u","KHN5c3RlbQ==","IFVJRm9udA==","X2NoYXJnZQ==","VVNURVI=","RVY=","Lk5hbWVzcGFjZQ==","IG1lcmdlcg==","IGNhbGxvYw==","Z2FuZw==","QmFkUmVxdWVzdA==","IHNwZXI=","LWRlc2lnbg==","IOKH","Q2hhbg==","IG9yZ2FuaXNt","LCk=","PWlk","X3BsYW5l","IENhc2Vz","ZWxmYXN0","IExlZ2lzbGF0dXJl","IEZha2Vy","IGludm9raW5n","LXV0aWxz","KCkuJw==","LmZhY2U=","IGd1YXJkaWFu","bXlNb2RhbA==","IGNsaXBib2FyZA==","IEFUTQ==","IHBlYXM=","IFN5bHY=","LmNhbGM=","IENvbnRhY3Rz","aW50VmFsdWU=","IG1vZGlmeWluZw==","IEJhcmI=","Lmxvc3M=","X3BlcmNlbnRhZ2U=","QXNrZWQ=","KGxzdA==","YXRlZ29yaWNhbA==","LWZpbGVz","IFJvbWFuaWE=","LkFj","IGhhaQ==","IEZ
seWluZw==","IMW8","anA=","IFRyYWluZXI=","LmFyYw==","X2RlZw==","IHRyYWNlYmFjaw==","T3JGYWls","RkxPVw==","Lm9sZA==","b3lh","Z210","aXNlbXB0eQ==","IHZhY2NpbmF0aW9u","IG9ic29sZXRl","cmVjb2duaXplZA==","IHJ1aW5lZA==","IFJlaW4=","IFRyYWNraW5n","eGZi","2KfbjA==","IHbDpnJl","IGJyeXN0ZXI=","IElUUw==","IGRlc3Rpbnk=","IHN3ZWFy","IHJlZGVz","IGNsZg==","IGZsaXBwZWQ=","CWhlYWQ=","Qmx1ZXRvb3Ro","IE92ZXJyaWRlcw==","OkJvb2xlYW4=","Xz0=","X2xy","c3Bhd24=","OmluZGV4","VkFMVUVT","aXNrZXk=","PyIpOwo=","LnN5bnRoZXRpYw==","IENoZWNraW5n","c3RydWN0dXJlcw==","aXBpbmc=","IHZvY2Fscw==","LVVw","IE1hbnVmYWN0dXJlcnM=","IE1hcnJpYWdl","5Luj56CB","IGdhcm5lcg==","X0NsaWVudA==","cGFyYWxsZWw=","UklFTkQ=","IHZpbmVnYXI=","c2VndWU=","SkI=","IGNvbnRhY3Rpbmc=","IENhcnJvbGw=","IG91dHJlYWNo","dGVuc29y","X3ZhcmlhbnQ=","IHRoZWF0","bGljYWJsZQ==","e3w=","dGlueQ==","X2xldHRlcg==","IHBlbmNpbA==","SGVhZGVyc0hlaWdodFNpemVNb2Rl","aWx0cm8=","LmF1dG9jb25maWd1cmU=","LmRyYWc=","LnVzZVN0YXRl","IEJNSQ==","aGludA==","Q29tcGlsZQ==","Klw=","ZW5hcnk=","IGx2bA==","LkNhY2hl","Kz0i","X3R2","cnVpdG1lbnQ=","IGZyZWFk","QXJ0aWNsZXM=","ZmlsYQ==","IHBhY2thZ2Vk","4piG","QVRIRVI=","IFBsYW5uZWQ=","c2NoZW1l","IGRpYXJ5","IG9mZmVuc2Vz","Lzw/","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","UHJvZ3Jlc3NIVUQ=","IEdvcg==","LmdldFRpdGxl","IG1vY2tlZA==","IFRvcnk=","ICIpIjsK","I2c=","IGxpZWQ=","IHN2Yw==","X2d1aQ==","RU5UUlk=","IHNlcnZpY2lv","bW91c2VvdmVy","U0FDVElPTg==","44Kz","IHJlaWZl","bGVjdHJpYw==","X2NyZWF0aW9u","UmVhbGl0eQ==","KCcr","cHJvZHVjdElk","U3VwcGxpZXI=","LUxl","LnJlcG8=","dWNraW5n","X1N0cg==","IFJlbGF5","0LjQuA==","IHBlcnY=","Q2hpY2Fnbw==","IG1haXNvbg==","IHN0aWNrZXI=","X3ByZXNzZWQ=","U3dhcA==","IElH","IHN1c2NlcHRpYmxl","b2NhZG8=","IGdpbg==","ZXhl","aWdoYm9yaG9vZA==","KWA=","IGRpYWdyYW1z","IGluZmxhbW1hdG9yeQ==","IHTDqQ==","IFBvcHVw","IGFwcHJlaA==","IFBvcnRmb2xpbw==","IHdvcnM=","LmVudW1z","0LXQs9C+","L0J1dHRvbg==","IFBoYW50b20=","ICM6","IGRpaw==","cGFnZXI=","ZnRhcg==","IG9yZ2FuaXplc
g==","KGNoaWxkcmVu","IE11bmljaA==","IHN0cmFuZw==","IFJX","44K/","TWFo","cHRpZGU=","IGxlYXJucw==","IHJlZHVjdGlvbnM=","IFJlcGxhY2VtZW50","T1RT","YWxjb24=","KHBhcnRz","YmFzaA==","IENpdGl6ZW4=","jbDsnbQ=","IEh0dHBTZXJ2bGV0","X1NDSEVNQQ==","bWVhbnM=","IGhvcnJpZmlj","VkVSSUZZ","IERDSEVDSw==","ICgv","LmJlZm9yZQ==","LnRleHR1cmU=","Z2V0TW9jaw==","IFNlbnNl","SW5zcGVjdG9y","VGV4dE5vZGU=","KEFM","LmdldE5vZGU=","IGJveWM=","IEJyaXNiYW5l","IGJhdHRsaW5n","CXR4","IGxvYmJ5aW5n","YnVpbHQ=","IFNFRUs=","IHJhbmRvbWl6ZWQ=","Z25p","X2NsdXN0ZXJz","X2lkZW50aXR5","IGNhcmRpYWM=","IG5ld1VzZXI=","LlZpZGVv","ZHVpdA==","XWluaXQ=","QXRs","KXZhbHVl","VGV4dFV0aWxz","INC10YHQu9C4","Q29tcHV0ZQ==","PSgn","CQkgICAgICAgICAgICAgICA=","IGFydGVy","IFRXTw==","JykpLA==","IERJVg==","IHByaXZpbGVnZWQ=","IFBhcnRuZXJzaGlw","IEhlYXRoZXI=","YmF5","YXRpc2ZpZWQ=","aW5zdGFncmFt","X1NlbmQ=","IEFTRg==","JG5hbWU=","IGJvbw==","IGTDqWY=","X0ZpZWxk","IEVkdQ==","Y2FuZGlkYXRl","cnVieQ==","IGFjY3VtdWxhdGU=","KEludFB0cg==","IGJ1c2luZXNzbWFu","IGVjb25vbWljYWxseQ==","IFJpbmdz","IElucHV0cw==","uYQ=","YWNpZQ==","IEFsYXJt","IExvZ291dA==","LnNlcXVlbmNl","IFZpZW5uYQ==","b3By","IGRydW1z","PWNvbmZpZw==","cXVp","IGRhdG8=","IHBvbHltZXI=","IENoYW5nZWQ=","V2ViUmVxdWVzdA==","IEFkdmFuY2U=","IHVuZGVyZ29pbmc=","LkNvbnNvbGU=","IGN1cnJlbnROb2Rl","IFdvb2w=","IHDDoWdpbmE=","UkVHSVNURVI=","IHNhZ2E=","IFlPUks=","YW1hbmhv","5a6M","IEJ1bmRlcw==","IERpYWxvZ0ludGVyZmFjZQ==","Z2VvaXM=","dW5jaWF0aW9u","PyQ=","LkFzc2VydGlvbnM=","IHNlYXRlZA==","IFNweQ==","UG9zZQ==","IkM=","IGFob3Jh","INGE0LDQudC7","IOuzgA==","IHdhcnA=","UHJvamVjdGlvbg==","IFNpbmdsZXM=","IEFkdmVydGlzaW5n","TGludXg=","dXN0eQ==","IHBlbmFs","VVNJQw==","b2RpYQ==","Lm5ldGJlYW5z","IFVn","IEJyZW50","LWxvZw==","L2NhdGVnb3J5","IEN1c3RvbWl6ZQ==","aXJlbg==","77yaPC8=","aW5hcnM=","ICgrKw==","R29pbmc=","RVhFQw==","KG1lc2g=","IHBlcmltZXRlcg==","Q2xz","Y2VpdmluZw==","bWVuc2FqZQ==","KCkpKXsK","IHByb3N0YXRl","X2J1eQ==","IFJvb2Y=","LlJldHVybg==","IG1hcnJpYWdlcw==","X3RodW1i","574=","4K+N","VGV4dHVyZXM=","KFRFWFQ
=","c2hvcnRjdXQ=","VHJhbnNmb3JtZXI=","QVRJQw==","IFNub3dkZW4=","c2NyaWJlcnM=","bWFya2Vk","IOKGkQ==","aG9yYQ==","T1BFUg==","IEZZ","IEF1dGhlbnRpYw==","IGF1ZGk=","cmFtZXI=","IExpdGVyYXR1cmU=","IGl0ZW1JZA==","LkF0dA==","KGNudA==","IEtT","LWxpbnV4","IFBhcnRpY2lwYW50","IENydWlzZQ==","aXR1bG8=","dXN0cmlhbA==","IGNsYXNl","ID0k","X2RhdGVz","Y3VycmVudFBhZ2U=","aXhh","ZXhhY3Q=","IHRzbA==","LlNv","L2RvY3VtZW50","aGFydA==","X0lETEU=","e30u","eWV0","SXJvbg==","IFRocm9uZXM=","c25k","XHhh","IGJldmVyYWdlcw==","X3RyYW5zcG9ydA==","IGZvaWw=","IHRhc3Rpbmc=","IGdvZWQ=","TWVtbw==","IG5pdHJvZ2Vu","Lk1lbWJlcg==","LmZsYXQ=","IGlsbHVt","bWluZW50","Lnpvb20=","IFB0cg==","b2Npbw==","IENvbnN1bHRpbmc=","IENvbmU=","CWl0ZW1z","IExN","IG9hdXRo","IFByb2dyYW1tZQ==","b2Nob25k","KHNlbGVjdG9y","IHdhdGVycHJvb2Y=","IE1lcmtlbA==","IHN1ZmZlcnM=","IG5wbQ==","6LGh","IExhbmRpbmc=","IExBTg==","CQkJCQkJDQo=","L2lz","IHPDqXJpZQ==","IEdVSUxheW91dA==","Z2l2ZQ==","X0NZ","QnJvd3Nl","Lm11bHRpcGx5","PSIkKA==","dXNv","LXBhcmVudA==","Lk1hdGg=","Lm51bWJlck9m","IHRpZW5lbg==","IHJlc2VudA==","IHBpdGNoaW5n","Il0pLAo=","LlV0aWxpdGllcw==","IG11bHRpcGxpY2F0aW9u","OnR5cGU=","IHBwcmludA==","aWFuaQ==","5YiZ","IGxhdW5jaGVy","IHJ1Z2J5","546w","CgkJCQo=","aGlk","QW5nbGVz","IGdvb2RieWU=","IGlucHV0U3RyZWFt","LndhdGNo","R29vZHM=","IFNheXM=","PkY=","IFN0aWNr","IGNlcmM=","IFNsZWU=","CQkgICAgICAgIA==","PEltYWdl","IOiuvg==","LWVkaXRvcg==","cGllY2Vz","IERyYW1h","IC8vLy8vLy8vLy8vLy8vLy8vLw==","IFRhc2tz","QVJD","Z2F0ZXdheQ==","LmdldGN3ZA==","Lk1ldGFkYXRh","IGd1ZXNzaW5n","5Zyw5Z2A","IHNtYXJ0ZXI=","IEdldEVudW1lcmF0b3I=","IGVmdGVy","L29wZXJhdG9ycw==","IEdMZmxvYXQ=","IGbDuHI=","IG9wYXF1ZQ==","5L+d5a2Y","U3ByZWFk","U1lTVEVN","IGludmVyc2lvbg==","IEJhc2tldGJhbGw=","IHNpbXVsYXRpb25z","IGRlbmllcw==","IGF2ZXo=","X2xpc3RlbmVy","IGVuaGFuY2luZw==","IE15dGg=","IExha2Vycw==","X01E","TmRFeA==","REFUQUJBU0U=","IHThuw==","YXJ0aA==","W2xlZnQ=","IGNvbnRlc3Rz","c3RpbGU=","KEtFUk4=","X2Zj","X3Bt","IHByZXNpZGVudHM=","IGhvc3BpdGFsaXR5","IGZhZGVJbg==","Uk9QRVJUWQ==","X21
hcHM=","IERlZmluaXRpb25z","IGFzc2Vzc2luZw==","IHVzYXI=","IHF1YW50aXRhdGl2ZQ==","bW96","QmVhdXRpZnVs","Wygo","Ym9ucw==","ZnJlcXVlbmN5","Q29udGFpbg==","IHB1enpsZXM=","IENhc3Rybw==","IHZpbGxh","IGtpbmRseQ==","Rm9udEF3ZXNvbWU=","ZXJuYQ==","ZXBvY2hz","X2RhdGFz","CWlw","LnBhZGRpbmc=","IENvbnRlc3Q=","IGVkaXRpb25z","IGRpc3Byb3BvcnRpb24=","IElDTw==","IGNvbWViYWNr","PXZhbHVl","cmlhZA==","LXNvcnQ=","U3VibWl0dGVk","KG5ldHdvcms=","IENlbA==","IGluc3RhbGxtZW50","bGFzaGVz","Lkxpc3RWaWV3","IFZhdGljYW4=","KE1lZGlhVHlwZQ==","SVZFRA==","cmVhY2hhYmxl","Oklz","IENJVFk=","5Lqs","IEhlbHBmdWw=","IGJhxZ8=","JQ0K","IHBzeWNoaWF0cmlj","IHJlY3ljbGVk","Rk9STUFU","IEdyb3c=","YmluZQ==","R2l0","LnNz","IFdlYXBvbnM=","IFN0eQ==","X2Fycm93","KnNlbGY=","aXJlbWVudA==","IGRlZ2xp","QXBwRGVsZWdhdGU=","X2Jhbm5lcg==","IGNvb3JkaW5hdGVk","IFdlYmNhbQ==","IGNlbGVicmF0aW9ucw==","LmFjdA==","KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq","KHNob3c=","IHdlZWtkYXk=","IGNvbmNlcnRz","0L7Qu9C9","Y2xpbg==","IGNyb24=","IE5pbQ==","LnNldFZlcnRpY2Fs","IEVsbGVu","2LPYqg==","IFNBTQ==","RWZm","Z3o=","c3RlYW0=","IGFudGlxdWU=","cGh5c2ljYWw=","IEZvcm1EYXRh","LnNldHRlcg==","IFBPSU5U","Qm9u","IGZsYXZvdXI=","ZXJ2ZW50aW9u","X0VOVElUWQ==","CSAgICAgICAgICAgIA==","IGludHJpbnNpYw==","IOaO","YXBwZW5kVG8=","YXJhbWVs","KV0p","IFJlY29tbWVuZA==","KW0=","T3V0T2ZSYW5nZQ==","IGtuaWdodA==","IHNhdGVsbGl0ZXM=","IFRpdGFucw==","IHdlaWdoZWQ=","IERhbmE=","ZWFzZQ==","IHNpcA==","U0lN","IERldmVsb3BlcnM=","bWFsaW5r","L2NoZWNr","X1BMTA==","bnVuZw==","IGRyeWVy","PUE=","LmR3","X1NRTA==","IHN1YnBsb3Q=","RFJPUA==","IHByb3RvdHlwZXM=","IGhvdXJseQ==","ZGlzcGxheU5hbWU=","IGFzaQ==","IFZpb2xlbmNl","IGFzdHJvbmF1dA==","IGRhdGF0eXBl","IGluZm9ybWF0aW9uYWw=","IGludmVzdGlnYXRpdmU=","ZXRlcm1pbmVk","cmVuYWw=","Oyc+","CWNvbA==","Vkc=","X2Jvb2xlYW4=","cmVjZW50","ICopCgo=","IFJhaW5ib3c=","b21tZW4=","IGx1cg==","IG9wcHJlc3Npb24=","KCIsIik7Cg==","IEZhY2lsaXR5","REVGSU5FRA==","IG5lb24=","IG9mZmVuZGVy","QUZQ","IENsZWFuaW5n","W10pOg==","IHVuZG9jdW1lbnRlZA==","LlJlcG9za
XRvcmllcw==","IEd1aXRhcg==","0LDRgdGB0LjQsg==","U2tpbGxz","IHRlc3RpbW9u","cnlwdG9ncmFwaHk=","IEFtYmVy","IFN0YWxpbg==","IGxvbmU=","IGFwZW5hcw==","IGRpZXNlcw==","IEFyZHVpbm8=","6L2s","PT0t","X0FjdA==","IGNvZGVk","4pag","YW1idXJnZXI=","LWxpbmtz","IGFybW91cg==","LkhpZ2g=","Z2V0Q29udGVudA==","c3RhZw==","IGhlY2s=","IOyXhg==","IE1jQ29ubmVsbA==","IENvbmNlcnQ=","IEFsbG9j","w6RyZQ==","LnJlcGxhY2VBbGw=","IHBhcnRpdGlvbnM=","cm90dA==","IEZsZQ==","X1RSRUU=","cmVhc29uYWJsZQ==","IFJlcG9ydGluZw==","IGJpbGxpb25haXJl","c2NvcmVz","bWlucw==","LWV5ZQ==","TU9SRQ==","YWJvcnQ=","IFNXVA==","IGludmVydGVk","IFRlYWNoZXJz","O24=","IGFzdHJv","0L3QvtCy","0LDQvdC40YY=","cHJvZHVjdG8=","Y291bnRyaWVz","IE93ZW4=","IGNvbnRhbWluYXRpb24=","IHZpYmU=","IEVsbGk=","LnNjcmlwdA==","IE9saXZl","RE1B","dmllcg==","OnNlbWljb2xvbg==","LW1vZHVsZQ==","Z3Jlc3NpdmU=","YWd1","X3BsYXllcnM=","IHJlc3VsdGFkb3M=","c3RhcnRlZA==","c2Nyb2xsVG9w","PT09PT0=","IHdlaWdoaW5n","IFtbWw==","emFobA==","KE5T","IEFzc2VydGlvbg==","bGVhZ3Vl","LnNldFRleHRDb2xvcg==","CU1lc3NhZ2U=","IG1vbXM=","X0FG","Lndo","QUxT","IGF1dHJl","XQoKCgo=","Lm9wYWNpdHk=","IEJ1ZGRoaXN0","IGRlYWY=","IE9yZ2FuaXNhdGlvbg==","KEdsb2JhbA==","ZW5zY2g=","IGhlYWRhY2hl","IEFsaWVu","X2lub2Rl","IFN0YXJr","IOaJ","LWxuZA==","b3JlZg==","X2ZlYXQ=","IHBlZGVzdHJpYW4=","IG5vbWluYWw=","IGJhbGxvb24=","IHNwcml0ZXM=","UHJvdG90eXBlT2Y=","IEFwb3N0","IEZFQVRVUkU=","T0g=","IHJlY2Vzcw==","IERvbm5h","Y29uc3VtZXI=","JEdMT0JBTFM=","IEdJRg==","LWZyYW1l","SW5pY2lv","IHBhc3NhZ2Vz","RGF0ZVN0cmluZw==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","LmJ5dGU=","QnVn","aW5pdGlhbGl6ZXI=","cGt0","b2RpdW0=","IERFUg==","Lm9wcw==","bGVyaQ==","IGdpZnRlZA==","IGRldGFjaA==","dGVycmFpbg==","ZWx0ZXJz","44GP","LmxvYWRlcg==","IE5HTw==","c3RybmNtcA==","S2g=","KGZvbnRTaXpl","cm9ja2V0","IHByZWNlZGVudA==","IEF1cm9yYQ==","IEV4cGVyaW1lbnQ=","aXNwaGVyZQ==","RW5jb2RlZA==","IOKAkwoK","IHB5cmFtaWQ=","IEFubml2ZXJzYXJ5","b2ZpbA==","658=","KHBsdWdpbg==","Q29lZmY=","IGNvb3BlcmF0ZQ
==","IHByZWRvbWluYW50bHk=","SVNN","UGhyYXNl","X0RFRklORQ==","RmxpcA==","QU1JTFk=","IE1hcmtldHM=","IFN0cmVhbVJlYWRlcg==","IENvbWJpbmU=","IG1hbnVzY3JpcHQ=","enph","LHRw","V2hhdGV2ZXI=","SVRJQ0FM","aWdoYm91cg==","RGF0YVByb3ZpZGVy","LlRleHR1cmU=","cHJpdmFjeQ==","LlNESw==","IHJlY2hhcmdl","IGNwcA==","IENGRw==","KGhvbGRlcg==","KHB5","bW90","IHNhdm9pcg==","IFJvc2E=","IFBDcw==","IO2Z","Lmhlcm9rdQ==","IGZyZW4=","IFJpbGV5","YWdhdGU=","IHNvbmQ=","Lnhsc3g=","IGhhY2tlZA==","c3RhZA==","R2k=","IHNhbml0eQ==","IFNxbERhdGFBZGFwdGVy","Li4uIiw=","IFB1c3N5","ICoqKioqKioqKioqKioqKio=","IGhhc3NsZQ==","X1BBUkVOVA==","IFVBRQ==","IGJlZ2lubmVycw==","KENsaWVudA==","IHN0YXRpc3RpY2FsbHk=","LmhvdXI=","ZWRlbHRh","IHRyYWN0aW9u","dWVsdmU=","YXJhdA==","IHNhdW5h","SU5WQUxJRA==","IGluZGljdG1lbnQ=","QUxMRQ==","IGRpc3NlbnQ=","IFR5cG9ncmFwaHk=","IGludGVudGlvbmFs","c2l0","IEFuaW1hbHM=","IGNvdW50cnlzaWRl","IHVhcnQ=","fVwi","IHNlYW1sZXNz","vuekug==","IGF1dG9z","ICInIjsK","Rmx1c2g=","QU5OT1Q=","IGFsZ2VicmE=","YXNzb2M=","IFdhdGVycw==","IHByZXBhcmF0aW9ucw==","cm9ueW0=","Wyxd","U2Fucw==","IGFybWllcw==","aXBlZw==","IGNyZWFteQ==","LmFydA==","ZXRyZQ==","IEFuaW1hdGVk","IHVucGxlYXNhbnQ=","ZW1lYW4=","Z3JlYXQ=","acSF","IEVhcmxpZXI=","IGNoaWM=","IHByZXNlcnZpbmc=","KGV4ZWM=","IEludmVzdGlnYXRpb24=","CUdQSU8=","IHJpZ29yb3Vz","aWpv","PW51bQ==","IHRvb2xTdHJpcA==","KXNldA==","KyIm","IEFjY2VsZXI=","IGRldmVsb3BtZW50YWw=","aXNwb3NhYmxl","IGZsYXdlZA==","cmVuZQ==","VXBkYXRpbmc=","IHdhdGNoZG9n","IGRlbm9taW5hdG9y","IHN1YnVyYnM=","IC4uLik=","IGNvbnZpY3Rpb25z","Y2xvc3VyZQ==","LklQ","IHRyYW5zbGF0ZXM=","LnN3dA==","LlRyYWNl","IG1ldHRyZQ==","LmlzRW5hYmxlZA==","IEVmZmVjdGl2ZQ==","LnRvSW50","IGVuY2hhbnQ=","IHN0dW5uZWQ=","IHBvaQ==","L2NvZGU=","YWRt","LmRhdGFiaW5kaW5n","IExvcmVt","X19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fX19fXw==","IGxlZGdlcg==","IGNhcmE=","IEdpcg==","IHdhaXRz","VW5v","IGN3ZA==","6L6R","IFRSZXN1bHQ=","IHJlam8=","IGVtaXR0ZWQ=","IFdlc3RtaW5zdGVy","5LiA5Liq","bmVr","X1Rpcw==","IGVuYWN0","CXd
pdGg=","b3JnaWE=","IGp1ZQ==","UGVyZm9ybQ==","U1BBVEg=","LnRvcGlj","IERhdGVu","4bqn","IHNpdGlv","X01N","IlNv","YmlhbA==","IHNjb3BlZA==","UmVxdWlyZXM=","IFRPVEFM","IENoYW5jZWxsb3I=","KGNvbnRlbnRz","IHN0ZWFsdGg=","ZGV2aWNlcw==","LXBhc3M=","aWxpaA==","IE1hbGNvbG0=","IERlcG90","IGNvbmZpZ3Vy","YXVzc2lhbg==","X2NvbnN0cmFpbnQ=","0LLQtdGC","R1JB","IFJhdGVz","LmRhdGFHcmlkVmlld1RleHRCb3hDb2x1bW4=","IE5vYmVs","aXRpY3M=","IGlnbm9yYW50","IFJlcG9ydGVy","IEVib2xh","IFNob2Nr","X3JlbGF0aW9u","IE5pbmph","KWM=","IHRpY2tlcg==","LmlzQ2hlY2tlZA==","IFN1cHBsaWVycw==","IFJhcGlk","TGV2ZWxz","4oKs4oSi","CXF1ZXVl","IGNob3A=","IFVuaXg=","cmVqZWN0","LWNhbGVuZGFy","KHNvcnQ=","w6huZQ==","ZXJjaWNpbw==","IGhlY3Q=","Q0FMTFRZUEU=","cm91cG9u","IHJlbnRhbHM=","YXV0aG9ycw==","e25hbWU=","IEZJRk8=","IGxhc3Nlbg==","IE5vdXM=","IHNuYXBwZWQ=","IGZlcnRpbGl0eQ==","ImxvZw==","Y2xpY2tlZA==","IHBsYW50aW5n","IGdi","L291dHB1dA==","UEVBVA==","IGNhdGVnb3JpYQ==","IGJhY2g=","UHJvZmVzc29y","aW50aA==","Il0NCg==","UmVjb3JkZXI=","c2VyZGU=","IFRyYW5zbWlzc2lvbg==","dHJhZA==","IHR1cmJv","X1ZFUlRFWA==","XEV2ZW50","aWx2ZXI=","IGJvZGlseQ==","IFNvdXJjZXM=","IGtpbGxpbmdz","LnhyVGFibGVDZWxs","IGZvbGRlZA==","L2xlZ2Fs","dW5lcg==","IFJpZmxl","IE1JREk=","X1NlbGVjdGVkSW5kZXhDaGFuZ2Vk","LlNpemVUeXBl","IFdlYlNvY2tldA==","IHNlbGVjY2lvbg==","U2FuZA==","b3Ryb3M=","IGVudmlzaW9u","L2V0Yw==","IE1lbGlzc2E=","U3BvdA==","0L3QvtC1","X0FSTQ==","QXR0ZW1wdA==","IEJJ","44GU","IERV","IGJhY2tsYXNo","c3RyaWRl","L2NsYXNzZXM=","IHRleHRDb2xvcg==","X3N0YWZm","b2JsaW4=","YWdlbnRh","LmNvbGxlY3Rpb25z","aWxsYWdl","Jw0KDQo=","ZmxhdHRlbg==","X3NhbGVz","X01BU1RFUg==","VFc=","X2Rh","UGl0Y2g=","cGhpZXM=","IHpvbWJpZXM=","IFZFUlk=","IFBoYXJtYWN5","IHByb2dyZXNzQmFy","IGhhc2h0YWc=","U2lkZWJhcg==","QHN0b3A=","KHBj","0L7Qu9C2","TUFLRQ==","IENvcm9u","IGt2aW5uZXI=","IE1haWQ=","Ym9i","LnRpdGxlTGFiZWw=","IHN1Y2Nlc3Nlcw==","IERlbW9jcmFjeQ==","IFN1cmdlcnk=","IGNvdWdhcg==","IGN1cnNv","IGxvcm8=","aXN0ZW5jeQ==","U2VuaW9y","w6Zr","IEFBQQ==","IEJPT0s=","0LrQvg==","V1NUUg==","ICovLAo=","b
3lhbA==","LnZlY3Rvcg==","IFNQRUM=","U1NG","IGNvbXB1bHM=","IEFwcGVhbHM=","IFdpbnN0b24=","IE1vY2tpdG8=","Y29udHJpYg==","LmF2YWlsYWJsZQ==","ZW50aXR5TWFuYWdlcg==","YXJpYXM=","X3NhbGU=","X3Jz","IGRlY29kaW5n","IGxvY2F0b3I=","b2xpdGg=","IGtvbA==","IGFzY2lp","IFJ1dA==","L2ludGVyZmFjZQ==","CQkJCQkJICAg","IE51bWVy","LmZsaXA=","LWRlbA==","IGJvbHN0ZXI=","b25vbWlj","IHpt","TEc=","RmluZEJ5","IGFkYXB0aXZl","bG9v","IHZ1ZQ==","KHJldmVyc2U=","X2NhbnZhcw==","LnJvbGVz","aWZpY2Fkbw==","dmVuaWVudA==","IkFz","IEVudHI=","YWxpZ25lZA==","IGJlcmVpdHM=","Ly8vCgo=","Lmd3dA==","LmVtcGxveWVl","X2NsaQ==","IGFudGljaXBhdGU=","6ZmQ","IHBpaw==","IG11c2hyb29tcw==","KHR0","IG9tYQ==","IFNhbmNoZXo=","X2dvb2dsZQ==","LlZhbGlk","IEZpbGVOYW1l","aXZhdGl2ZQ==","a2Vk","LXdhcg==","IG1hdHVyaXR5","0LjQtA==","IG1pbmVy","UmVkdWNlcnM=","IExhdExuZw==","X1NURA==","RGlnaXRz","Q2FsYw==","LXVwbG9hZA==","IGhhbmRpYw==","4Li14LmI","ZWdyYXRlZA==","IFNUTQ==","Q2xpZW50cw==","IFR1cmJv","U1lOQw==","IHBob3RvZ3JhcGhlcnM=","Lk91dA==","LmNoYXJhY3Rlcg==","QlVJTEQ=","LnVubG9jaw==","IGFyaXNlcw==","IENvbW1hbmRz","KCIiKTsNCg==","X0ZPUkU=","Oycs","KyIn","LkltYWdlcw==","Iil7","IE1leWVy","IG5lZ2F0aXZlbHk=","IERMTA==","IGV4ZQ==","IGRlZmljaWVuY3k=","IHdpbGRseQ==","LXN3aXRjaA==","Y29uc3RydWN0aW9u","IGV4Y2VwdGlvbmFsbHk=","IExpeg==","L2phdmE=","IHRoZWlycw==","IENvbnRlbXBvcmFyeQ==","bGlz","LmZpbGxSZWN0","IE5GQw==","IHJlaGU=","KG51bWJlcnM=","IHJhc3Rlcg==","IGZpZ3VyaW5n","IHNob3dj","IEppbGw=","IGFyY2FkZQ==","IENvbnN0cnVjdHM=","bWRs","KCd8","IGlkZW50aWZpZXJz","IHN0ZWxsYXI=","KENvbm5lY3Rpb24=","ICJ7ew==","eW9y","KG15c3FsaQ==","IGRvdmU=","T2ZCaXJ0aA==","LmRpc2Nvbm5lY3Q=","X2hp","IHp3aXNjaGVu","IEdydW5k","aXJvcw==","X0FycmF5","Lm9uY2xpY2s=","YW5zb20=","QW5zd2Vycw==","CXJlbW92ZQ==","RmE=","IGh1cnJ5","LWluZg==","IGdldENsYXNz","IFJlZ3VsYXRpb24=","IEZMQUdT","bWlzYw==","S2Vu","X2hlYWRpbmc=","R0h6","LWVudHJ5","IGJpb2dyYXBoeQ==","U2ln","LW1m","V2F0Y2hlcg==","4oCcQQ==","fXB4","IHNwaWN5","X3Nx","TG9zdA==","KHRyYWNr","0LDQu9C4","RGVzY2VuZGluZw==","PGJpdHM=","cXVpb
mU=","IEFkdm9j","X1NO","IEhhbm5haA==","UE9Q","IGVtaXR0ZXI=","IGN5bg==","IENBRA==","Pyku","L3NldA==","IFNpc3Rlcg==","IEVuZHBvaW50","IG1lbm9y","IGludGVycA==","cms=","aWRsZQ==","IG91dGZpdHM=","LnZlcnRleA==","IGNsaWM=","QVJFTg==","IHBvc3R1cmU=","IE9wcG9ydHVuaXR5","dng=","IEZvcmJlcw==","LkRpcmVjdGlvbg==","IHJlc2lkZQ==","IHJlbWVtYmVyaW5n","bmVzdHk=","QXV0b3Jlc2l6aW5n","cHJvdmlkZXJz","IEFI","IGh1cnRpbmc=","IExpbHk=","ZXZhbHVhdGU=","bGlqaw==","cGFwZXJz","IFNtYXNo","IExBU1Q=","IHdlbGxz","d2FzaGVy","X1JPTEU=","IERhbmdlcg==","Kigo","X3JlcG9zaXRvcnk=","IFJlc29sdmU=","IFJvb21z","X1JH","IFFU","b29w","IEhlYXA=","IHNsb3dpbmc=","IGdyYXR1aXRl","X2NhdGFsb2c=","IHBvbHlub21pYWw=","THk=","cGNz","Rm94","IEN5cg==","IGRpbWlu","L21vbnRo","U2FsdA==","IGhpbmQ=","LlBFUg==","Rm9ydW0=","Y2Vu","X3BvbA==","7Zi4","IGluc2Vy","KH4=","QHRlc3Q=","IEdvbGRtYW4=","IHVwbG9hZGluZw==","RmM=","IGtvbW1lcg==","IG1pdHQ=","X2xvZ2dlZA==","IGJ1Y2tz","LWxheWVy","KX07Cg==","IE9N","IHZlZw==","Y29sb3Vy","INC+0LHRig==","U3RkU3RyaW5n","X3F1ZQ==","IFRpYW4=","IHNwZWNpYWxpemU=","0LjQvw==","INC60Ls=","dHJpYWw=","LWVkZ2U=","IG1hcnM=","T0dMRQ==","IGVtcGF0aHk=","IEJvbQ==","IGNvbGxpc2lvbnM=","IGNhcnRl","IFRlaWw=","IE1QTA==","IHBvcm7DtA==","IGFpcmxpbmVz","QXdz","TnM=","IFNwYXdu","KHVzZQ==","6buY6K6k","IHlhY2M=","c3Rvcg==","IGNvbmZlc3M=","IHBlcXVl","cmFnZQ==","PyIK","L2RhdGF0YWJsZXM=","IFNob3dlcg==","X18v","IGNyeXN0YWxz","IGJ1c2Nhcg==","IEhhdXM=","aXphw6fDo28=","X2VudGl0aWVz","lYw=","mow=","eGNj","dmlydA==","LWNoZXZyb24=","KFJlc3VsdA==","Y2FrZQ==","Q09NRQ==","IHByb2hpYml0","IENoZXNz","IGJlYXVjb3Vw","INGH0YLQvg==","UlVO","IElL","w7PFgg==","X1VwZGF0ZQ==","IHNsZWVr","IFNwZWNpZnk=","X2NyZWRlbnRpYWxz","xZ90","IFVzZXJOYW1l","CVZhbHVl","IGFycmF5TGlzdA==","IGV4Y2hhbmdlZA==","aXBzaXM=","LnJlbGF0ZWQ=","IFNlaXRl","X0JBUg==","IExlbQ==","IFdBVENI","IENsaWVudHM=","IC4q","IEVhcmw=","LXJlcG9ydA==","IGZvcmVpZ25lcnM=","IHN0cmVuZ3RoZW5pbmc=","CURlc2NyaXB0aW9u","KGdv","LnRvb2xiYXI=","IGNhbGN1bGF0ZXM=","CXNvdXJjZQ==","IGN6YXM=","IHJlY2w=","YWJv","IGxvY
2FsaG9zdA==","IF57Cg==","LlBvcA==","IERlc2lnbmVk","XEFic3RyYWN0","SG9sZA==","IEd1aWRlbGluZXM=","aXBsaW5l","IGNhY2hpbmc=","LlJlYWRlcg==","X2V4dGVybmFs","LnN0cnB0aW1l","IFdlZWtlbmQ=","LU1hcg==","IEJlaQ==","IHsqfQ==","IFJ1ZA==","IGV4cGxvcg==","IEJvdWxldmFyZA==","Q2FzaA==","IHByZXBhcmVz","IHNlcmlhbGl6YXRpb24=","ZXdhdGVy","IGFkYw==","OgoKCgoKCg==","UmVmZXI=","IHNjYW5uZWQ=","fX0KCg==","IEZ1bA==","IHRvdXJpbmc=","44OD44Kv","Pigo","c3VydmV5","IO2Y","Li4uJykK","IERpdmlkZXI=","b3Ns","X0NBTkNFTA==","X3ByZXBhcmU=","c3Rpbg==","IEhlYXRo","LlByaW1hcnlLZXk=","IOKGkA==","IExvY2FsRGF0ZVRpbWU=","IGNvb3BlcmF0aXZl","TGVhcm5pbmc=","LmVucXVldWU=","IGdvb2c=","IFJlZ3Jlc3Npb24=","aW1hdGVz","IHZveWV1cg==","IERyaW5r","cGx1Zw==","IGxlbmRlcg==","bWFuYQ==","IHBlcnNvbm5lcw==","eXBzZQ==","IHVubGluaw==","IFJhdmVucw==","IGh1cmQ=","IHBlcmlvZGljYWxseQ==","QVJHUw==","IEdI","Y2hhcmFjdGVycw==","Li4uIgoK","LWVzdGFibGlzaA==","IGRu","KGNvbmRpdGlvbg==","IEdyYXZpdHk=","IGVzdGFz","X2ZvY3Vz","Q3JlYXR1cmU=","KHNpdGU=","IGNhcnI=","IFJM","IFJJ","IE1vdG8=","QVNG","IEx1Y2tpbHk=","CVJvdXRl","IGVudHJvcHk=","KCIsIg==","Q29sbGVjdA==","KGNvbnRhY3Q=","IEZsb3JlbmNl","IHByZW1pdW1z","IGxpZmVjeWNsZQ==","IGJhbnM=","eGVm","V2ViS2l0","IEZsb2F0aW5n","IGNvc2E=","U3BlY2lmaWM=","IExvYW5z","YnJlYWQ=","IGRlc2NyaXB0b3Jz","IHs6Lg==","VEhSRUFE","IFRyZW50","IHNjb3A=","UUE=","IEFudGFy","cGVs","X2RpZmZlcmVuY2U=","X2NoYW5nZXM=","KC4uLik=","IFJvdGF0aW9u","IExHUEw=","IEpVU1Q=","KFRhc2s=","X3N1YnNldA==","IFRSQU5T","5Yqb","IFNjb3V0","LXBvcHVw","IHNtb2tlZA==","X0NsYXNz","IHR1cm5vdmVy","YnJha2s=","IFJvY2t5","dGFz","LlJlZ3VsYXJFeHByZXNzaW9ucw==","IEVsbGlvdHQ=","IFNwaW5uZXI=","RFVDVElPTg==","IGxpYnJl","IG1vbHRv","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","IEZUUA==","bXBlZw==","KGZlYXR1cmVz","IGJhbGQ=","IFZpZA==","IHNob3V0aW5n","TGludA==","IHNvY2tldHM=","IHByb3c=","IG5vdXZlbGxl","aXNjYXJk","IFNwb25zb3I=","IGNvbnN1bHRh","KSkpOw==","SW5kaWFu","IFJhc3BiZXJyeQ==","IHRlYW1tYXRl","IEpXVA==","IEdoYW5h","IGNha2Vz
","cHJpbWVy","Zm9ybWE=","ZXJnYXJ0ZW4=","X01hbmFnZXI=","IHByZXNlYXNvbg==","R0FNRQ==","fCI=","IEJyb2Nr","IG9jY3VweQ==","IGRlY29yYXRpb25z","w6FuZA==","IGNvdA==","IHBhcmFu","RGlzaw==","cmVtYWlu","Pj8=","U3Ryb25n","IGZyYW5jZQ==","IEVyYQ==","LWNy","LkJ1ZmZlcmVkUmVhZGVy","IFBhcmFkaXNl","IFZBVA==","IEFuZGVycw==","IGxpbWI=","YW1wb28=","IGltcGVyYXRpdmU=","VVRJTElUWQ==","IFJlY29nbml0aW9u","IHJhZ2F6emU=","IHBvcHM=","eXByZXNz","IGVtYmFyZ28=","Ly97Cg==","IHN5bGw=","UFRS","5a2Y5Zyo","IGRpZG50","TWFpbGVy","IGFjYWRlbWljcw==","IEZyYXVlbg==","bmVpZGVy","LXJlbA==","IHJhaW5ib3c=","KElu","IHNsaWNlZA==","PT09PT09PT09PT09PQo=","KHNlbmQ=","TlNNdXRhYmxlRGljdGlvbmFyeQ==","dm9z","KHBhY2thZ2U=","IG9yZGluYW5jZQ==","dmlld2Vy","IFNhbnRvcw==","LXNlbGxpbmc=","IGdvdg==","ZXR0bGU=","IGZvdW5kZXJz","IHdha2luZw==","c2xhc2hlcw==","LXBvdW5k","cmVjaHQ=","2KfYqg==","Lm9uQ2xpY2s=","IG5vcmQ=","c3TDpG5k","X3doZW4=","VVRFUlM=","aWNj","IGNhcHN1bGU=","IFdpZA==","TWFyYw==","4Li4","cm9yZWQ=","VUdF","TE9VRA==","IEF1ZGl0","aXBpZW50cw==","b3BpYW4=","IFN1ZQ==","IHd1cmRlbg==","LkhlbHBlcnM=","IGZhY3Rpb25z","W25w","LXRoYW4=","IHJlY28=","IGthcw==","IGNtZHM=","L25ldHdvcms=","eGJm","Z2V0Q29sb3I=","IGJpYXNlZA==","IExhaw==","RGF0YXM=","dmVudHM=","IOuy","X1BT","LlZhbGlkYXRl","SW52b2tlcg==","IG5ldWVu","IGp1dmVuaWxl","VklTSU9O","IGRldm90ZQ==","IGxpbmhh","IGRpc2NvdW50ZWQ=","XENvbmZpZw==","IHdvcnRod2hpbGU=","IHNraW5ueQ==","IENvdXJzZXM=","bGV5cw==","IE1vcnRnYWdl","S2V2aW4=","IGFubm91bmNlcw==","XSkq","cmVzZXJ2YXRpb24=","IOaVsA==","IHByZWp1ZGljZQ==","IFN0cmluZ0NvbXBhcmlzb24=","IGJlYXJk","LXdpbg==","IFPDo28=","CW1z","amFs","IEVhcm4=","X3BvcnRz","IE5vbWJyZQ==","X0NPUg==","IEJVSUxE","LnNvdW5k","WWVsbG93","IGxpbmViYWNrZXI=","IGNoYXJpdGFibGU=","anVn","X05PTk5VTEw=","IERlbnRhbA==","Ij4kew==","CW1hdGNo","UnVzc2lhbg==","IHZlcnNjaA==","IHBpbm5lZA==","IGFkb3B0aW5n","T3B0aW9uc01lbnU=","UGFn","IHBhaXJpbmc=","IHRyZWFk","ZXJjaXNlcw==","IFNwcmVhZA==","KWk=","IEJBRA==","X3Rm","VUlJbWFnZVZpZXc=","cG9wdWxhdGU=","YmFi","IM+D","Wysr","IG9waW9pZA==","ICMjC
g==","ZHR5cGU=","IFN0YXJ0cw==","KCcvJyk=","IHBlcnNvbmFscw==","LW1hcmtldA==","IHJlZHVuZGFudA==","IEVzc2VudGlhbA==","IHNjcmFweQ==","INC40Lw=","YWNs","IGNyZWFy","IEJlbmQ=","IHJlbGlldmU=","LXJvb20=","d2lmZQ==","IHbDoA==","IFFQb2ludA==","IHF1YXNp","IG1ldGhvZE5hbWU=","XHhj","IFBlcnU=","L1RoZQ==","Lm9ybQ==","IHZpeg==","L3BkZg==","TG9jYXRlZA==","IGNvbmZyb250YXRpb24=","IENoYW1waW9uc2hpcHM=","IGh5cGVydA==","IGRq","IFVzZXJJbmZv","IOWIm+W7ug==","XHhi","KHNpbQ==","ID09Cg==","IHN0YWdpbmc=","IGRyYXN0aWNhbGx5","5a2m","bG9yZHM=","Lmxlc3M=","0LLQtdC00LjRgtC1","IEJ1Y2tldA==","IE1hbQ==","LnRlcm0=","X3Bp","Y3p5","LnB1Yg==","cHJlY2lv","IFZpcnQ=","IHJvbWFu","aXRhdA==","TGV4","X2luZm9z","xLA=","Lm90aGVy","VkVMTw==","IHBvbmRlcg==","IGhhbm5v","KFBhZ2U=","ZG9p","IHBvbGl0ZQ==","IHByb2dyYW1tZXI=","RGllcw==","JGQ=","IHJlcGxpY2F0aW9u","YWRkQ29sdW1u","ZnJpY2Fu","IGxlbmc=","YmVlcg==","b2l0","IHdhc3Rpbmc=","eWxpbQ==","bWVhc3VyZQ==","TmVn","IHBhcnRpZQ==","LmNvbnNvbGU=","IEd1aW5lYQ==","VEVM","X2ZhY3Q=","LmNodW5r","IGxlbnQ=","IGFsbGVy","IOCklQ==","X2lkbGU=","IGFkbWlzc2lvbnM=","SlNPTkFycmF5","IHZpYnJhdGlvbg==","LmhlbHBlcnM=","5aSW","IGhlbg==","am9obg==","IOyDnQ==","IGp1ZGdlbWVudA==","IGdlZW4=","dGVycmE=","Xns=","IEl6","IGPDog==","aW5zdGFuY2Vz","IHRocmVhdGVucw==","IG3DvHNzZW4=","S2luZE9mQ2xhc3M=","IHN0b3J5dGVsbGluZw==","X2RlbW8=","cmlhcw==","UHJpdmFjeQ==","aGlmdA==","IFlp","ZXNvcg==","7ZWg","ZW5zaXRpdml0eQ==","LldyaXRlcg==","4LiC","RGlzdHJpY3Q=","LmdldEpTT05PYmplY3Q=","SW1wcm8=","KGdldFJlc291cmNlcw==","IFNQRUxM","cm9kdWNl","IHNsb3dlZA==","IGxpbmV3aWR0aA==","IGhvbmVzdHk=","IENvb3Jk","IEZvcms=","IERpc3BhdGNoUXVldWU=","IENsaWZm","IFdpcmluZw==","X1RJTUVTVEFNUA==","b2xsYWg=","YXZvaWQ=","KytdOwo=","c2VtYW50aWM=","LWNzcw==","IHZldG8=","IE1lcnI=","IGxlZ2lzbGF0b3Jz","Q0VFREVE","IHF1ZXN0aW9ubmFpcmU=","IFBpbGxz","Q2FsY3VsYXRl","KGNvcmU=","J2U=","IGRpc2xpa2U=","IFByZWZlcmVuY2Vz","X0VYVEVSTkFM","6LCD","IGRvZGdl","5pyN5Yqh","Lm5hbWVz","LmRyYXdJbWFnZQ==","X3Byb20=","dWNrbGFuZA==","IDwkPg==","xLF6","L3NpdGU=","6aG5","cm
9waGU=","IGNvbXBlbGxlZA==","IGxhcHRvcHM=","IHVuaQ==","Q0xPU0U=","IGNhc3VhbHRpZXM=","IFVuaWZvcm0=","VGVybWluYWw=","LiIsIg==","REFU","KFRyZWVOb2Rl","IEdhbmRoaQ==","KHN0bXQ=","QVhC","Kk0=","IHVtYnJlbGxh","YW5pbWFs","IGdycGM=","IHdoZXJlYnk=","IGZsb2F0cw==","CWFyZw==","IGRiZw==","IGV4Y2VlZGluZw==","RXZlbnRUeXBl","LlNhdmVDaGFuZ2VzQXN5bmM=","IHt7ew==","IG93ZWQ=","YWhyZW5oZWl0","IOyn","IGVxdWlwbw==","dXJhaQ==","IGlkb2w=","XSIpCg==","X21ham9y","IGVudGlyZXR5","aW5nZXJwcmludA==","w6dvcw==","L2FjY291bnQ=","CXJpZ2h0","dXJzb3M=","IEVEVA==","X0lOU0VSVA==","IHNoaW5pbmc=","IDw6","RWRnZUluc2V0cw==","IGNvbG9uaWVz","LklN","CSAJ","Uk9BRA==","Q0NDQw==","cGxhY2luZw==","IGdldEFjdGl2aXR5","ZW1hY3M=","JyUo","LmNsaWNrZWQ=","IFRoZW0=","aXNpYQ==","QnVzY2Fy","LnJlbmFtZQ==","IG9hdGg=","IGFmdGVyd2FyZA==","IFVGTw==","QVBT","IEphY2tzb252aWxsZQ==","LnNvbWU=","Q29uZmlybWVk","LnNjYW4=","aWdJbnRlZ2Vy","RGVjb3JhdG9y","c2hpZWxk","cmVzc2l2ZQ==","LmRpZA==","6K+36L6T5YWl","IHNodXR0ZXI=","RGFt","IHBhcmVudGluZw==","ZXllZA==","JGl0ZW0=","LWRldmVsb3A=","IGV4dHJhY3Rz","IGRlY2VudHJhbGl6ZWQ=","IEVsc2E=","X3NwaW4=","XSkr","LWluaXRpYWw=","IG11bHRpdHVkZQ==","IHNlbnNvcnk=","IE1PREVM","IHNhZmVndWFyZA==","7Lk=","IGh1bnRlcnM=","IFRpbnk=","SU5P","ZGVjb3JhdGU=","IE5vU3VjaA==","SG8=","KFJlc3BvbnNl","IHJ1bGVy","CXNob3J0","IGNhc3Rlcg==","IGNsaWVudElk","IHBkYg==","64+E","aXRpYw==","IEdhbWVTdGF0ZQ==","IG5ld0l0ZW0=","KQoKCgoKCg==","b3Vpcw==","bm9j","LkJMQUNL","X1ZFQ1RPUg==","LS0tLS0tLS0tLTwv","IGV4YW1pbmVz","CWJsb2Nr","IGFkZG9u","IHN1cnZleWVk","IExpc3RlbmVy","IGZyb250aWVy","IGxhY2tlZA==","SlVTVA==","INGN0YI=","IHRpbnQ=","IE15c3Rlcnk=","ZGF0ZVRpbWU=","IFR1dG9yaWFs","IGZ1bGxOYW1l","IERyYWdvbnM=","X0ZJTEVT","IFByaW50V3JpdGVy","IGJlZXQ=","IExhZGllcw==","X3RpcA==","IEphaHJl","b3JhbWE=","IGluc3VsYXRpb24=","KEVudmlyb25tZW50","X2FzdA==","YmVyZ2Vy","bGVuYQ==","b2dlbmVvdXM=","X01PTlRI","LXByZXNlbnQ=","IGZyYW1ld29ya3M=","UVE=","UEhQRXhjZWw=","IGNvdW50ZG93bg==","IEZX","KGNsdXN0ZXI=","OmM=","IG9raHR0cA==","b2JzZXJ2ZQ==","W3BsYXllcg==","Lmhl",
"IFBhbmFtYQ==","QXVzdHJhbGlh","IG91bmNlcw==","IGFnZ3Jlc3NpdmVseQ==","IHdhcm5z","IGN1c3RvbWl6YXRpb24=","X1F1ZXJ5","d2lz","IGludmFs","QUZG","KGNhbWVyYQ==","V2ly","IG5lZ290aWF0aW9u","CU8=","IHJlc3BlY3RmdWw=","IGRpYW1vbmRz","J2F2","YXBwcm94","L2Ry","IGdyYWJz","IGFjY29tcGFuaWVz","Y29uc3RyYWludA==","IHJleg==","KHJlZ2lvbg==","IGJhaXQ=","dGVybWluYXRl","IEJlbGdpYW4=","YXNzaXVt","IF0NCg==","U3lzdGVtcw==","b3VzZWRvd24=","LmJ1cw==","U2V0VmFsdWU=","IFByZXA=","IGNvbnZlbmllbnRseQ==","Lm1pZA==","Y2FzZWNtcA==","TnVtZXJv","ZGFpbHk=","IENvZGluZw==","KGRlc3RpbmF0aW9u","IyQ=","dWrEhQ==","IGVtZXJnZW5jZQ==","X3BhcmE=","X0lOQ0xVREU=","Izo=","IHJlY29nbml6aW5n","IGZ1Zw==","In19LAo=","IGJ1aWxkZXJz","IFRlcnJpdG9yeQ==","IGluaGVyZW50bHk=","IGRlcml2aW5n","LmV0aA==","IERpbm5lcg==","LnNldE9iamVjdE5hbWU=","IGNlbGVicmF0ZXM=","IHF1ZXVlcw==","IE1hcmtz","QUxURVI=","IERhcnQ=","cG9rZQ==","X0NIQU5HRUQ=","IHBhYXI=","bGllcw==","LnZvbGxleQ==","IE1lYW5pbmc=","IE9GRlNFVA==","ZW5zaW5n","IGZyw6Vu","LmxvY2FsU3RvcmFnZQ==","IOup","KHt9KTsK","ZGVjb2Rlcg==","IHJvdWxldHRl","IGRpc21hbnQ=","SXI=","IGluc3VyZw==","ICcnOgo=","LuKAnQo=","IGJydW5ldHRl","LmFzc2V0cw==","X05FVFdPUks=","4LiK","bnlt","X1NvdXJjZQ==","XFRlc3Rz","RXNjYXBl","Y3J5cHQ=","LlhNTA==","IHNvdW5kaW5n","b3Bjb2Rl","IGNsYXNzaWZ5","IGVtYmFycmFzc2Vk","IExPR0lO","IHJlc2lkdWU=","IE5FRUQ=","LmRlZXBFcXVhbA==","cGVyYw==","LWNhbA==","UmVkaXM=","VHJh","KF8p","YXNrZXRz","Z3JhZGF0aW9u","IGVuenltZQ==","IFN0ZXBoYW5pZQ==","LkludmFsaWQ=","J10/Pjwv","IGRpc3BsYWNlZA==","IGVsZW1lbnRvcw==","KGR1cmF0aW9u","cm93Q291bnQ=","IEZTdGFy","bGV0YQ==","L3BvcHBlcg==","IHN0YXRv","IHBlcmZvcm1lcg==","IGRpc2NpcGxpbmVz","IEZ1bGx5","aWN1bGFybHk=","IGVyc3Rlbg==","IFBvbHlnb24=","IGRpc2NpcGxlcw==","LmlzZGly","IHRlc3RpZnk=","X1NS","cHJpc2luZ2x5","IEdMaW50","IHdpcGVk","IGNhcnZlZA==","IERpc2g=","Lmhlcm9rdWFwcA==","c3RpdGlhbA==","IE1BVENI","Y2xhaXI=","IERheXRvbg==","LycpCg==","SURETEU=","IGluZnJh","IGxpdmVseQ==","IGRlcHM=","IFsuLi5d","CQkJCQkJCQkJCQkJCQkJCQk=","IExvbg==","RXh0cmFz","VHJhbnNpZW50","0LLQtdG
A","L21vZHVsZQ==","IGVuZHVyYW5jZQ==","X3RleA==","ICJ+Lw==","X3lsYWJlbA==","IG9iZWQ=","L2dhbWU=","b3BzeQ==","IGZpcnN0bmFtZQ==","LmZvcmNl","IG1hcnQ=","XENsaWVudA==","IGxlZ2l0aW0=","LmZsYXR0ZW4=","Iics","b3NleHVhbA==","IGpvdXJz","TUg=","ZXhwaXJlcw==","IHN0eWw=","LmludGVydmFs","S25vd24=","IGZvbGxvd2Vy","IGRhbGxh","cGlyeQ==","X3NzbA==","aXNobGlzdA==","IFJleQ==","IHN1cGVybWFya2V0","T2J2aW91c2x5","LWVudGVy","IHByb2JhYmlsaXRpZXM=","IEhW","IENpbmVtYQ==","IGN0eXBlcw==","IEJDTQ==","X1RBQw==","O2E=","LmJ1dHRvbnM=","IHJldHJpZXZpbmc=","aWxhcml0eQ==","IHVuZGVydGFraW5n","CXN0YWNr","IGtlbA==","IFhlbg==","KHBoaQ==","IHRvdWdoZXI=","IFNlbGxlcg==","Y2Fwcw==","IEVtYmVy","IENoaW4=","IGxhdWdocw==","Q29udmVyc2lvbg==","Lmxpc3RlbmVy","JkI=","IHBhcmFkaWdt","IGp1bmN0aW9u","JC8sCg==","W28=","IENvbnNlcnZhdGl2ZXM=","z4A=","bGF0ZXM=","X0V4Y2VwdGlvbg==","IG1laWxsZXVy","IHN0cmFwcw==","cXVpc2l0ZXM=","CXNu","IG1hc3NhY3Jl","b3R0ZXM=","X2dyZWVu","VGl0bGVz","Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ==","IFJlZ3VsYXRpb25z","YXJs","X3Nob3J0Y29kZQ==","IERyYXdlcg==","IHBhcm9sZQ==","IHdpbGRlcm5lc3M=","aXNzb24=","IEFGVEVS","Q3JlZGVudGlhbA==","QmxvY2tpbmc=","IEhUQw==","U2lu","KGF1dGhvcg==","IGNvcnRleA==","Jyl7DQo=","77yJ77yM","IGR1bXBlZA==","IFNodXQ=","IEtleUV2ZW50","CVBsYXllcg==","LmdldFBsYXllcg==","IGlnbm9yZXM=","dG9nZ2xlQ2xhc3M=","IEV4Y2x1c2l2ZQ==","PigpOw==","LmdldFA=","YW55ZQ==","IG5ldXJvbg==","aWZvbGQ=","IEtub3du","Qml0Y29pbg==","QW55d2F5","YXlldHRl","ICdbJw==","w6BuaA==","bWdy","IGNvcnJlbGF0ZWQ=","IG5hdXNl","IG1lbnRhbGl0eQ==","aGFzTWFueQ==","IEZH","YW1waWU=","SVRV","RnM=","LlNw","X2JldHdlZW4=","RGVwZW5kZW5jaWVz","b3Vn","UGxhY2Vob2xkZXI=","PXRleHQ=","IE1hbmFnaW5n","b2NhbHlwc2U=","5YyX","X21hZw==","Zmxk","4pE=","Q0FN","IEhlbHBlcnM=","IGRvc3Q=","L291dA==","IGFzc2Fzc2luYXRpb24=","LmdldEltYWdl","IEtlbm55","LicpCgo=","KXsvLw==","IFJhbmdlcg==","IGdlaw==","IHNpbmNlcmU=","PFZhbHVl","IERPVA==","IFZpY3Rvcnk=","IGxlZ2VuZHM=","IHByaXNvbnM=","KGV4cHJlc3Npb24=","IFJhYmJpdA==","X3NlbnRlbmNl","IGJpdGVz","IG9uRmFpbHVyZ
Q==","IOKIiA==","S2lt","LmdlbmRlcg==","IM67","IFsu","Il0pOw==","bGFuZGluZw==","LWRpZ2l0","VEVNUA==","CWVudHJ5","IHN0cnRvaw==","IGRlc2NlbmRhbnRz","dW1ubw==","IGxlYW5pbmc=","IHNwZWNpZmljcw==","cW4=","IFNwYXJ0","IHBvcnI=","RURJQVRFSw==","IHNlcGVy","J2F1dA==","IFNURVA=","IEJvcmRlckxheW91dA==","IHJldHJvcw==","IFNhbHZhZG9y","IEVOR0lORQ==","eGRj","VHdlZXQ=","dms=","IOyy","XTw8","aGV0aWNz","Y29kaW5n","UmVhY2g=","LnJlcQ==","Z3VpZGU=","LnNjb3Bl","c2hpcnQ=","cm9nYXRl","U0VUVElORw==","IFByb3RlaW4=","IGVpbmc=","LkVNUFRZ","LmRm","IGNsZWFyZXI=","IGNyb3Nzb3Zlcg==","IFRveXM=","IGNvYXRlZA==","Lk1vbnRo","IEF0dGFjaA==","L3J1bg==","LnRhYnM=","IG9nc8Ol","QnJvd24=","LkRBVEU=","IGZvcw==","5a2X56ym","V29vZA==","LXRocmVl","aGVyaXRlZA==","IHJvcA==","KGFj","IGVtYm9kaW1lbnQ=","IEtlbm5ldGg=","IGNhbm5vbg==","IGJpZGRpbmc=","PElFbnVtZXJhYmxl","CXNldFRpbWVvdXQ=","X2RpZ2l0","IGVsaW1pbmFy","KG5l","YnVkZ2V0","Q1NJ","IOyVhA==","IEFTUA==","R3JvdXBJZA==","X0NPVU5URVI=","Y29uc3VsdA==","IGlmcmFtZQ==","bGVnZW4=","X0RFQ0xBUkU=","U2hhcnBlcg==","IEZyaWVuZGx5","dWxldA==","LWNvbW1hbmQ=","INCg","Y3ljbGVz","IFdhc3Rl","IHRhcHBlZA==","CUJ1ZmZlcg==","4oCUaW4=","IAogIAo=","IElkZWFs","IENhbmR5","X1N5bnRheA==","w6p0","7J2M","YWJvdmU=","IE5hemlz","IGZzdA==","c2Vpbg==","IGt1bm5lbg==","d2lr","IFNhdmluZw==","LmV4dGVuc2lvbnM=","IERlc2VyaWFsaXpl","b3VyZw==","LmF0dHJpYg==","77yaCgo=","IFdpbnM=","LmVxbA==","Unlhbg==","X2Fjaw==","T1VSQ0VT","IG9ucw==","Z3Jlc2U=","YWZpYQ==","TW9kZXJu","IGFkaGVyZQ==","IGJpb3M=","KGFjYw==","a2Jk","VGhyb3du","qeuLiOuLpA==","CUh0dHA=","CXhtbA==","RW5kRGF0ZQ==","KHBhcnNlZA==","LmdldGVudg==","cmVnaXN0cg==","bmVsbA==","aW9uYXJpbw==","LmlubmVyV2lkdGg=","cnRs","UFY=","X3BpZWNl","IERlcG9zaXQ=","eWVycw==","IE5TTnVtYmVy","IGdpbnQ=","ZW5zZW1ibGU=","IG5ld2NvbQ==","IFZpZXRuYW1lc2U=","X2hw","IGFjY3VzaW5n","IHF1aXM=","IGludmVzdGlnYXRvcg==","ZXNzZW50aWFs","IENY","LmZvck5hbWU=","ZGVmcw==","IGFuYWx5c2U=","X2FuaW1hdGlvbg==","IHRoYQ==","dGFib29sYQ==","IFRIQw==","w61jdWxv","IGdsb3dpbmc=","IGhvbm9ycw==","YnN0cmFjdA==","a3
A=","SVRFUw==","ICMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyM=","I2dldA==","L0Rlc2t0b3A=","CWdsbQ==","IHppbmM=","w6F0aWNh","IDw8Cg==","Vk1M","IFVubGltaXRlZA==","dnJl","LWJlZA==","X25vbmNl","IEdJ","dHJhdmVs","IGlzS2luZE9mQ2xhc3M=","IGFub255bWl0eQ==","RmlyZXN0b3Jl","IGVtYWlsZWQ=","X0ZMQVNI","IGbDpXI=","4piF4piF","IDpd","SHVt","LnJlc2VydmU=","w7xt","IGtvc3Rlbmxvc2U=","IFNDUA==","dXRhbg==","IEdvcmU=","IGNoYXRz","Lz4NCg==","LmdldFJlc291cmNlcw==","IGx1bXA=","X2NvbnN0cw==","KGV4dA==","CWRpcg==","4p0=","IHBhZGRpbmdUb3A=","IG9ic2Vzc2lvbg==","IGJhbm5pbmc=","IEFwcE1vZHVsZQ==","IHBhcnRpc2Fu","IGNhdGFsb2d1ZQ==","IG1pbm9ycw==","IHBpdGNoZXM=","d2VlcA==","IHVuZGVydGFrZQ==","IHRoZW1lZA==","YXVkaXQ=","LnNjcm9sbFRvcA==","IHJlcg==","IHN5bXB0b20=","IG9wZW5pbmdz","LmJsb2Nrcw==","b3Blbmlk","IGFzc2g=","LXNhdmU=","IFBpZw==","IHJlZ2Fpbg==","IGluaWNpYWw=","L2Zhdmljb24=","CWV4cA==","IHNwaWNlcw==","aXNrYQ==","Y2xhaW1z","bWFr","ZGVmaW5pdGlvbnM=","IGNvcnJlc3BvbmRlbnQ=","IENhbm5hYmlz","X18sCg==","IEx1Y2t5","IEdhdXNzaWFu","IE5lYXJseQ==","Q0FE","J11dCg==","IGFkZXF1YXRlbHk=","IFRJVExF","Y29uc3RpdHV0aW9uYWw=","LW1t","X292ZXJyaWRl","IGJsYXM=","LnJlYWR5U3RhdGU=","IHJlbWluaXM=","IHJlaW5mb3JjZWQ=","IENvbGxhYm9y","IGRlY29yYXRpbmc=","IGJhY2hlbG9y","RVJSVVBU","IHVwcmlnaHQ=","aXBhdGlvbg==","IE5vYmxl","IHZhbHVlRm9yS2V5","IHNldExvYWRpbmc=","Lklnbm9yZQ==","5YE=","R2xvYmFscw==","IE1lbnQ=","QVNTRVM=","IGxpbWJz","IEhVRA==","aW5jaQ==","Lml2","IFFNb2RlbEluZGV4","RnVzZQ==","IHBlZGFs","X0ZSRVE=","KHZlcmJvc2U=","IGxvbmdpdHVk","IENoYXJ0ZXI=","6re4","IGJ1bmRsZXM=","Lmlnbm9yZQ==","dW1ibw==","RU1B","Li4uLi4uLg==","c3g=","LkNhcmQ=","IGhldXRl","IHN0ZWVy","anVtbGFo","IHtf","X0NoZWNrZWQ=","IGZheA==","IEd1c3Q=","aXRjaGVucw==","ICkpCgo=","IHJlbWFya2FibHk=","L1hNTA==","LXJlbW92ZQ==","X2J0","IGluY3Vi","LnBhY2thZ2U=","LmN1cnJlbnRUaHJlYWQ=","IEhpZ2hsYW5kZXI=","LnNpZGU=","c3BsYXNo","IGljaQ==","PUQ=","IHB1Y2s=","IGJhbGxvdHM=","IGh1Z2VseQ==","Y29lZmY=","IHBEYXRh","LkNPTFVNTg==","IEhlYWxpbmc=","I
G9yZGlu","ISks","ICcnLA0K","KG1k","IFNhc2s=","PHN0cm9uZw==","IHN1cnZpdm9y","LnNlcmllcw==","IGNhZmZlaW5l","IGAo","LlRSQUlMSU5H","X0lucHV0","KCJe","emQ=","Jik7Cg==","IFBpbmc=","IHZvdWNoZXI=","LnJhdGluZw==","LXNoaXJ0cw==","IFJldHJpZXZlcw==","LmFsaWJhYmE=","T3JhY2xl","X01PVg==","T2xkRGF0YQ==","IC8qDQo=","IGdib29sZWFu","ID0+DQo=","IHLDoQ==","IGJsdW50","IEltYWdlSWNvbg==","aWZpaw==","UlRD","IGZpYmVycw==","IHRvaWxl","LnNlbnQ=","IFB5UXQ=","JGFwcA==","IG1lZGlv","IGdyYW50aW5n","IHRzbGludA==","IE3Dtg==","KGZpZ3NpemU=","IGh1cnJpY2FuZQ==","IGxpZmVz","IMOE","cm9jZXNzaW5n","X3N0YW5kYXJk","LW9wdGlvbg==","JykpKQ==","IHZhY2FudA==","5bel","IEhvbGxvdw==","aGFuZGxlQ2hhbmdl","IGRpdmlkZXI=","IEVuZ2luZWVycw==","IHN2ZW5z","IGNvbXBsaWFudA==","dGFuZ2dhbA==","IENyZWRpdHM=","IEVtaXJhdGVz","UnVsZUNvbnRleHQ=","IHJlYWxpemF0aW9u","IGRpc3RyYWN0ZWQ=","XSs9","IGF1Z21lbnQ=","IER3","b3Rw","b3JyZW50","RWRpdGFy","LnN0b2Nr","U3R1ZHk=","cGVjdGlvbnM=","IEdhbWVNYW5hZ2Vy","PWN1dA==","IGZsb2Nr","IFJvbWFucw==","dGhlbQ==","LWhvcA==","IHNjcmVlbnNob3Rz","IC8qIQo=","IGNvbnZlcnNpb25z","IG5vcm1hbGl6YXRpb24=","KGNvbmZpZ3VyYXRpb24=","IGFlcm9z","X3NlY3VyaXR5","IScK","Qm9udXM=","IERSSVZFUg==","CURhdGU=","dGll","IFd5b21pbmc=","U3RhbmQ=","aXRyZQ==","IHNob3BwZXJz","IGRpc2FkdmFudGFnZQ==","IGxpa2luZw==","56yR","IHVuZGVyc3RhbmRhYmxl","U0VF","IGhveQ==","IG5pbmV0ZQ==","IGNvbmZlcg==","IG5vd3JhcA==","IFZlcm4=","LA0KDQo=","aW1lc3RlcA==","TGF5b3V0TWFuYWdlcg==","4Lc=","CXdhaXQ=","UExFVEVE","SmFwYW4=","IGluZHVjZQ==","IOWv","0L7Qt9Cy","X0VORFBPSU5U","Lmhvcml6b250YWw=","IGFjY2VsZXJhdGVk","cmltb24=","SVZFUw==","VHJhbnNhY3Rpb25z","TGVhbg==","IFNPVVI=","d2hldGhlcg==","eWc=","IG9pZA==","IEVudGl0eU1hbmFnZXI=","T1VOVFJZ","IGZpbGE=","T0xVTU5T","SU5VRQ==","IEFuY2hvcg==","VFJBTg==","d29v","YmxvY2txdW90ZQ==","IE51cnNl","IENhcnA=","IHJlZGVlbQ==","LnRyeQ==","IEpQ","IHRpbWVzdGFtcHM=","ID8+Ij48","IFJFTU9WRQ==","IFN0YXJidWNrcw==","UmVhbGx5","IGZsb29kZWQ=","LkNhbGxiYWNr","RHJvcERvd24=","aXBybw==","IHRlbmRlZA==","bHRl","IHByb3BvcnRpb25z","LXRl","IFJlbmE="
,"bGljYXRl","Zm9yY2Vz","LmV4dHJh","LmF1dGhlbnRpY2F0ZQ==","0LLQvtC0","obA=","IGZvckNvbnRyb2xFdmVudHM=","IHNlbmhh","IGtlaW4=","IG1pbmlzdA==","IFByZWZlcmVuY2U=","IFRlbGVncmFwaA==","0YPQvw==","c3RycG9z","IGlsbG5lc3Nlcw==","IHBpZ3M=","IGdldEludGVudA==","U29s","IMKh","KGNwdQ==","W3Byb3A=","c2NyZWVucw==","Jyk7Pz4=","IEFjdHM=","IHN0cmR1cA==","IGF2ZXJhZ2Vz","YW5hbA==","IENhc3VhbA==","R3JvdXBCb3g=","IEhhbmRib29r","L2NvbW1lbnRz","IG51bWJlcmVk","IGJyb2FkY2FzdGluZw==","55uR","Lm5hdGl2ZUVsZW1lbnQ=","Lm11","IHVwZGF0ZWRBdA==","IERvZXNu","LkFD","LmNvbGw=","IHJlY29yZGVy","X3NoYQ==","Qmc=","Ymls","IGJvbHRz","IOes","IGltcG9zaW5n","IEluZm9ybWF0aW9uZW4=","X2ZsYXNoZGF0YQ==","ZWNvbm9taWM=","UmVtYXJr","dWNhcw==","IE9mZmljZXJz","IFRFUg==","V2Fsaw==","IG1lcmNhZG8=","X2dlbmVyYXRl","SFk=","Q2FsbGluZw==","c25hcA==","c2NyaXB0SWQ=","Lm9wZXJhdGlvbg==","IEZsYW1l","bGluZXNz","IHJlbnRlZA==","X3RvZ2dsZQ==","LWNoYW5naW5n","IFRZ","J3V0aWw=","RUVQ","IGdyYXBocWw=","IFVuaQ==","IGltcHVsc2U=","LkJhc2lj","IGVuZXJnaWVz","TUFSWQ==","IE1hcmNlbA==","IG1vcnRhbA==","IGZyZXM=","bWVucw==","bW90aW9u","IHNhbXBsZWQ=","4oCcVGhhdA==","aWRheQ==","cXVpcG1lbnQ=","Z2V0SW50","IEFic29sdXRl","LCci","dW5lZA==","LnNoYXJl","IH0pKA==","bW1t","IFJpc2luZw==","5Lu7","IHVuZW1wbG95ZWQ=","eGZh","LmZvbGxvdw==","CQkJCSAgICAgIA==","c2x0","LlBob25l","IGtuaXZlcw==","IGV2ZQ==","b25DbGljaw==","XSkpDQo=","IFdpdG5lc3M=","CU5T","IEVPUw==","IFN0ZWZhbg==","IFByaWVzdA==","4oCUd2hpY2g=","R2V0U3RyaW5n","LkJ5","IHVwc3RhaXJz","IGRldHJpbWVudA==","YnJva2Vu","ZW1icm8=","IG5pY290aW5l","aWxpb24=","IGFzdG9uaXNoaW5n","X2FmZg==","IExlc3Nvbg==","IGFjY2lkZW50YWw=","b2Rvcg==","IGRlY2ly","IG5ld05hbWU=","Ky4=","55u4","aWdzbGlzdA==","IEdpdGh1Yg==","IHN1Y2Nlc3NpdmU=","cmFjaWFs","IGVudmlyb24=","6aqM6K+B","IHJlZGlyZWN0ZWQ=","VE9UQUw=","IGdyYWJiaW5n","IExhbmNl","IGZvcmZl","X0NC","5b6u","RWxhcHNlZA==","X3dheQ==","KERpYWxvZ0ludGVyZmFjZQ==","X21lYXN1cmU=","eGJi","RG9n","RGVwYXJ0","LXNyYw==","cmVzb2x2ZXI=","d2l0aHN0YW5kaW5n","X3NoZWxs","IExhc3ROYW1l","IEF2aWF0aW9u","IGJlZ2lubmV
y","KCIlLg==","KHRvb2w=","INC90L7Qsg==","OmluaXQ=","KEFQSQ==","IE1vcnJpc29u","dnRDb2xvcg==","IHN0YXBsZQ==","L0lORk8=","IHN1cGVybmF0dXJhbA==","IHN0ZWFr","dGltZWxpbmU=","enpsZQ==","ImAKCg==","U2Vjb25kYXJ5","IE5lcGFs","LlN0cmluZ1V0aWxz","IGFkYW0=","ICguLi4=","IHN1YnN0aXR1dGlvbg==","IGJvYXJkaW5n","IEtleXdvcmQ=","IEFzc2F1bHQ=","ZGJjVGVtcGxhdGU=","IG9yZGVySWQ=","KGVuZ2luZQ==","LmFzc2VydFRoYXQ=","IFZlbnVz","IGhvbWljaWRl","IEF2YWw=","IGd1dHRlcg==","IFN1cHBvcnRlZA==","L3BhcnQ=","IGFjY2xhaW1lZA==","SGlzdG9y","IG1lc2Vz","w7xiZXI=","IFJlbmV3","IGdyYXM=","IEVr","IGluZmlsZQ==","aW5keQ==","Lm11c2lj","LlNjcm9sbA==","IEFnZXM=","IE5hcnV0bw==","IEdhdGhlcg==","IGNvbmZpcm1pbmc=","PSgi","IHBpdGNoZWQ=","b2xleQ==","RnJhbmNl","Kyci","JHRvdGFs","IG9uZGU=","IGRpdGNo","X3NpZ21h","IGNvbnRpbnVpdHk=","cmV3YXJk","LWxvYWQ=","IHByb2Nlc28=","TG9ja2Vk","c3Rhdw==","IHNwaW5hbA==","bGF6eQ==","IT09","amVzdA==","IGR1bg==","IFJvZGdlcnM=","CWdyaWQ=","IGxvZ29z","IEJlbmdhbA==","LnN1cGVy","UHJvdmlkZXM=","IG51dHJpZW50","LlRpbWVzdGFtcA==","SVpBVElPTg==","5YaM","IGZhdHM=","IFh4eA==","Y3RpY2E=","VGFyZ2V0cw==","IGNvbnRvdXJz","IHJlb3JkZXJlZA==","OkFycmF5","IHRvbGVyYXRl","Vmly","IHRlcnJpYmx5","IGJyaWNrcw==","KCZf","aGI=","UG9ydGFs","IEJyZWFk","LndoaWNo","wq10","YXNJbnN0YW5jZU9m","IGpvYmplY3Q=","CWxlbmd0aA==","X01U","OyI+DQo=","X0VYSVNU","IG1hdGVybmFs","UkVM","IOqyveyasA==","aGVl","IGxheW91dHM=","IExhcA==","YWlzeQ==","IHN0dW1ibGVk","IFVJRw==","IFNjbw==","IGltcGFpcmVk","UkVTU0VE","IGFidXNlcw==","VkY=","QVJC","Lk5BTUU=","cmNo","cHJpbWly","X2NvbXBsZXRlZA==","IHBlbm55","Q2hyb21l","KGJlZ2lu","ZXJuZW4=","LWNoZWNrYm94","UGxhaW5PbGREYXRh","IExQQw==","cmFkZQ==","c3Bpcg==","IGNvbmNlaXZlZA==","VGlwcw==","IElvVA==","IEdhbg==","6IGU","IGJpYXNlcw==","IGNvbnN1bHRhbnRz","cGxlZA==","X2h0","YXNzb2NpYXRlZA==","XSwKCg==","IGRlbGlnaHRmdWw=","INGC0LXQug==","SGVsdmV0aWNh","KGxvYWQ=","LWV4cGFuZA==","X1dJREdFVA==","dG9h","IEFrdA==","IG9tbg==","IGNsYXVzZXM=","SW50ZWw=","Ki99Cg==","X3JlZ2lzdHJhdGlvbg==","IG9sZFZhbHVl","IHJlc3RvcmluZw==","IHVucmV
hbA==","T1ZFUg==","CQoJCgkK","QVRT","X3Byb2Jl","IGRpdmlzb3I=","LnVwZGF0ZUR5bmFtaWM=","5bmz","UHJvZHVjZXM=","c3RhbXA=","Lmpib3Nz","CXRhc2s=","ISg6","IHBzeWNoaWM=","QGNsYXNz","TWFydGlu","IFBhc3NlZA==","Y2xhcmF0aW9ucw==","aGVs","0LDRhw==","CWNvcHk=","LWJpbg==","emFu","aWdyYW0=","4Ka+4KY=","KHNpZw==","IENhdmFs","XyMj","ICU9","b3V0bGluZWQ=","IEFjaWQ=","IHVucHJlZGljdGFibGU=","LWRhc2hib2FyZA==","SGV4U3RyaW5n","K2M=","LlB1YmxpYw==","4bqp","IGNvbnZleW9y","IEVC","IHNlbGVjdHM=","IGtub2NraW5n","IENlYw==","SUJVVEVT","b3dhxIc=","Z2F0c2J5","KnY=","ZW50cm9weQ==","IGRpc3BhdGNoZWQ=","IGNhbWVs","IFNhdHVybg==","IG92ZXJ3ZWlnaHQ=","KHBob25l","cGFyYWJsZQ==","JUI=","X3ZlY3RvcnM=","IGJyZXdpbmc=","IFRr","IERvd25sb2Fkcw==","IFNhdmVk","LlByaWNl","IGN1cnZlZA==","IFBhcmVudGhvb2Q=","6LY=","LnBubA==","cGxldGVseQ==","LkRheQ==","IGFkdmVydGlzZXJz","IGVqZWM=","IHByemVk","668=","ISc7Cg==","IEt1c2g=","IFRBQg==","IHF1ZXN0cw==","IGNvaW5jaWRlbmNl","dW1taWVz","IEthc2htaXI=","IEV0aGljcw==","X2dyb3d0aA==","IGFrdGl2","IGdyb3VwaW5n","5aKe","X3RydXRo","5ZCs","dG9kb3M=","aXNldA==","VGV4Q29vcmQ=","w6R0dA==","IFp1cg==","cm95cw==","X01BR0lD","IGJyZXdlcnk=","KFN0YXRl","IFNNQUxM","IFBsYW50cw==","aXRiYXJ0","ZWFjaGVy","IEFkZWxhaWRl","THU=","IGZpY2s=","dW5kbGVz","X2xvYWRlZA==","0LjQtQ==","UG9sbA==","cml0aWM=","RUxZ","ICsn","IFByb2Zlc3Npb24=","IHN0YW1wcw==","IFNldw==","c2Nyb2xsVmlldw==","IGNvbW11bmlzdA==","L3Byb2JsZW1z","fQ0KDQoNCg0K","LG8=","IHVkcA==","IG9iZXNl","YXBwcm92ZQ==","YW5jZWxsYXRpb24=","X0dhbWU=","IEhhc2h0YWJsZQ==","YWRhcHRpdmVTdHlsZXM=","IHBvc3Nlc3Nlcw==","Lm1hdGNoZXI=","ZnVuY3Rpb25hbA==","TXJz","CXNhdmU=","IERiVHlwZQ==","IGtlbg==","Z2V0Q29udGV4dA==","IG1hbnM=","KHJlbA==","IEJyb3RoZXJob29k","KWAK","6Kej","LkluZm9ybWF0aW9u","T3V0T2ZSYW5nZUV4Y2VwdGlvbg==","IFNlaw==","Q2Fz","IGJsb2dnZXJz","RWl0aGVy","KCIiIg==","IHBpbmNo","IGNvYXJzZQ==","KXA=","IFB1bHNl","IGxlYXJudA==","IGRlbnRpc3Q=","IG9uY2hhbmdl","IGRpcmVjdGl2ZXM=","KGFjdGlvbnM=","bnlkZXI=","IFNoaXI=","VHJhaXQ=","X2RlcA==","IFBFVA==","IFJFUA==","LkFwcFNldHRpbmdz
","Y3VhZG9y","aWRlbmF2","IGVudmk=","IHNsYW1tZWQ=","IFNob290","IGRhdGVGb3JtYXQ=","LmpvZGE=","dmV5cw==","ICkuCgo=","IGNhcmVn","IFBhcmFsbGVs","X3RyYW5zbGF0aW9u","LmZ1bmN0aW9ucw==","Lm9icw==","UnVudGltZUV4Y2VwdGlvbg==","W109","b3ZlcnZpZXc=","IFNjaGw=","IG5vaXN5","IE9uUHJvcGVydHlDaGFuZ2Vk","U2VuZGluZw==","IHVuZmFtaWxpYXI=","VXBvbg==","IFByaW50cw==","LnR5cA==","IGZsZWVpbmc=","CW1vdmU=","KFVu","IHFy","15w=","X2JldGE=","IHNraWVz","CW1l","V05E","IHN0aWNrZXJz","Ymxhcw==","IGluc2VydHM=","IHZlcnNlcw==","IERldw==","IHRhbmdpYmxl","IGhlY2hv","UE9M","IHRlYXJkb3du","b21uaWE=","SUJF","LmNvdmVy","X3N0cmF0ZWd5","Xi0=","c2V0UG9zaXRpb24=","dWFsZQ==","U2lnbmVk","IGlmYWNl","YXNlbGluZQ==","LnNldFRpbWU=","IE1pbmVyYWw=","IEZpZ2h0aW5n","c2tpbnM=","IGRpc2NyaW1pbg==","IGRhbnNr","IFByaW5jZXRvbg==","YWNpc3Q=","ICgpKTsK","dHJhY2tz","aW1vbmlhbA==","YWRlY2ltYWw=","RVBST00=","dWdnbGU=","Lk5vdGlmaWNhdGlvbg==","JG1haWw=","Y2FudGlkYWQ=","IEp1bmc=","IHNlZWtlcnM=","IHBsYXVzaWJsZQ==","dGllcg==","0LXQtg==","IHJhcHBlcg==","IE1hbmE=","IEh0dHBTdGF0dXNDb2Rl","IGJ1cm50","bG9zZXM=","IEZvdG8=","IEpzb25PYmplY3Q=","SW5zdGFncmFt","IHN5c2NhbGw=","IHJlYWxpdGllcw==","IE1BVExBQg==","Ol57Cg==","VEVSTQ==","IENiZA==","IFBhcmFncmFwaA==","IHRyYXbDqXM=","IGNvbnN0cnVjdGluZw==","IHN3YWw=","IHBpZ2U=","TExMTA==","LWV4aXN0aW5n","R2V0cw==","IG1lbHRlZA==","IG1pdGlnYXRl","SGVu","IGht","aW1hcw==","IEFv","IFBlcmV6","IERBTA==","IOuLpA==","IGRpdmlz","U3Rvcnlib2FyZFNlZ3Vl","IE1vZGlmeQ==","IMOcYmVy","X09WRVJSSURF","LnBlbQ==","dW50b3M=","IGVzcGHDsQ==","IHs/","IFBBWQ==","X2lwdg==","IEZ1cnk=","X18uX18=","ZWxvdw==","LWNlbnRlcmVk","Y2hlY2tz","X1JlZw==","LUphdmFkb2M=","CWxvYWQ=","IExpa2V3aXNl","2KfZhQ==","VU5F","LnNlbQ==","eGNi","IENhdmU=","X3NsZWVw","IHNpbGVudGx5","IEV4dHJlbWU=","LlRvVXBwZXI=","CUNIRUNL","IGN1ZQ==","IFFCeXRlQXJyYXk=","IGNvcnJ1cHRlZA==","IETDqQ==","IGltcGVk","R2V0TmFtZQ==","IGluYWNjdXJhdGU=","IHNvYmVy","0LXQtQ==","IGJhcmNvZGU=","LS0pewo=","aW5raQ==","IMOpcA==","IGRyaQ==","IEFMVA==","Pj4+Pj4+Pj4=","b250YQ==","W0w=","IGludGVyZXM=","
dmVydGluZw==","IGRpYWdub3N0aWNz","cGRldg==","6Kk=","IEludGVncmF0ZWQ=","KS4n","X2dj","JHRleHQ=","LmdhbWVz","IFRlcnJh","J1Jl","LnRyYW5zZmVy","X0ZJRk8=","Z2V0TW9kZWw=","IGJsYW5k","IENvbGVtYW4=","IHByaW1lcw==","IOaI","IGNyb3NzZXM=","bms=","R0lORw==","ICde","IEJsb2I=","IGludGVyY291cnNl","IEJsdmQ=","IHdlaWdocw==","X3JlZ3VsYXI=","IFBlcnRo","IHNlcGFyYXRpbmc=","IGJpbGxlZA==","LnRhYkNvbnRyb2w=","IHB1cHBldA==","IHV0aWxpemF0aW9u","IOKWoA==","IHN1Y2Nlcw==","IGxhbXBz","X3Byb2o=","RXJpYw==","IHJlbm92YXRpb24=","IEZhbWlsaWVz","IEJpdHM=","cGFydGlhbHM=","LU1lbg==","c29sdXRpb24=","IGR3YXJm","LklOVEVHRVI=","IExPQ0s=","LmN0","IGV4Y2VycHQ=","IFBpeA==","IEZpcnN0TmFtZQ==","QU5URUQ=","IEFkbWly","LWhlbHA=","UHJpb3I=","IEFsaWdu","LklOU1RBTkNF","TGluZUVkaXQ=","KCcvOg==","IGluZXQ=","b2R1cw==","LnBrbA==","IEtZ","dXBlcnQ=","IG5lcnZlcw==","X2dyYWRpZW50","fScsJw==","X3VucmVm","IHNhdHVyYXRlZA==","IENvbm5lY3RlZA==","IEZO","RVhJVA==","IHRlbGVwb3J0","IGF2YWl0","UGFnZVJvdXRl","IGRpdm9yY2Vk","KGxhbmc=","ZnN0","IFR5cg==","IG1lc3Nlbmdlcg==","aWZzdHJlYW0=","WFM=","IEJhbmtpbmc=","IGluZmVjdGlvdXM=","IE1vbnM=","X0xPT1A=","IHp1csO8Y2s=","IG9idGVuZXI=","L3JlcG9z","VmVs","YWNybw==","IHVzZXJSZXBvc2l0b3J5","c3R5bGVUeXBl","IFNSQw==","Vk1MSU5VWA==","cmVjdXJzaXZl","L2Jhcg==","X2NoaXA=","b21pbmF0ZWQ=","IE5pdA==","4oCUdG8=","IEJ1ZGRo","0L7QvNC10YA=","IE1BRw==","IENIRQ==","X2Rlbg==","LnJhaXNlcw==","X2RlZ3JlZQ==","IHB1bXBraW4=","X3RlbXBsYXRlcw==","X01FRElB","IFRpbWVsaW5l","IGJvdHM=","T2JqZWN0VHlwZQ==","IGJ1eXM=","LnBvc3Rz","Q0FM","d2FpdGluZw==","IERhbmllbHM=","IGRhYmVp","IFNpZ21h","aWxvcg==","aWdlbA==","LFc=","QURT","KHBhbmVs","7LK0","aXRhdGluZw==","LnBhbGV0dGU=","IG1vc3F1aXRv","IHRlZ28=","KHBhcnNlSW50","IGRlc3B1w6lz","cHJvbWlzZQ==","IHdpag==","dHlwZXNjcmlwdA==","IFR2","X0lERU5USUZJRVI=","KS4KCgo=","X2ZsYXQ=","aXRzdQ==","VVNS","ZXhwZXJpZW5jZQ==","LWZpdA==","cGhpbng=","X3RocmVzaA==","IGlkZWFsbHk=","IEZyZWVtYW4=","LERC","X3J3","562J","VWI=","X3N0YXRpc3RpY3M=","PSIiPjw=","IGNob3Jl","IHlvcms=","aW5zdGFsbGVk","QWRkaXRpb25hbGx5"
,"IHBzdG10","eWxrbw==","OjoK","Rm9yZXN0","IGhlYWRzZXQ=","IGdhbGxvbg==","0YDQtdC8","IHdpdGhkcmF3bg==","IENhbmRpZGF0ZQ==","IG1lbHRpbmc=","IGZyZWV6ZXI=","IGhs","X0hFTFA=","bWltZQ==","KC8q","IHRoaXJzdA==","JHJldHVybg==","bWVtYmVyb2Y=","0LXQsQ==","IEh0dHBTZXJ2bGV0UmVxdWVzdA==","KG9i","X1Jlc3VsdA==","IGFzc2VydGVk","IGZ1bGZpbGxpbmc=","IHN0cmV0Y2hlcw==","cGFyYXRlZA==","LWZ1bmRlZA==","IOWb","aW5nbGVz","X2Nh","LmNvbmRpdGlvbg==","IERpc3BsYXlz","IG9yYW5n","IENSRQ==","IGdsQmluZA==","IFNlbGVjdG9y","L3R5cGU=","IEFsZXhh","Y2hlZHVsZXM=","IFBlbmluc3VsYQ==","IHBhcml0eQ==","CWRlc3Q=","IERvb3Jz","DQoJDQo=","X2RpbWVuc2lvbg==","IGFsb2Fk","LlN0b3JlZFByb2NlZHVyZQ==","KHBhcmVu","IEJ1cmtl","JyldCg==","LWVuZ2luZQ==","IHF1aXI=","IEh5YnJpZA==","IERvZQ==","IG91dGxpbmVz","IFRyZW5kcw==","X05W","cGVyaW1lbnRz","IEhpbg==","Pycs","CVRleHQ=","RlVM","IHNtZWxscw==","IHNsaWNr","IG1pc2VyYWJsZQ==","IEFycmF5QWRhcHRlcg==","IHBhcmFtU3RyaW5n","SG9t","X2xpdGVyYWxz","dXN1YXJpb3M=","IHByb21wdGluZw==","X2xhenk=","IEFjdGl2YXRpb24=","X29j","V2Vhaw==","IGFuZWNk","IFVDTEE=","PXJl","aXNzZW1lbnQ=","IEVzY29ydHM=","RXhjZWxsZW50","IFBhdXNl","IHJlcG9zaXRvcmllcw==","VE9S","YXJpYXRl","X2lzbw==","dXBkYXRlcw==","aGFsYg==","dWRpYW50ZQ==","66Gd","IG5haXZl","IFBlZw==","IExvdW5nZQ==","QVJHSU4=","KGJpbg==","T25DbGlja0xpc3RlbmVy","IEZBSUxFRA==","IGxpdGU=","IGR6aWU=","IExpdGVyYWw=","aXZvcg==","ZmNudGw=","IGVhdHM=","IHFlZA==","VW5sb2Nr","cmlkaW5n","dW5kYWk=","PU0=","QVRURVI=","Q29uZmlndXJlQXdhaXQ=","aWNpYXM=","dXN0b21lZA==","IHN1Y2Nlc3Npb24=","ZW5kVGltZQ==","IEp1cGl0ZXI=","IGp1ZGdpbmc=","ZHJhdGlvbg==","X2RvY3M=","Lm1v","IGVkdWNhdG9ycw==","IFZpbmU=","Q29uZA==","W291dA==","cWI=","XFZhbGlkYXRvcg==","IG1lYW5pbmdz","IHByZXNlbnRseQ==","IGRpdmlkaW5n","b3R0ZW5oYW0=","YXNjdWxhcg==","IHRyYWlsZXJz","IENMT1NF","0LDQvNC4","4oCZYWk=","IEdhaW4=","d29y","IHBsYW5uZXI=","IGRpc3RyaWJ1dGluZw==","dmF0","bW9udGhz","eGxhYmVs","SEY=","VmlvbA==","LkJBU0VMSU5F","0LXRgtGB0Y8=","IFJvdGF0ZQ==","IHR4bg==","OmJvbGQ=","IGJsb3Nz","Rm9yZ2VyeQ==","KGVtYmVk","IGpha28=","c3
ByaW50Zg==","dGhlaXI=","IGV4aGliaXRz","LXN0YXRpYw==","aGVjeQ==","Z2V0QWN0aXZlU2hlZXQ=","LmNsaWVudHM=","44GN","X2hpZGU=","W3dvcmQ=","Q2I=","YWRkSXRlbQ==","YXhl","X3JhZGlv","YWxpb24=","bW9kaWZpZXI=","IHNhdHVyYXRpb24=","IGRlbm9t","X3BpeGVscw==","bWVzcw==","KGZs","YXRpZg==","IHNlY3M=","IHByb3N0aXR1dGlvbg==","IGdyYW5kY2hpbGRyZW4=","IHBhcmFkaXNl","IEZlbGQ=","X0JJTkFSWQ==","aXRvdXM=","4LmE","IGZsYXNoaW5n","LXNpZGVk","IGNvbnRyYWRpY3Rpb24=","LyoKCg==","eWxhYmVs","IFRldA==","IGFkbWlyZQ==","cmVzbw==","IGxldHo=","IFNFQVJDSA==","c2xvdHM=","IFJld2FyZHM=","IEhvZw==","IE5TRGF0YQ==","c3Rhc2g=","RmFsbA==","IEFtZXI=","TGluZWFyTGF5b3V0","L3Bob3Rvcw==","IGZlYXRoZXI=","IHwNCg==","RG93bmxvYWRz","LlN0YXJ0c1dpdGg=","IC8vIw==","aW5lVHJhbnNmb3Jt","IGFmZmlk","VnRibA==","IFJvZ3Vl","c2NyaWJlZA==","IGZhdWM=","IE1vbnJvZQ==","IGRlY2xhcmVz","bW9kZXJu","cmVvbg==","YXliZQ==","UEFTUw==","ZmVycw==","X01VTFRJ","IE1hdGhlbWF0aWNz","IHN1ZGFo","X0FUVEFDSA==","IG51bWJlcldpdGg=","IFNvbG9tb24=","amlu","b2dyYWZpYQ==","w7Zs","X2Rlc2lnbg==","Y3VsYXRlZA==","IEx1bmE=","aWVzeg==","ID0+Jw==","IHJldmVsYXRpb25z","QWxvbmc=","KGVk","IEZpbGVuYW1l","IHlsYWJlbA==","U2VjdXJl","IGJ1c2Nh","YWdub3Npcw==","X1JFQ0U=","IG92ZXJsYXBwaW5n","RXh0ZW50","IGFudGljaXBhdGlvbg==","Q2hlY2tz","IEFMU08=","b3Jj","aWxpbmd1YWw=","aXRhdGlvbmFs","IGFkdmFuY2VtZW50","b3Vybw==","IFByZWRpY2F0ZQ==","5b6X","ZXJpYQ==","IFBpZXJjZQ==","b3Jpbw==","IG1lcml0cw==","IHBlYW51dA==","LlBhY2thZ2U=","IENvbmR1Y3Q=","X1NFTlNPUg==","IGJvaWxpbmc=","IGludHJh","IElHTg==","IEZ1cg==","LlJlZnJlc2g=","IFJlYWNo","X2RlY29kZXI=","LkV4cA==","INGC0LDQug==","cGlsbA==","LFE=","IEdyaWxs","IHBvcHBpbmc=","LkFn","IHByb3llY3Rv","IG1pbGVhZ2U=","IGVjb2xvZ2ljYWw=","XV0pOwo=","IMKt","c3VicGxvdA==","YWNhZA==","IFRyeWluZw==","cmVjaXBlcw==","JGNyaXRlcmlh","IFBlcnNpYW4=","LWJvdW5k","TUFTSw==","IEdlc3R1cmU=","IGtr","IFBWQw==","IHByb2hpYml0aW9u","IGNvbWFuZG8=","IExPT0s=","U2hvcHBpbmc=","IGRpc3RvcnRpb24=","PEJvb2xlYW4=","LkdldExlbmd0aA==","dW1wdA==","XFByb2R1Y3Q=","ZWxsZXJ5","IGZpcmV3YWxs","Zm9ybWF0d
GVk","LnJlZGlz","IGVzYQ==","IFJob2Rl","U29t","Lm5vbg==","ICcpLg==","IGdldFZpZXc=","4bqhbg==","cHJ1cw==","TWF0dGhldw==","IHNpYQ==","IEZvcnM=","R1BV","aWVudHJhcw==","X0lOU1Q=","IG9sYXJhaw==","IGltcG9ydGluZw==","VENQ","LyIpOwo=","ZWl0aGVy","IGZyZXNobHk=","Y2FzY2FkZQ==","KGNoYXJhY3Rlcg==","IEplZXA=","b3RpY3M=","X1VUSUw=","Llh0cmFQcmludGluZw==","LmZpcnN0Q2hpbGQ=","IEV4Y2VsbA==","IGR2ZA==","IHRhbGxlcg==","IHJhcw==","eXBhc3M=","IGFzc2lnbnM=","IGdyaWV2","LW1vcmU=","SkQ=","IEJ1cm5z","Jz4NCg==","LkRlcGVuZGVuY3k=","LlF1ZXJ5U3RyaW5n","Lk93bmVy","IGV4cGlyeQ==","VGh1","KFZlYw==","IGhhemFyZG91cw==","IHJwbQ==","QVBPTg==","IGFkZFRhcmdldA==","c3ZpbGxl","cE5ldA==","IEltZw==","IFRJTUVS","LkFuaW1hdGlvbg==","IGJlaw==","IGFzc29ydA==","IGxlYmlo","IGJvZHlQYXJzZXI=","IHZpYnJhdGluZw==","SURM","IGJ1dHRlcmtuaWZl","aW50ZXJz","IHBlcnN1YWRl","IExHQlRR","6Is=","LnNvZnQ=","IGJlYW1z","X3N1cg==","LkRlZg==","IGxhYnM=","CXBsdA==","IHNraW5z","IHRyYW5zZmVycmluZw==","IGltYWdpbmFyeQ==","X0VuZA==","O2JhY2tncm91bmQ=","IGxhcHM=","X0NPTU1FTlQ=","KFNETA==","b25kcw==","LlJlY29yZA==","IEltcGxlbWVudHM=","X3RpY2tz","KCkpKQoK","IGFyb3Nl","XT8=","IE1w","IElDb21tYW5k","IHNjdWxwdHVyZQ==","IGNvbnRyYWN0ZWQ=","PEhUTUw=","IGNhbGVuZA==","YXR5","L1N1Yg==","IGt2aW5u","X0lHTk9SRQ==","IFNoYW5l","TUxT","IHN0aW11bGF0ZQ==","UGFydGl0aW9u","IG11bg==","w7Nt","ZXJhbGE=","LWFjY291bnQ=","LkJpbmFyeQ==","Y8Op","IHNlaXpl","Y29ubmVjdGlvbnM=","IAogICAgICAgIAo=","IERpYWdub3N0aWM=","VklTSUJMRQ==","IFJ1bnM=","IGltcHJlc3Npb25z","c3VpdGU=","b2JsZQ==","fi0=","YWt1a2Fu","PFBlcnNvbg==","IE5vcw==","IEd1aQ==","LndhaXRGb3I=","UkVTRVQ=","IHBvc3Rwb24=","RGlzY292ZXI=","YXJyaXNvbg==","c2hhdw==","Ymxvb2Q=","QUpPUg==","5pu05paw","IE11c2U=","5pS2","IHJldGFpbmluZw==","b3R0ZQ==","IG1vc3F1ZQ==","IFNuZQ==","IHN0YW5kYXJkaXplZA==","IG1haW5sYW5k","X3RocmVl","dW5nZW9ucw==","Z2V0RG9jdHJpbmU=","IHdoYWxl","IGFnZw==","IFBvcnNjaGU=","bm93bGVk","bGF0ZW50","IFJlbGF0aW9u","IC8vJw==","IHNodXR0aW5n","IFJlbWl4","X2Nvdg==","IHNhaWxpbmc=","IHZvd2Vk","IHBvdHM=","b3V0dQ==","IGhhaXJ5
","Y2FzdHM=","UmVsb2Fk","IHJlY29ubmVjdA==","dGVyYQ==","LmNoaWxkTm9kZXM=","IFJhY2s=","IGN1cnJlbnRJbmRleA==","IGFsbGVu","IOeUqOaItw==","IEN1YnM=","W1g=","X1NFUQ==","X1JFTU9WRQ==","LmdldEFjdGlvbg==","KC9e","ZXJyYXI=","IGV0aGVy","Y3VydmU=","IHNsYXA=","IHVvbQ==","T3RoZXJz","IGVuZ3I=","RGlzcG9zaXRpb24=","IHN0YWdlZA==","RXll","IEF1eA==","YXV0aGVudGljYXRl","ICQ/","IEFuZHJlYXM=","IHNldHc=","LkFydA==","IGZvcmVjYXN0cw==","IGF1bnQ=","LW1pZGRsZQ==","IG1pc2Q=","ZGVzaw==","IGVzY29ydGU=","IENhc2E=","cm9waWNhbA==","IGV4ZW1wbGU=","cGxhbmV0","KFVJTlQ=","IHdoaXA=","IFBDQg==","Y2xpZGVhbg==","PSJc","IG94aWRl","IHN1Y2NlZWRz","ZGVyaXZlZA==","IEVjb25vbQ==","X2Nvb3JkaW5hdGVz","aXJhcw==","RHJhZnQ=","IHZpc3VhbGl6ZQ==","QnJpYW4=","X0FTU1VNRQ==","IE9iamVjdElk","IHRyYWluZXJz","X0ZPUkNF","IGNvbnNvbGVz","LXByb2Nlc3M=","bGljaGVy","IFNpbW1vbnM=","VGFraW5n","IENsYWltcw==","IGRpZmbDqXJlbnQ=","QWN0aXZpdHlSZXN1bHQ=","IHNucw==","6YCJ5os=","IENydXM=","IGxsYW0=","cmFi","IEpvYW4=","QUFB","CWZpbHRlcg==","aXNob3Bz","Z2V0dGluZw==","4LU=","IHF1YW50bw==","UGFzdA==","b3ZpY2g=","IGluanVzdGljZQ==","IEZMT0FU","IGFscmlnaHQ=","XERC","KEdhbWVPYmplY3Q=","dWlzaA==","KGJvdA==","IGdhbGxvbnM=","IFLDqQ==","IFNhaWQ=","IFNURE1FVEhPRENBTExUWVBF","YWlzaW5n","X3Byb2Nlc3Nvcg==","ZWxsaWRvcw==","dGVyZGFt","IEJlYW0=","VGV4dEFyZWE=","IHJldG9ybm8=","Lk1ha2U=","ICQoIjw=","IGxvY2tkb3du","IHJlbWVkaWVz","IHZlZWw=","eGVl","ZG9jdHlwZQ==","Rmls","IEV4cGFuZA==","IGVtcGxveXM=","IHNlc3Npb25TdG9yYWdl","UGhw","UHVibGlzaA==","IHJldGFs","ZmFicw==","eW5hbWljcw==","IHRvc3NlZA==","IG51bWJlck9mUm93c0luU2VjdGlvbg==","eHBhdGg=","XG1vZHVsZXM=","IGRpc2FzdHI=","IE1VTFQ=","Lk1lc2g=","LXN0YWdl","IHNkZg==","aXR1bmc=","dWdlcw==","ID8+Ij48Lw==","X2luZGV4ZXM=","IHZhbHVhdGlvbg==","IGxpZmVsb25n","IGV4cGVkaXRpb24=","KFlpaQ==","IHBhaW5z","IFBSSQ==","IE1peGVk","IENoYW5naW5n","R2VybWFueQ==","Y29tbXVuaWNhdGlvbg==","Lm9yZ2Fu","IE1hcmF0aG9u","Z2V0UGF0aA==","IEFjY3VyYWN5","IEdsb2JhbHM=","Jyl9fTwv","IE9XTkVS","4oCm4oCd","IHN0YWJiZWQ=","IHNjaGl6b3BocmVu","IEZu","IENPUkU=","IER
hdGFSb3c=","IExURA==","IG15dGhz","IGZhbW91c2x5","fCwK","IFNlb3Vs","U2ly","IEJlcms=","UmVnRXhw","LmdldFJvdw==","IERlY29kZQ==","Uk4=","IG1hbmc=","IGVtcGxveWluZw==","X25vbWJyZQ==","PFRhc2s=","IEd1eXM=","IEFydGlrZWw=","QmVycnk=","enVyZQ==","IHZhbGV1cg==","aGl0cw==","IGx1Y3JhdGl2ZQ==","IGluZm9ybWF0","Q2xpbnRvbg==","IHRlcw==","IENlcnRpZmljYXRpb24=","X3dz","IG9mZmVuY2Vz","ZWJyYQ==","IEF4aW9z","cmVzdGFydA==","TE4=","LkVuY29kZQ==","bWl1bQ==","IEZlYXR1cmVk","0YjQuNCx0LrQsA==","IERlcHQ=","OyYj","IE15ZXJz","CXRyYW5zZm9ybQ==","VGV4YXM=","16g=","IFlvcmtzaGlyZQ==","bG5hbWU=","QnJl","44GT44Gu","IHNjZW5lcnk=","IGbDvGg=","CQkJCSAgICAgICA=","IERvb20=","IEFETUlO","KGVz","INC80LDRgdGB0LjQsg==","X2FzY2lp","L0RhdGE=","bGVzaG9vdGluZw==","QmFu","IG1lbW9pcg==","INmG","IEF1c3M=","KXBhcmVu","IGd1aWRpbmc=","IGJheg==","w7h5","QURN","IGRtYQ==","LlF1ZXVl","IFN1cHBsaWVz","IE1jRA==","IEFnZW50cw==","X2Ji","c2xhc2g=","IGhhc2hlcw==","IGNyYW5r","IFJhZw==","IGF1dG9ub215","w610dWxv","IHJlY3Vyc2lvbg==","IENyYXp5","X3RyYWNrZXI=","IE1i","X3BoeQ==","Zm9vYmFy","CXNwZWVk","IGNhbXBvcw==","IG1vdWxk","IGNoYXJpdGllcw==","SEVJR0hU","IGVhdXRv","X3NvbHV0aW9u","IERH","bWFydmlu","WWVzdGVyZGF5","IEJlY29tZQ==","PGxs","b3Jpcw==","W25leHQ=","IGluY3VtYmVudA==","IER1cA==","CW92ZXJyaWRl","5a6J","CWNmZw==","IHPDtg==","IGRlc2U=","LWRp","IG9udHZhbmdzdA==","IGRlY2lzaXZl","5Lu3","X2tlZXA=","KERhdGFiYXNl","Xy8=","IENMTA==","LW1ldGhvZA==","CVBvaW50","IEJ5dGVCdWZmZXI=","IHRyYWNlZA==","YWRkVG8=","7IS47JqU","YW55YWs=","IGVtcHJlc2Fz","KHJlcG9zaXRvcnk=","LmNyZWF0ZVN0YXRlbWVudA==","IGVsYQ==","Rm9yZ2VyeVRva2Vu","IGlzZW1wdHk=","YXNpbg==","IExvb2t1cA==","0LXQvdCw","IHZpb2xhdGVz","IFNtYXJ0eQ==","IHphaw==","KCQu","U0hPVw==","INCi","YXJ1cw==","KFRFU1Q=","cGFja2Vk","IGhpc3Rvcmlh","IGNhbmNlcnM=","IEtyZW1saW4=","UmVkdWNl","L2hvdw==","IMSQ","VElUTEU=","LmxvY2FsUG9zaXRpb24=","bGlhYmxl","IOesrA==","IGZyYW5jYWlz","CWhhc2g=","IGluaWNpbw==","IENyYXNo","IHsu","IGNsb2Nrcw==","ZHVjdG9yeQ==","IFB2","6528","IGRvaXM=","XC0=","IGphYXI=","IE1heWE=","bW96aWxsYQ==
","CXJlc291cmNl","ISEK","YXlzY2FsZQ==","ICctJyw=","5Y+W5raI","IHN0YWxl","Q29ybmVy","w6hsZQ==","aXRpdmVz","emFz","aWNvcm4=","LkV4cHJlc3Npb24=","w7N0","QXBwbGljYXRpb25z","UmVzdHI=","X0luZGV4","jbDsnbTthLA=","IEpGcmFtZQ==","c2l4","X0lNRw==","6JeP","IE51bWVyaWM=","IHdpcms=","X1NVTQ==","PERhdGVUaW1l","IHB5bGludA==","IGxhbWVudA==","IFBvc2U=","X2VudHJvcHk=","IGVuY291cmFnZW1lbnQ=","IGxhaW4=","5Yib5bu6","LWZy","IGNvcnJlY3Rpb25z","cGhhcw==","dXVy","YXRlZ29yaWFz","IGNhdGFseXN0","LmFsdA==","IEZlcm5hbmRv","LkRhdGFHcmlkVmlld0NlbGxTdHlsZQ==","IGhlcmJhbA==","IFJH","U1RFUA==","SUZu","IFRvbmc=","xb5l","IElOQ0xVREU=","IGhj","dHJhY2tlcg==","CVN0cmluZ0J1aWxkZXI=","IERlc3Rpbnk=","IHNvcGhvbW9yZQ==","IERlZA==","IFBBUkE=","aXpvbnRhbGx5","LWNoYW5nZQ==","ZW5kaWQ=","6YCJ5oup","aWprZQ==","IEF0aGxldGlj","YmFp","Z2V0UG9zaXRpb24=","Lm5hbWVzcGFjZQ==","6K6i5Y2V","UkFDVA==","IHJlbGlldmVk","IHBvdXJpbmc=","IGl5","cm92ZQ==","IGFkb2xlc2NlbnRz","IGF3ZQ==","cmVhcw==","QW50aUZvcmdlcnlUb2tlbg==","cm93bmluZw==","IFVuY2xl","LkNvbm4=","IE1lZGlhVHlwZQ==","Lm9yYWNsZQ==","SU5URVJOQUw=","LGFuZA==","IGZhdXg=","aXBtYXA=","JG1vZGVs","IEdlb2Zm","X0FYSVM=","KCgpKQo=","IG5lZ2xlY3RlZA==","IHF1YXJ0ZXJseQ==","IGRpZXNlbg==","IGRyYWdvbnM=","TmlnaHQ=","L1dlYg==","PFZlYw==","CSAgICAgICAgICAgICAgICAgICAgICAg","IE9icw==","YmRk","IGhlaXI=","LWFuZ3VsYXI=","TWVudVN0cmlw","ICciPic=","a2luc29u","INC60L7Quw==","b2duaXRpdmU=","X2xp","IGltbWluZW50","IGFmZmluaXR5","LnNpZ25hbA==","IG5vdGNo","IFN0ZWVsZXJz","bWF4bGVuZ3Ro","S0s=","IEV1Z2VuZQ==","X1BXTQ==","cm9p","IOKXjw==","IEhhbWJ1cmc=","Lk11c3Q=","IGF4ZQ==","ZW5lZg==","IGFtYml0aW9ucw==","IFNwZWNpZXM=","IFN0cmVzcw==","IGF3aGlsZQ==","INCx0YPQtA==","IHdpdGhzdGFuZA==","IERlY29kZXI=","X2ludmVudG9yeQ==","IHsNDQo=","IHRndA==","IHJhaWxyb2Fk","V0FTSElOR1RPTg==","IG5lZ290aWF0ZWQ=","TlNU","LXBob25l","LFU=","IGV4ZXJjaXNpbmc=","4bul","X1BJWEVM","YXZvcnM=","aXRlcmF0ZWQ=","IHZhbXBpcmU=","YWRhbA==","SW5ncmVzZQ==","IHVuZw==","amVjdGl2ZQ==","LmNlbGxz","IG5hbm8=","IG1hcmtkb3du","X1JVTEU=","KGV2ZW50cw==","IGx1
Z2dhZ2U=","TUVTU0FHRQ==","aWdrZWl0","JGNvdW50","QXR0cmlidXRlTmFtZQ==","SUdJTkFM","X0VudA==","IEJG","IENPTU1FTlQ=","X2luaQ==","IEV1cm9wZWFucw==","IEJlbGxl","5ZG9","KVsn","5bqU","IFVzZWZ1bA==","LnJlZmVyZW5jZQ==","KCkiLA==","X2dyYWRl","IEthdw==","IHNlbnRlbmNpbmc=","IHNvY2lhbGlzbQ==","bW9uc3Rlcg==","X0xBWUVS","IGRlZXBlc3Q=","d2s=","IE5vaXNl","IyMjCgo=","IHByw6lj","b3RsZQ==","0YLQtQ==","YXVm","aWJhbA==","IGNvbnF1ZXI=","PkVtYWls","IGFtYnVsYW5jZQ==","T0FE","ICgiJQ==","IEZJ","LmZpeHR1cmU=","IHRlcnNl","ICAgIAkJCQk=","IHNhbmN0dWFyeQ==","dWdp","IENvbXBhcmF0b3I=","RGVmaW5pdGlvbnM=","IGFzdGhtYQ==","IGxhY3Q=","IGhhcmR3b29k","LmNsb2Nr","IGF0dHJhY3Rpbmc=","IE1vdXI=","KGRpc3RhbmNl","aWNpdHM=","IGJvbm5l","IEFDQ0VTUw==","LkRlc2VyaWFsaXplT2JqZWN0","IFR5cGVk","IGpldQ==","IGFwcElk","IENsYXJh","IEhG","IFJlaWNo","aXBwbGVz","Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ==","X2RlbGl2ZXJ5","ZXJpYWxpemF0aW9u","IHBsYWludGlmZnM=","U2NpZW50","c2hvcHBpbmc=","IER1bW15","IFdhbGQ=","R3JvdXBOYW1l","IGluc2NyaXB0aW9u","ZWxvZw==","Ojo6Ojo6Ojo=","X2xk","QmFja1ByZXNzZWQ=","LlJhdw==","IE9uVHJpZ2dlcg==","IG11c2V1bXM=","IEJlZW4=","IEFkdmVudHVyZXM=","IHNsYXRl","IGxldHQ=","IHN1bmQ=","IEdpbg==","IE1lY2hhbmljYWw=","LnNoaXA=","QXBwQ29tcG9uZW50","IGRlc3RpbmVk","IGR3ZWxsaW5n","UHJvZmlsZXI=","UHJlcGFyZQ==","emVpY2g=","IHNpbGljb24=","KGhhcw==","ICMl","VklERU8=","IGNvbGxhYm9yYXRl","TGlu","IHNjb3Blcw==","KGNsYXNzTmFtZQ==","KHNk","YW5kaW4=","LmhhbQ==","U2VydmljZUltcGw=","LWRlc2NyaWJlZA==","IGlyb255","c3RpYWw=","IEh1YXdlaQ==","KHJlcG8=","IHVuZXhwZWN0ZWRseQ==","IEthaQ==","Lmluc3RhbGw=","XHhm","IGV4aGliaXRlZA==","X1RDUA==","IE94","X0NITw==","IHByb3N0aXR1ZXJ0ZQ==","IHbDpA==","IHNpdG8=","IGNvbnN0aXR1ZW50cw==","IENvbnRpbnVlZA==","IFNBVkU=","cnNz","L21lc3NhZ2U=","dWJlcw==","IG1pc2RlbWVhbg==","IHRheGF0aW9u","IHN0b3J5bGluZQ==","aGFpcg==","IEZpbmRz","U0lH","dmVyaWZpY2F0aW9u","fj0=","Lmhw","SXRlcmFibGU=","0YvQtQ==","YXRvcmk=","IGN0cg==","Ung=","Xyk7Cgo=","ZGFn","LnBpb
g==","IHBzZXVk","IGludm8=","0YHRgtGA","X3BpeA==","5Li656m6","IHN3b3Ju","4oCUb3I=","X3JlZ2lzdHJ5","IGRpc2FzdGVycw==","IFJPSQ==","IOKAlQ==","YWt0dQ==","Zm9yZXN0","YmVpdGVu","4oCUSQ==","dWV2YQ==","ZWd0","IHNwaWtlcw==","VVJFUw==","IFJlY29tbWVuZGVk","IGV4cGxvaXRlZA==","IEZyZWRlcmljaw==","X0NPTVBMRVRF","IERydWdz","ISEhISEhISE=","IFJpdg==","U1RPUA==","Uk9PTQ==","IFBBU1NXT1JE","Q29va2llcw==","LkVs","4but","IEJlcnQ=","IGhhc2hlZA==","aWNlc3Rlcg==","IGRlY29yYXRvcg==","IHF1ZXJ5U3RyaW5n","OjsK","ICJbIg==","b3RvcGU=","LUFtZXJpYw==","IE1hdHRoZXdz","VVJBTA==","4oCcLA==","U3VtbWVy","Zm9z","X0NPTlRBSU5FUg==","X0FDSw==","IGZpbHRy","X2Rpc3A=","X1Jl","IGZhY2lsZQ==","0LDRiA==","IOyVig==","IGViZW4=","IHNwcmluaw==","IFF1aW50","PlY=","IGhpc3RvcmlhbnM=","b3VybWV0","IE1vbml0b3Jpbmc=","bGVkZ2Vy","Y290dA==","IHdhcmU=","R0dMRQ==","Y2Fycw==","IE1FRElBVEVL","IHZvbHVwdA==","X1ZpZXc=","SEVM","KGNvcHk=","KHN0YXRz","IGNocm9tb3NvbWU=","IEN1cnRpcw==","LWNvbmY=","KGFzc2V0","IGh2b3I=","RmlsZVN5c3RlbQ==","PD4oKTsNCg==","b2NvZGVy","IENhbm5vbg==","KXg=","IFNtb290aA==","IFNBUw==","X2Nl","CXByZXY=","X21vdmll","RWM=","X3dhbGw=","PEJ1dHRvbg==","IEZBU1Q=","IG9uVmlldw==","dWxhbg==","IFNVUFBPUlQ=","IGdlc2NoaWNodGVu","IFNvbnM=","SW1t","JElGbg==","IGZhaXJuZXNz","IGRwaQ==","YXRzdQ==","Sm9zaA==","RXF1YWxpdHk=","IH0oKQo=","X2xlc3M=","IFJhdGlv","IENhdHM=","IFN0ZXJu","TW9uc3Rlcg==","IG1lcmN1cnk=","w7xocg==","IHBsdXNpZXVycw==","LmRlc2VyaWFsaXpl","c2NvcHk=","LkZhbHNl","KWFuaW1hdGVk","IEV4cGVydHM=","ICIiKXsK","LldoZW4=","c2VlYWxzbw==","LnVucGFjaw==","TEVN","LnNlbGVjdEFsbA==","IHBlcmNlcHRpb25z","dWRpbmc=","aXJsaW5n","IFByaW50aW5n","Z3JhbXM=","IEZpbGVTdHJlYW0=","ZXJ2aWxsZQ==","aWxvZw==","aWNtcA==","X0NvdW50","IGxpdmVzdG9jaw==","LWNh","ZG9jdW1lbnRz","IHBvbGVz","CXdhbnQ=","IGZsdW9yZXM=","IHN0YW5kcG9pbnQ=","IEh1Z2U=","IHJhZGlhbnM=","IFVJQmFy","RURJVU0=","IEhpc3Rvcmlj","X2hvbGRlcg==","IE1hcmluZXM=","IHTDpA==","LkxpZ2h0","cXVpcmVy","YXNvbnJ5","ZGl2aWRlcg==","IEZsdXR0ZXI=","X2Zi","cmVzdHJpY3RlZA==","IEV2ZXJ5Ym9keQ==","TsOjbw==","IGt
ub3Q=","IFR3aXRjaA==","IGhhbGx3YXk=","KENvbGxpZGVy","SW5wdXRFbGVtZW50","PykK","L29mZg==","Lyk=","cGxheWVk","W09G","IGJhdHRpbmc=","X2Rs","IGNvbWVkaWFu","IMOpdg==","IERFTQ==","IEVkZW4=","OndoaXRl","Jycs","Q29uc3RydWN0aW9u","YWNlcmI=","IHRhc2tlZA==","Lm1hbmFnZQ==","UmVsYXRpb25zaGlw","IHBob24=","bno=","X0JHUg==","VmFsaWRhdGVBbnRpRm9yZ2VyeVRva2Vu","X2Fpcg==","4oCcV2hlbg==","IGdsZnc=","IENvbnZlcnNhdGlvbg==","X1RPVEFM","LFo=","IGdyYXo=","IGl0ZXJhYmxl","IFBBU1M=","IGFkdmVydGlzZQ==","IG3DtmdsaWNo","L3RyYWlu","IFZvbGtzd2FnZW4=","IGNyZWVweQ==","ICIpDQo=","UVVFTkNF","IGFsdGFy","IGVkaXRz","Y29tcGlsZWQ=","YXduaW5n","IER1bmdlb24=","IG9zZw==","TmF2aWdhdGlvbkJhcg==","IHRyZW5kaW5n","IEVjbw==","b2dnbGVz","Y2RvdA==","fC0=","U2ll","ZWNyZXQ=","IE5lZ2F0aXZl","IExpbmc=","IERJTQ==","IENXRQ==","IENhcnJpZXI=","IGNhcnRyaWRnZQ==","X3VzYg==","PW9z","IEphY2tpZQ==","IG90cmFz","IGNvbW1vZGl0aWVz","IFByZXNlbnRhdGlvbg==","KSYmKA==","IE1hcnRoYQ==","IENhdGhvbGljcw==","IE1vbmQ=","0L7QsdGL","X2Fic29sdXRl","IGFzaGFtZWQ=","cG9uc29ycw==","dGFs","IHNhZG5lc3M=","IHB1w7I=","RmFkZQ==","LXByZXZpZXc=","IFJlcXVlc3Rz","IENhbHZpbg==","aG9ybg==","UmV1c2VJZGVudGlmaWVy","KHByb3ZpZGVy","L2FwcHM=","aW1lbw==","CUNsYXNz","U2Ftc3VuZw==","IFdPUkxE","IGNpbm5hbW9u","ZG90ZW52","IElVc2Vy","IERFVg==","X0NoYXI=","LmliYXRpcw==","ZXRp","L21l","c3N0","LnN5bQ==","IFJ1Z2J5","LW1hc3Rlcg==","YWphcg==","IFlFQVI=","IG9kcA==","IFJvbGVz","IGJpcGFydGlzYW4=","YWlsbGU=","IGJsb2NrZXI=","IGdyZWVucw==","LlNFQ09ORFM=","IGJlbGlldmVycw==","IExpa2Vz","RkxPQVQ=","IG1haw==","IGdjYw==","4pWQ4pWQ","KCJ+Lw==","U0NSSVBUT1I=","IHRvbm5lcw==","IFNhbmc=","IHRyYW5zcG9zZQ==","ZW5uYWk=","UHJlZA==","IHNvbGx0ZQ==","LmdpdGh1YnVzZXJjb250ZW50","KHByaW50","IEhvbGU=","55yL","YWRnZXQ=","IHByb21wdHM=","IGdlbmV0aWNhbGx5","IEhvZA==","IHZlcnRpY2FsbHk=","X2NvbnRyb2xz","0YHRgtCw0L0=","Iil7DQo=","JHRpdGxl","IH0pLAoK","IHN0YXRld2lkZQ==","IENvcnJlc3BvbmQ=","IEF0dHI=","aXRhbnQ=","RWxlbWVudFR5cGU=","IG91dHdhcmQ=","IGZhbWlsaWE=","KGFydGljbGU=","IGJsYXQ=","wqAK","IGdsR2V0","IFJlY2VpdmV
y","ICUt","YWRhbQ==","V2lubmVy","IHRhaWxvcg==","X3B3ZA==","ZXJ0ZW4=","U3Rhbg==","CWFsbA==","YWxpdmU=","c3RydG90aW1l","77+9cw==","c2Vzc2lvbnM=","JGNvbm4=","YXNzaXN0","IGNoYXR0aW5n","IE1hbnQ=","ICVA","ICIiKTsKCg==","IGRndg==","IO2VqA==","LnJlcGVhdA==","X01lc3NhZ2U=","IGFkdmlzZXJz","L3BhdGg=","IGtlcw==","KX08Lw==","TWlzYw==","IGJzb24=","IHRyaW1tZWQ=","IEFjaw==","VmVydGV4QXR0cmli","57Si","dWF0ZXM=","Lm15c3Fs","IGRlc3Rpbg==","IHByb2Js","KENvbnN0YW50","YXNzZXM=","LWltYWdlcw==","X0FSRUE=","X18qLw==","W10o","IHNpZ25Jbg==","xJE=","eHI=","YWhpcg==","LmZpcmVzdG9yZQ==","IHNlcXVlbnRpYWw=","IElkZWE=","LWJhc2lj","X3BhZw==","IGluc3RhZ3JhbQ==","b3Ryb24=","X2FsaWdubWVudA==","XFxcXA==","LkZhY3Rvcnk=","LnJ1bGU=","LmNoZGly","IGxpYnJv","KGdhbWVPYmplY3Q=","LlRvb2xTdHJpcEJ1dHRvbg==","IGRpc2NvdmVycw==","LkFyZ3M=","ZG9i","IHZu","4oaS","IGTDvA==","IFhN","IGFsdW1uaQ==","IGhvbmU=","IHNlY3VyZWx5","X2Ryb3Bkb3du","RGlzY2xhaW1lcg==","IGR6aQ==","KHRpbWVzdGFtcA==","Jyld","IGN1bHRpdmF0aW9u","Li4uCgoK","IFRyZWF0eQ==","IERpc3M=","IGNvbmZsaWN0aW5n","LmdldFNlbGVjdGlvbg==","IHBsYXlhYmxl","IFNpbGs=","IEVxdWFsaXR5","IG1veQ==","IGZsYXR0","IG1vdGl2ZXM=","UGVyZmVjdA==","LmV4aXN0","IHR3ZWFr","IG9taXQ=","IFR3aWxpZ2h0","IGtpc3Npbmc=","IGNocmlzdGlhbg==","KFNF","X2RlZmluZQ==","IFBlbmc=","U29ydGVk","J2lu","TG9ncw==","4buHbg==","IG55bG9u","RHVtcA==","SW1hZ2luZQ==","cmVuYW1l","IGJlZm9yZWhhbmQ=","cHlnYW1l","IGJweQ==","IERq","IHRpdHVsbw==","IG5sdGs=","IFNjaG1pZHQ=","IENhdg==","KG9uZQ==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","LmdldE1vZGVs","IFB0","YXRvaQ==","LmxvY2Fscw==","YnVyc2VtZW50","UHJvdmluY2U=","IEFwcHJvdmVk","KCk8PA==","w7NyaWE=","dXNjaA==","IEplbm55","YXJyYW50cw==","IExpYmVydA==","TG9yZA==","IFJlbW92ZWQ=","X2NvZGVj","LmJ1bmRsZQ==","IEdvbnphbGV6","b3BlcnM=","neWni+WMlg==","ZXR0aW5n","IGdvZGRlc3M=","cmlwZQ==","IG11c2N1bGFy","CQkJCQkJCQkg","IEh1Z28=","IG1lam9yZXM=","bG9pZA==","cml0ZWxu","Z2lz","YWRkb24=","ICgoKCg=","YXBwb2ludG1lbnQ=","cmVzZXJ2ZWQ=","CWZyaWVuZA
==","X2F2YXRhcg==","Qk9PTEU=","YWhp","LUVORA==","IGlmZg==","w7Ni","IEJydW5v","cm93c2FibGU=","IFBvaXNvbg==","KGZsYWdz","dXJ0bGVz","IEFuaW1l","IG1pZ3JhbnQ=","CXN0cmNhdA==","KHJlcGx5","IFJlZnVnZQ==","IEJX","ZWZ1bA==","JHZhbHVl","ZmVk","ICAgICAgICAgICAgICAgICAgICAgICAK","6LWE","KGNt","IHZ1bG5lcmFiaWxpdGllcw==","IFsoJw==","IHVuYmVsaWV2YWJsZQ==","c3RyaWN0aW9u","ZW50aWV0aA==","IHByYXlpbmc=","Q2xhaW1z","IGthdWZlbg==","bsOp","IHBvaXNvbmluZw==","Y29sbGVjdGlvbnM=","IGluaXRTdGF0ZQ==","IFNldmVyaXR5","IGNvbnRlbnRpb24=","IAoJCg==","LmNvbnRyb2xsZXJz","c3RydWN0dXJlZA==","aWN0aW0=","IE9iZXI=","IC8qI19f","X09U","IEFtZXJpY2Fz","IEFkYQ==","UHJvZHV0bw==","Lm11bHRp","IGdyYXBl","YmVn","5p+l6K+i","IHF1YXJ0eg==","IFJvbWFuY2U=","IE1pZHdlc3Q=","IGhvdXNlZA==","IGZ1cm5pc2g=","aWNvbnQ=","LnVuc2hpZnQ=","b3RyZQ==","IMO6bg==","aXBwbGU=","IHN1YnVyYg==","dWFsaQ==","Vm9pY2U=","LklzQW55","LGNvbHVtbg==","IFByb3NlYw==","SURB","CXBvc3Q=","cHRvbXM=","dsOp","IEluZ3JlZGllbnRz","w7ZmZg==","Lm9wZXJhdG9y","IDw8PQ==","bGFzdGlj","IHJlc2VtYmxl","VW5hdXRob3JpemVk","IHR1dHRv","X1NXSVRDSA==","X1JFQURZ","fT0=","bm93bGVkZ2U=","IGFwcGVuZGVk","dW5nYW4=","4oCZZW4=","IExvcmVu","cHVibGlzaGVy","IE1H","fSwi","IFdhbHNo","VGVtcGxhdGVz","X3NvY2lhbA==","IHBhcmlzaA==","IFNwbA==","bWluYXRlZA==","KEZBTFNF","IGZvcmVmcm9udA==","bW9kaXR5","IGJpbGF0ZXJhbA==","IGNvbXBldGl0","IGNhbmRsZXM=","LmRw","IGNvbGxlY3Rz","dGVsZWZvbm8=","IGF0dGVudA==","IExlbW9u","aXphZGE=","IHRoZXJhcGllcw==","IHBhcmFkb3g=","IHRhcw==","LXN1Ym1pdA==","ZWtlcg==","SU5hdmlnYXRpb25Db250cm9sbGVy","IG1ldGF2YXI=","IHNld2luZw==","IFppbWJhYndl","IGxhd2Z1bA==","IGxvcmU=","IExvYWRz","INGB0L7Qt9C0","LnByb21pc2U=","IEZhY2Vz","LlBsYXRmb3Jt","LmdldExvY2F0aW9u","IHRyb3VibGluZw==","IHbDrWRlbw==","IEZlYXR1cmluZw==","5Lqn","cWVk","IG9uQmluZA==","IHRvZGRsZXI=","Q2xv","RGl2aXNpb24=","LWdhbGxlcnk=","IEdlbGQ=","c3BlY2lmaWM=","RmllbGROYW1l","X2V4Y2Vs","XGh0ZG9jcw==","IERW","ICY6","IHR3aWc=","IENvbmNlcm4=","IHNob3RndW4=","IG5pY2tlbA==","IEx1eHVyeQ==","X0tFWVM=","Lm5weQ==","xa8=","IGZvcmVoZWFk"
,"zrI=","IGVuZGFuZ2VyZWQ=","L3RoZQ==","cGlwZWxpbmU=","xbE=","bmVv","RXhwbG9yZQ==","U3BlY1dhcm4=","IGludGVyY2hhbmdl","KHBp","YmlydGhkYXk=","RGF0YVJvdw==","IFNQUg==","IG9zdGU=","ICJ+","YXRpc2ZhY3Rpb24=","Tkg=","b3Jkbw==","LWZvY3VzZWQ=","J0E=","lok=","LmJlc3Q=","IFNwZWNpZmljYXRpb24=","Lz4uCgo=","b2dlbmVzaXM=","IE9QVElPTlM=","dXB0b29scw==","IG1pbGl0YW50","IGV4aXRlZA==","aWdhcg==","IENPTU0=","IERpc3Bvc2FibGU=","YXljYXN0","IHJvd3NwYW4=","IHN5bnRoZXM=","IHNvbmRlcm4=","IDwhLS08","IEVuZGU=","LnZhcmlhYmxlcw==","IGNvbnNlcXVlbnRseQ==","c2Rr","U3VwcGx5","cmVzcG9uc2l2ZQ==","T3BlbmluZw==","cGhvdA==","IH1c","IGJ1bGxzaGl0","IGJlYWNvbg==","X3NhdA==","IHNuYXBz","IEdIeg==","TE9ORw==","PHBhaXI=","IFsKCg==","IFZlcmc=","IEVpbmU=","L3Bvc3Rz","IGFyYWI=","IHN1bWE=","44Oz44OI","IHNjYXJj","IG9sZWg=","ID8/Pw==","IE9mZmVycw==","eGVk","IGZ1bGxXaWR0aA==","LWFjdGlvbnM=","T3V0ZXI=","IEV4cG8=","w6lyZXI=","Lkhl","REg=","IGhpbA==","IE1pbGxlbm4=","0LXQvdGM","SWNl","X2dyYXk=","INC/0L7Qu9GD0Yc=","IFB1bms=","IHRpbWV2YWw=","IGlzYQ==","IENIdG1s","LkRhdGFQcm9wZXJ0eU5hbWU=","IGRpeQ==","dG91cg==","IGpUZXh0RmllbGQ=","IGplbGx5","IGFra2E=","LWVyYQ==","RGVwcmVjYXRlZA==","X0lNUEw=","IE1vbnRocw==","X0lURVI=","IGFydGU=","IEhlYWRpbmc=","IEJvaA==","IHByYWc=","IGRvd25zdHJlYW0=","IEJPQVJE","X2tleXdvcmRz","IE1ldHJvRnJhbWV3b3Jr","KS0o","PEV2ZW50","4bqldA==","IFByZWNpc2lvbg==","IE1SSQ==","aGVyZW5jZQ==","aXhv","KSkpewo=","KCk/Pg==","IHNhYXQ=","IFdhcmVob3VzZQ==","X2F0b21pYw==","IHZvaWNlZA==","SXRlbUNsaWNr","ICAgICAgCQ==","LlJlc3VsdFNldA==","L3BsdWdpbg==","IGhhbGxz","PWZvcm0=","IFdhZ25lcg==","ZW1haWxz","JSUK","VU5LTk9XTg==","IFJpbQ==","dWludHB0cg==","IExpYmVyYWxz","IHRlcnJpdG9yaWFs","IE11cmRlcg==","IExhZGVu","IHByZXNpZGVudGU=","KGNhcA==","IH0sewo=","YXZvdXJpdGU=","ZmluZEFsbA==","IGFwcGxhdWQ=","IOuplA==","L3Bob3Rv","X3N5bg==","LndhbGs=","IHN1bnNoaW5l","IHN0dWJib3Ju","IGRvd25zaWRl","IExURQ==","LWJ1aWxkaW5n","UXVlcnlCdWlsZGVy","X2Rpc2FibGVk","VGVycg==","YWtyYQ==","UmVmcmVzaGluZw==","X3Byb2Jz","IGZvbGw=","PmI=","IGNvbGxhdGVyYWw=","
JGVycm9y","IGFjb21wYW4=","X2l2","K2Q=","YWp1","IOKd","c3VybmFtZQ==","LmFydGljbGU=","IGJpY3k=","IjoKCg==","Pjw/PSQ=","0LrQu9GO0Yc=","ZWNvbWU=","RmluZGluZw==","KHBk","IHJlY3Rhbmd1bGFy","ZXN0bw==","aWhpbA==","PScnKQo=","IG1hbnNpb24=","X2ZpbHRlcmVk","YW5lZA==","UFJPRFVDVA==","TE9HWQ==","X2ly","LlJlbW90ZQ==","IGV4ZWN1dGVz","b3RlY2hub2xvZ3k=","IFBST0NFU1M=","IHJvd0luZGV4","Z2V0WA==","TXV0","aW5za3k=","KHN0cmluZ3M=","IE1veg==","Rmxvb3I=","LlN0cnVjdA==","X3ByZWRpY3Rpb24=","IGNhcnJpYWdl","IGNvbGxlY3RvcnM=","IFdoZWVscw==","IGJ1bmRsZWQ=","YXhlZA==","a29s","X2Nyb3A=","IGJsb29t","QmVzaWRlcw==","IG92ZXJyaWRkZW4=","IHN1Ym5ldA==","aWVuaWE=","Kj46Og==","IFByaW1pdGl2ZQ==","IOag","LkNoYXJhY3Rlcg==","6KGo56S6","IEFESEQ=","Uk9Z","SmFwYW5lc2U=","T1VT","OlVJQ29udHJvbEV2ZW50","IFBBTA==","aXphY2lvbg==","IGNoZXJjaGU=","b3J0aW5n","IG9yZ2Fz","LlV0Yw==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","XERvbWFpbg==","T1JB","IHRlcnJhY2U=","IHByaXM=","CQkJCQkJCQkJCg==","IHJhaWRz","X2luY3JlbWVudA==","IHVuanVzdA==","JG9wdGlvbnM=","b25DaGFuZ2U=","Qmxvb2Q=","RmlsbQ==","IGhhbmRpbmc=","IG11Zw==","U09MRQ==","44OV","aWNvbmR1Y3Rvcg==","IElzbGFtaXN0","ICIiKTsNCg==","LW92ZXJsYXk=","LGNvbA==","6Zw=","YXJyaW5ncw==","X2NvbnRyYWN0","CWxs","cGlw","X2VtYmVkZGluZw==","IHBlcm1pdGU=","IG1vZGVt","IHRyaWdnZXJpbmc=","KGh3bmQ=","LiIpXQo=","IHNhbnQ=","IGV4dGluY3Rpb24=","IGNsYXNoZXM=","LkF1ZGlv","IHN1bw==","Lm11bHQ=","IHNlYXNvbmVk","LlZhckNoYXI=","cG93ZXJlZA==","ImNvbnRleHQ=","IG1lbmM=","KEdyYXBoaWNz","JHdoZXJl","IHJlY3VwZXI=","YWNrbGU=","IG5ld0RhdGE=","IEJyZWFraW5n","ZXJnZWQ=","IENQUFVOSVQ=","IE11bGw=","IGtvbW10","IExlZWRz","JywnPQ==","Lm5leHRUb2tlbg==","IFJpZw==","UkVUVVJO","CXRpbWVy","fV97","IE1hcmluYQ==","IHNsb2dhbg==","SVpFRA==","T3BlbkdM","X1BhZ2U=","YXRpdmFz","IGhhemFyZHM=","J3ZhbHVl","IGNvcnBzZQ==","IEZsb3dlcnM=","X29ubGluZQ==","ZGFs","IENvbGxpc2lvbg==","w6BuZw==","IGZlcnJ5","IHBva2U=","IFRvdXJpc20=","aW5lcmFyeQ==","L1NldA==","LkVtcGxveWVl","PkA=","
LHZhbA==","IE1pbGY=","YXZleg==","UmV0cnk=","LiIv","IHJvdW5kaW5n","LXBsYWNlbWVudA==","IGNlcnY=","TWV4","IE1zZ0JveA==","X3Npbms=","bWFuaWE=","X2NyZWRpdA==","R3VhcmRhcg==","IHZhbml0eQ==","IGltbXV0YWJsZQ==","IGNvbnRhbWluYXRlZA==","0LrQsNC3","5Liy","YWNoYQ==","IGhhdGg=","IGVudW1lcmF0aW9u","LmdldEJ5","4bq/dA==","IERhbw==","b2JpZXJubw==","IEd1dA==","X1BJUEU=","LmFkdg==","IEd1dGVuYmVyZw==","YWRo","66y4","ZnVzYw==","LlZL","cHRh","IEVNUA==","LkZpcnN0TmFtZQ==","IHJlYWxpemVz","LmNn","IHVuaXRl","UExJVA==","IEFiZHVs","IE1FRA==","UkFJTlQ=","IHF1ZXN0YQ==","c3RkaW4=","IGNhbG9yaWU=","CWdsQmluZA==","IGFybWE=","eWxsYW5k","T01Q","LXE=","IEtoYWw=","c2FsYXJ5","CUFORA==","c2dp","X3RoYW4=","LWJ1aWx0","ICsvLQ==","IG5hcmdz","X2xhdW5jaA==","IFNR","em9u","IEJlbmVk","X3VuaW9u","PigpOw0KDQo=","IFNpbXM=","IERhdGVz","CUNvbm5lY3Rpb24=","IFBlcmM=","Z3JhbnQ=","YW1waWw=","IGFnZ3JlZ2F0aW9u","ZXNlbGVjdA==","X1NVUA==","KHsKCg==","Lm9t","IHdt","LmNvbnRyYWN0","LU9yaWdpbg==","IGdlbWU=","ZnJlZXpl","TlVNQkVS","LmN1cnI=","IEdsYWQ=","c2xh","IFJlYg==","0LXRgdGC0LLQvg==","YXJib24=","L2NvbnRyb2xsZXJz","U2xvdHM=","LmRlZXBjb3B5","RlVMTA==","dWlyZQ==","QHN0dWRlbnQ=","4LmJ4Lit","VHJhbnNsYXRvcg==","IHByZWZlcmFibHk=","Y2hlbWlzdHJ5","IEphY29icw==","bmFy","ICgiXA==","bmVhcg==","aWZpcXVl","CWNvbHVtbg==","IG1pbnV0b3M=","aWdlcw==","IGVzdGFibGU=","LWRpc2M=","KENoYXI=","a292","ZXhhbXBsZXM=","X18oIg==","INC60LDQug==","IEJvcmlz","KGR4","c3By","IG92ZXJoYXVs","YXRvb24=","IEhhcmxleQ==","aWNhbWVudGU=","4paI4paI4paI4paI","ZXZpdHk=","dXNoZXI=","LlZpc3VhbFN0dWRpbw==","V2F2ZQ==","IE5vcm1hbGx5","c3Rvb2Q=","b3JuaW5ncw==","IGhhbmRtYWRl","KGxvZ2dpbmc=","IGNhcmNpbg==","YWNqYQ==","IHN1cGVycw==","IHNpZWdl","CUlm","IElMb2dnZXI=","VUFSVA==","QW5pbWF0aW9uRnJhbWU=","IHRhcGVz","IGFpZHM=","IENvbG9uZWw=","dmVlZG9y","IG1kbA==","cGhvbg==","RGlzbWlzcw==","QXZhaWxhYmlsaXR5","VW5pZm9ybUxvY2F0aW9u","IGlkZWFscw==","cXVldHRl","a2VpdGVu","IEVNQUlM","IE5lYg==","IHN1bW1vbmVk","IGdvdmVybm1lbnRhbA==","IEhvcnJvcg==","Y2hhbmdpbmc=","IEFjdGl2YXRl","SWxs","PHRib2R5","
Y3JlYXRpdmU=","IEJMRQ==","IG1hZG5lc3M=","T3JOaWw=","IGhpbg==","xZM=","LkdldEtleQ==","X2NvbnNvbGU=","Ik91cg==","IGd1aW50","IGFtaQ==","IHJlZmxlY3RpdmU=","IGNyYWNraW5n","IFJp","UkFM","dXJzZWQ=","cHVyZQ==","IHJlcGFpcmVk","IHRpZ2Vy","IE5pY29sYXM=","VnM=","bnRo","LmV4cHJlc3Npb24=","IHNlYXM=","X0FDQ0VQVA==","IGZvcmM=","IEZyYXU=","IHRocmVzaA==","IM+A","KEJBU0U=","X09wZW4=","V3VudXNlZA==","IERvbWVzdGlj","KHByaXY=","Z3Vlc3M=","Ly8hCg==","Z2V0SXRlbQ==","KCkpCgoK","bXV0YXRpb25z","IHN0cw==","IGRlbWVudGlh","c3Bva2Vu","JHBhcmFtcw==","IHBhdHJvbnM=","IHJ1bndheQ==","IEJVWQ==","Lldhcm5pbmc=","IG5ldXRyYWxpdHk=","emhvdQ==","0YDQsNGJ","YWt0ZXI=","IENvbnN0cnVjdG9ycw==","w5NO","IFByb2dyZXNzaXZl","IEJ1cmdlcg==","IGluY3VycmVk","IGltcGxpY2l0bHk=","X2Vudmlyb25tZW50","IGV4YWNlcmI=","IGVuZHVyaW5n","c2lj","IFBhcnRpY2lwYW50cw==","X0Jsb2Nr","IGVucm9sbA==","X2VtcGxveWVl","IFBlcHBlcg==","bGF1Z2h0ZXI=","44OW","J107Pz4=","PScu","KHJlbmFtZQ==","IHNoZWx0ZXJz","IEFNQQ==","X2dhcA==","IFJFVVRFUlM=","eGFtcHA=","T01JQw==","IHBlZGlkbw==","IGTDqXZlbG9w","X18oLyoh","X29k","d2VyZQ==","X051bWJlcg==","X211bHRpcGxpZXI=","S0VFUA==","IHNob3dlcnM=","IG1hZ2U=","IHNpbm8=","Y3Jvdw==","LmlkeA==","X25vdGljZQ==","dWVpbA==","IG15cmlhZA==","IEF2YWlsYWJpbGl0eQ==","Y2VudHJhbA==","IEFCT1VU","IGluY29ycG9yYXRpbmc=","IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tCg==","X3dpZGdldHM=","IHN5c3RlbUZvbnRPZlNpemU=","w7ZydA==","L2pwZWc=","IFNNVFA=","KGJyb3dzZXI=","Z3Vucw==","c2V0dw==","X0FWQUlMQUJMRQ==","IGluY29ycG9yYXRlcw==","L2FuZHJvaWQ=","eXg=","5biD","X2xhYg==","IGxlYWtpbmc=","IEhpbnQ=","w7xuY2hlbg==","LlNjYWxl","IGZpcmV3b3Jrcw==","IGxQYXJhbQ==","YnNk","YXhvbg==","KHByZWRpY3Q=","Q29uZ3JhdHVsYXRpb25z","IFNwZWN0cnVt","SVJD","IEFkbWluaXN0cmF0aXZl","IGltcHJpc29uZWQ=","UlNwZWM=","IHJldGFpbnM=","IHNldHRsaW5n","IGNpdGF0aW9ucw==","IFdvcmxkcw==","c3RyY29udg==","b3VzYW5k","IEJlZ2lubmluZw==","IEFuZHJld3M=","IFNoYXJvbg==","RXhlY3V0aW5n","Z3JvdXBJZA==","YWRkRmllbGQ=","IGV4cGFuZHM=","IGtpbG9t
ZXRyZXM=","bGlua3k=","IGdycA==","SU5BVElPTg==","QnJpdGlzaA==","IGNvbXBvcnQ=","LkRhdGFHcmlkVmlld0NvbHVtbg==","IFByb2R1Y3Rpb25z","aWxkZW4=","IHVuaXg=","X2dhbGxlcnk=","X1BST1ZJRA==","b3JkZXJpbmc=","X2Fubg==","Ymg=","LkRlc2lnbg==","IHRyZWZmZW4=","IHVuZGVybGluZQ==","X251bXM=","7ZWc64uk","KXY=","dXNpemU=","IGRpc2FwcGVhcmFuY2U=","VG9Cb3VuZHM=","IHBjbA==","IFdpbm5pcGVn","IFNoZXJtYW4=","X2xhbWJkYQ==","bmFudA==","IHJvb3RWaWV3","LkZsYWdz","IGNlbnNvcnNoaXA=","c2VudGVuY2U=","LnJlYWRJbnQ=","X2Fzc2lnbm1lbnQ=","IHZlcnNjaGllZA==","IEZyYWN0aW9u","IG5hdGlvbmFsaXN0","IGp1ZWdv","IERlYWxlcg==","IHByZWRpY3Rpbmc=","YXVwdA==","aGVsbQ==","X1BSSUNF","X0RT","KCIjew==","bGlmdGluZw==","IHBvc2luZw==","IE5TTXV0YWJsZURpY3Rpb25hcnk=","IHNtYXNo","IGFraW4=","IGNhbXB1c2Vz","IE91dGxpbmU=","IEVsYXN0aWM=","X0NoZWNrZWRDaGFuZ2Vk","KElFbnVtZXJhYmxl","c3F1ZWV6ZQ==","cHR1bmU=","X0ZST05U","bWg=","IOyDneyEsQ==","UnVuV2l0aA==","IHR1cm5vdXQ=","c2libGluZ3M=","KWU=","X0FSR1VNRU5U","IEdyaWRCYWdDb25zdHJhaW50cw==","X1BPT0w=","LlJJR0hU","aWdnaW5z","dGVsZXBob25l","XEV4dGVuc2lvbg==","IEFyaXN0","aXR1cg==","IGZyaWVz","X2R1cA==","RXhwYW5kZWQ=","LXJv","IFdvcmxkd2lkZQ==","IENvcms=","w7Ns","TGlt","IGRlbm4=","UHJldHR5","IGZ5","VHJpYW5nbGU=","RmVhdHVyZWQ=","KENvbW1vbg==","X2VmZg==","ICIiDQo=","4bubaQ==","X0xJTkVBUg==","IFJpY2E=","IGNhZsOp","IGFwcGVsbA==","IG5pdmVhdQ==","ICYs","IGZhYnJpY3M=","X1BsYXllcg==","IGh5Z2llbmU=","IGRpc2FzdHJvdXM=","IHNoYXJlZEluc3RhbmNl","X3BpdGNo","cno=","ZW5tZW50","TmVhcg==","X1NUQVRT","IHN0YWlu","IEROQw==","IGlzc3U=","Xks=","CXRyZWU=","X2Jsaw==","c2V6","bGFpbg==","YW11","X293bmVk","VVNBUlQ=","Lmhhc0NsYXNz","SVNPTg==","IGZvZQ==","dXNoZWQ=","X1VOU0lHTkVE","IGluZGV4aW5n","IEZpcmViYXNlQXV0aA==","IGxpdGVyYWN5","IFNVUg==","IENvbHRz","YmVjdWU=","IEludHJv","IGNoYW90aWM=","IGFuaQ==","IEFubmll","xrDhu50=","LmR4","ZGlzY29ubmVjdA==","IGFyY2hpdmVk","W0xpc3Q=","PU4=","LnByZXNlbnRhdGlvbg==","UmVzdGF1cmFudA==","IHJvY2tldHM=","PWh0dHBz","L29w","IHB1cnNl","IEtyaXM=","IGNvcmFs","c2V0UGFyYW1ldGVy","IGlycmln","UXVlZW4=","Tl
NEYXRh","IHZhc3RseQ==","LkZpbGVz","IGZlbWluaXNt","KFN0cmVhbQ==","IGF0cmli","IGxpcXVpZGl0eQ==","PEZpbGU=","dHJhZw==","W2NvbnRhaW5z","IGhpbmRp","CWNw","aG9tZXBhZ2U=","IHN1cnBhc3M=","IGRheWxpZ2h0","YXV0aG9yaXpl","IENvbnNlcXVlbnRseQ==","QXN5bmNSZXN1bHQ=","IERpYXJ5","LlBhdHRlcm4=","LiovCg==","ZW5zY2hhZnQ=","IEp1ZGljaWFyeQ==","QWR1bHQ=","KCY6","IGplb3BhcmQ=","IEJsaXp6YXJk","IGdn","IjsvLw==","WEhS","IHBhc3N3ZA==","Pn0=","JyksJw==","IGNvbXBhcmF0b3I=","LmNoYWlu","IGluc3VyZWQ=","X0VER0U=","IHR5bGtv","X01BSk9S","d2F2","XEZpbGU=","RW50cg==","J2FwcA==","IGZvcmdpdmVuZXNz","CWRzdA==","Ijot","Lm1vbg==","ICgKCg==","IGNhcGl0YQ==","IGluaXRDb21wb25lbnRz","IHN3b3Jkcw==","IE91dHB1dFN0cmVhbQ==","IGhlYXJz","IFNQQUNF","LWluc3BpcmVk","X2Jvb3Q=","Lm5vbmU=","LmdldElucHV0U3RyZWFt","IGRldmlzZQ==","IHBlZGlhdHJpYw==","YW5zaQ==","X3BhcnRpYWw=","IHNoYXJk","IGZ1cmlvdXM=","IGRyYXdhYmxl","JSku","KGVt","IEJha2U=","CXBlcnJvcg==","IFJlbGlnaW91cw==","LSIr","CQkJICAgICAgICAgICA=","IFNlY3JldHM=","KG5vcm1hbA==","QUNFUw==","IFN0b2NraG9sbQ==","LW5vcm1hbA==","IGFjY3VzdG9tZWQ=","IGJvdXRpcXVl","IFN3aW5n","IGZpbQ==","IFBV","LlNvY2tldA==","ICciJw==","YW5q","TWFudWFs","IG11amVy","IHBoeXNpb2xvZ2ljYWw=","Y29udGFpbg==","TWVyZ2U=","IHN1YXM=","ICd7Ig==","bmVnbw==","IHN1YnNjcmliZWQ=","dG9hc3Q=","X1ZFUkJPU0U=","IGtuaXQ=","IEFydGlzdHM=","IGhlYXJ0YmVhdA==","IGZpcmVmaWdodGVycw==","c3Nh","W3s=","IHVuZGVyc2NvcmU=","IGhpc3Rvcmllcw==","aWdtb2lk","RmllbGRWYWx1ZQ==","VG9BZGQ=","LkNv","IEhhcm9sZA==","QXZvaWQ=","aWdoYm91cnM=","b3JkZQ==","IHRydXRocw==","L2Fs","IHdpcmVk","IEl0YWxpYQ==","IHNlcnZpY2lvcw==","IEFVRElP","ICciKw==","IHB1bXBpbmc=","IENsZW1lbnQ=","w4NP","5Y6f","Pm4=","IHN0clNxbA==","amRiYw==","4oE=","CVNFVA==","IEJVRkZFUg==","Oi8vIg==","IGNpcmN1bXN0YW5jZQ==","VUlUYWJsZVZpZXdDZWxs","LnZlcnRpY2Fs","IEpvaG5z","dG9saXN0","IGRyaXZld2F5","IGxlYXJuZXJz","dG9iZXI=","d2lubmVy","LXlvdXI=","LnN0YXRlcw==","SE0=","IGdyYWRpZW50cw==","IHNlaXp1cmU=","IG1hdGVy","IGRldGFs","IFJlZHVjZQ==","KG1vdXNl","IFJlU2hhcnBlcg==","LXJvdXRpbmc=","INi0","IGpva
W50bHk=","IEZhbWls","PE1lc3NhZ2U=","ZXhwaXJl","X3RyYWRl","4oCmLi4=","IEZVTkNUSU9OUw==","IHhlbg==","IHt9Ow==","RmFi","IGZlYXN0","KERi","Rmlyc3RSZXNwb25kZXI=","xLFsxLE=","IG1heFZhbHVl","IC06","YXB0aWM=","Lkdzb24=","IFJvdmVy","X2Nu","bG91ZA==","IGNoYW1iZXJz","INC30LDQtA==","LmZvcmVhY2g=","LmdldEVtYWls","55+l","Lk5vZGVz","IFZX","IFdhaXRpbmc=","KFF0Q29yZQ==","IHPDs2xv","cnE=","YW5ndWFyZA==","IHJlc2VtYmxlcw==","Oltb","IGdlZA==","X0VQ","KEFjdGl2aXR5","IElzbg==","IENydXNoZXJz","X1JVTlRJTUU=","CW9wZW4=","IEhpZ2hsaWdodHM=","w6lyYXRpb24=","IHllbGxpbmc=","IExJR0hU","UGhvdA==","dmVuZ2U=","IFN1c3A=","IENocg==","LkRpc3RhbmNl","YXJzaW1w","bGljYXM=","Lk1vbg==","IHN1Y2tlZA==","cHJpbnRlZA==","bXV0ZQ==","IHNldEVycm9y","Lk9wdGlvbg==","IGltcGFpcm1lbnQ=","bm9pc2U=","IHBhcnRuZXJlZA==","w40=","ZGVucw==","aWN6","IHdhaXRGb3I=","IG92ZXJsb29raW5n","IEZPUk1BVA==","IFRTdHJpbmc=","IHJlbnRpbmc=","CWNvbXBvbmVudA==","LkZyZWU=","IExhdW5jaGVy","PWRhdGU=","IFBvZHM=","QUdNRU5U","Q29kaWdv","Qml0RmllbGRz","IHViaXF1","LWNhcm91c2Vs","IFNpbXVsYXRvcg==","aW5vZGU=","J10pewo=","IEJhZ2hk","IG5vcnRod2VzdA==","aHRha2luZw==","PCY=","IHRyYW0=","IGZvcndhcmRlZA==","IGVycm9yTXNn","X0FTU0lHTg==","IEVudGl0aWVz","LlBhcnQ=","cmVhdHVyZQ==","KFVyaQ==","IERyaXZpbmc=","IGludmFzaXZl","aWdyYXRpb25CdWlsZGVy","b3NhdXJz","CXBvcnQ=","IGJyYW4=","aXR0aW5ncw==","RG9vcg==","IHsl","KGxpbWl0","IHNxdWFyZWQ=","IERJU1BMQVk=","LkFjY2VwdA==","LmJhc2VVcmw=","LkVudGVy","IC4uLikK","IG93bA==","IHNsYXRlZA==","LmZlY2hh","X1NFRw==","PXsk","IE9OTElORQ==","T05Z","INC00LDQvdC90YvRhQ==","b250ZQ==","X0NMSUNL","U2E=","SW1wb3J0YW50","IGNhcm91c2Vs","IGFwcGVhbGVk","IE5pZQ==","L2Jvb2s=","W10+KA==","IHhtYXg=","IGxhbmdl","LlN1cHByZXNz","IFRoaW5raW5n","QWRkcmVzc2Vz","IFNhbGx5","LVRW","IENoYXJsZXN0b24=","KSIKCg==","IHRhbGx5","IHVsbA==","IGxvY2FsZXM=","ZXdhbg==","IGluY3JlbWVudGFs","65Cc","IGNhcmV0","anVyZQ==","IGRvcg==","IGxvY2FsaXphdGlvbg==","IHNlYWZvb2Q=","IFJ1YmJlcg==","LlRoZXJl","IEZpc2hpbmc=","WVlZ","bWFnZQ==","IEZsZXhpYmxl","IEdFTkVSQUw=","ZWth","IHRocml2aW5n"
,"IHNpcw==","IGJvdXJnZW9pcw==","RmFrZQ==","LFwi","INC+0LQ=","Q09S","LWVmZmVjdGl2ZQ==","IHNrdQ==","ZWRseQ==","IyMKCg==","IEhvbGx5","IEZMQVNI","L1RS","Lm5z","cHJvYmU=","Z2lmdA==","b3dpdHo=","LW5hdmJhcg==","IHNhY2s=","57qn","IFRocmVhdA==","WkE=","WE0=","JyksCgo=","IExMVk0=","YXN6","RWRpdGVk","V2l0aFN0cmluZw==","U2lsdmVy","eW5h","X3JlbmRlcmVy","CURFQlVH","KG9wZXJhdGlvbg==","IFNsb3Rz","IEF1YnVybg==","eGVj","IGhvbW9zZXh1YWxpdHk=","LlJlc3RDb250cm9sbGVy","ZXJzaXZl","IHByb2ZpbA==","IE15YW5tYXI=","cm9zc2U=","X0lSUW4=","IHNlbmRNZXNzYWdl","IHRlY2huaWNpYW5z","IG1hbmU=","Y29tbW9ucw==","IHNocmVkZA==","Qm9vc3Q=","IHN5bXBhdGhldGlj","LWVmZg==","IENlcnRhaW5seQ==","IHfDpGg=","IFJvY2hlc3Rlcg==","dWNjaQ==","dXJt","ZW1wb3I=","ICIiOgo=","LXNwYWNpbmc=","IHNpeHR5","IOKckw==","X3JlcG9ydGluZw==","V2ls","b3lv","IGRpZFNlbGVjdA==","LmdldExvbmc=","LnNldEVycm9y","X25j","IERvbmc=","CWFzeW5j","IEhpZ2hseQ==","XToNCg==","TGVha3M=","LC4uLgo=","dmFsdWF0b3I=","ZGljdGlvbnM=","b3hlbA==","IGdlc3R1cmVz","PSI/","YmFncw==","IFJlbGllZg==","c3Vic2V0ZXE=","KG5hbWVzcGFjZQ==","fXw=","IG1pY3JvYmk=","IHB1cml0eQ==","Y2hpbw==","fT8=","X01VVA==","X2FjdGl2YXRpb24=","IFBpcmF0ZXM=","ICUj","aWZpY2FjacOzbg==","5Ys=","IE5SQQ==","w6dvbg==","fSkoKTsK","IENoZXN0ZXI=","4oCT4oCT","Z2V0Q29ubmVjdGlvbg==","LmFyZ3VtZW50cw==","RmV0Y2hpbmc=","IEZyeQ==","IERpdA==","IHppY2g=","cGFzdA==","LWxpYnJhcnk=","IEhheWVz","IGJvdW50eQ==","IFNwcmluZ2ZpZWxk","UE9S","IEFQUg==","IEVtYmFzc3k=","UVVFU1RJT04=","IFNvbGRpZXI=","ZXJ0YXM=","IE5PUk1BTA==","IGR1cw==","Ym9sdA==","IGRvcnQ=","IExpZnQ=","IGdldFJhbmRvbQ==","LlJ1bldpdGg=","LCksCg==","IHZhcmFyZ2lu","IGhhbmRsZUNsaWNr","XEh0bWw=","IGhvbW1lcw==","Y2lkYWRl","KGVw","SmE=","L2RpYWxvZw==","LnJhdGU=","IFdlaQ==","ZnVsbHNjcmVlbg==","IE5Vbml0","Lm1lYXN1cmU=","VmFscw==","IFNpZ25lZA==","IHJ1cw==","IHJhZnQ=","IEJsb25kZQ==","IG5ldHM=","IE1ldHJpYw==","aWNoVGV4dEJveA==","IHVyZQ==","IGludGVycmFjaWFs","ICd9Cg==","KHN0b3JhZ2U=","SW50ZWdyYXRpb24=","IGJhbmNv","QVNZ","IGppbnQ=","IGRlZ3JhZGF0aW9u","IEhBTkQ=","dWVyZG8=","PScn",
"IHN0cm9rZXM=","cmV3cml0ZQ==","KFNldA==","IE1hdERpYWxvZw==","IGRvc3NpZXI=","CWFuZA==","QURESU5H","IG11dHVhbGx5","IHByZWNlZGVk","fX07Cg==","IHN1YnR5cGU=","IHJlc29sdmluZw==","IGdlb21ldHJpYw==","W2NvbHVtbg==","IENUUkw=","IEhM","IGRhaA==","ICg7Ow==","UmFpbHM=","w5w=","IEdlbmVyYXRlcw==","LUxlbmd0aA==","cGVkbw==","b2dlbm91cw==","IFJvYmVydHNvbg==","LkJvb2w=","b2RlcnM=","X0FHRU5U","cGFzc3dk","IE5vZGVz","LmJp","IFdC","IHByb3BoZXQ=","c2xhdmU=","IOW8","IHdlaWw=","JTwv","IGNhcmJz","5rC0","IGV4cHJlc3NseQ==","XHhk","LWV5ZWQ=","IENyZWF0dXJl","Y29udGFpbmVk","KFNJRw==","IEVuaGFuY2VtZW50","IENvcnM=","R2Fs","X1NJR05BTA==","cmVpbnRlcnByZXQ=","IFFQdXNoQnV0dG9u","X05vbmU=","IGdlbm9jaWRl","IFNlYWw=","5LiK5Lyg","KHBlcg==","0LvRjNGC","IMOgcw==","LlRlbXBsYXRl","ICkNCg0K","LnNpbmdsZXRvbg==","CXNsZWVw","IHNwYXduZWQ=","IHBvc3Nlc3Npb25z","Z2V0Q29uZmln","IHRhaQ==","bHVkZQ==","IE1ldGVy","IGJpYmxpY2Fs","bWFyc2hhbGxlcg==","LlRvb2xraXQ=","IExlc2JpYW4=","LnNtYXJ0","IGJveWNvdHQ=","IGZyeQ==","LWRlc2M=","X1NlcnZpY2U=","IG1hY2h0","IENhaXJv","w6Bp","X3ByZXZpb3Vz","LnRyYW5zcG9ydA==","TWVkaWNhbA==","Q0dQb2ludA==","UVVBUkU=","IGJyaWdodGVy","IGNoZWNrQm94","IEZPVU5E","LmJyYW5jaA==","IGJsYWg=","IFByZWx1ZGU=","T2ZmbGluZQ==","TGlzdGluZw==","LyoqLyou","IEpS","cGhhbnRz","Z2V0WQ==","LkZpbmRDb250cm9s","Ii4uLg==","0LrQtQ==","SFJFU1VMVA==","IGNoZWNrbGlzdA==","KGFzdA==","IGJvcnJvd2luZw==","4oCmYW5k","INCX","IHByb2N1cmVtZW50","LXRhc2s=","X2hhbA==","UGxheWxpc3Q=","LnN0YXI=","X1NVUFBPUlRFRA==","QVNN","JUE=","cmVzdHJpYWw=","INC40YHQvw==","IHBhZ2Vy","IERpYWJldGVz","IE1haGFy","dGFu","QWN0dWFsbHk=","Pi8v","IFhW","4KeN","IHNlamE=","LnZpc3VhbA==","a2tlcg==","XTsKCgo=","IHR5cGVOYW1l","LkJ1dA==","Q2xpZW50UmVjdA==","aWNhbHM=","IERqYW5nbw==","IFJhcGU=","IHBheWRheQ==","KHJlc291cmNlcw==","LmJpeg==","dG9p","KFJ1bnRpbWU=","IER5bmFtaWNz","IEludmFsaWRPcGVyYXRpb25FeGNlcHRpb24=","KHR5cGVz","IFRhYnM=","Lk1pZGRsZUxlZnQ=","eGFi","IF8o","IERyZWFtcw==","X0dyb3Vw","KGNvcg==","TGVhZGVy","IGdyYWR1YWw=","KEJpZ0RlY2ltYWw=","IHRleHRhcmVh","bGV0aW9u","I
EZpbmlzaGVk","IFBvbGU=","IHRhcHBpbmc=","Jig=","IGZsaXJ0","IHRlcnJpZmllZA==","IHBhZHk=","ZXJlZw==","ZWxkb20=","IHN0YXRpb25hcnk=","IHBvbnk=","IFJFR0lTVEVS","X2FjY2Vs","IEhlcno=","IG1hdHJpeg==","IENhZg==","eGFj","YXNjdXM=","IGVubGFyZ2U=","QUNIRUQ=","eXl2YWw=","IHNpYw==","IENhbmFs","OnY=","PT8s","IEltcHJvdmVtZW50","P30iLA==","TlNPYmplY3Q=","IGVzY2FwaW5n","IE51bGxhYmxl","IGjDpA==","d2FudA==","RWxpbWluYXI=","IENMTG9jYXRpb24=","IHJldXNlSWRlbnRpZmllcg==","QnVmZmVyU2l6ZQ==","w59lcg==","IEFza2Vk","J11dLAo=","IHNoaWVsZHM=","Z3JhbmQ=","IFRvd25zaGlw","IFB1Yk1lZA==","ZWN0bA==","Zml2ZQ==","IFJlYWN0aXZlRm9ybXNNb2R1bGU=","IEdMZW51bQ==","RGFy","aWZhY2U=","LWluZGVudA==","Rm9ybXVsYQ==","LnNuYXBzaG90","Q09NUEFSRQ==","IGJlbHRz","CWNhY2hl","bGRhdGE=","IGVkYWQ=","IEJPWA==","KGNhcnQ=","X0xBWU9VVA==","IGZmbHVzaA==","IExPUw==","IFNvcnRlZA==","LnNsaWRl","IHRpamQ=","IFRleGFucw==","IFB1cmNo","IExldmVscw==","IHNlbWFudGljcw==","IFRlaHJhbg==","Ym1w","LnVybGVuY29kZWQ=","X3hsYWJlbA==","KGd1bHA=","IEJ1dHRvbnM=","IEJyb2tlcg==","55uR5ZCs","JGVtYWls","2ZA=","IGNsYXNzaWNz","Y29tcG9zZQ==","KGJz","IHVuaGVhbHRoeQ==","RXhlcmNpc2U=","Y3JldHM=","IFBhcnM=","IERldGVybWluZXM=","YWZvcnQ=","KG9icw==","IG5hc3Q=","IGlocmVu","IHJveWFsdHk=","c2VyaWFsaXplcg==","aWV1eA==","ICAgICAgICAgICAgICAgICAgICAgIAo=","ZXhlY3V0aW9u","IHZpZXdDb250cm9sbGVy","IHJlcHJv","LnBl","IGNhcGl0YWxpemU=","5Ye7","IHR1bm5lbHM=","LkRBVEE=","cGlyaXQ=","Q29sbGVjdGlvbnM=","KX19","IE9E","IGZ1enp5","SW1tZWRpYXRl","bGo=","Oz8+Ig==","W3Zhcg==","IHZvbGF0aWxpdHk=","cmVnbG8=","IHByb2xpZmVyYXRpb24=","IG9yYWNsZQ==","IEN2","IG51bmNh","UFJJTlRG","IGJyZWFrcG9pbnQ=","LkVO","IGJlc3Rlbg==","IHJlYmVsbGlvbg==","UGF1c2Vk","IGZsb3du","IHZpY2luaXR5","d3JpZ2h0","LGNw","aXNjaW5n","b3VjaGVycw==","QXNo","eWFy","IEVq","cmVwcmVzZW50ZWQ=","b2RpYw==","LmNyb3Nz","IGNyZWF0aW9ucw==","IFBhYmxv","ZmVzdA==","IEhpbHRvbg==","UmVwb3J0ZXI=","IERpbA==","aWxlbmFtZXM=","IGV4cGVuZGl0dXJlcw==","X0VESVRPUg==","IEFyaWFs","IHBsdW5n","IHVubmFtZWQ=","T3JFbHNl","IHJlY3JlYXRl","IEhlYXJ0cw==","PmFsZXJ0
","LmdldFBhc3N3b3Jk","IE11c3Rhbmc=","Vks=","IGFjY29tcGxpc2htZW50cw==","QXBwZW5kaW5n","IENheQ==","IFVzZXJNb2RlbA==","IHN1YnN5c3RlbQ==","TGVnYWw=","eW5jaHJvbml6ZQ==","X1BFUk1JU1NJT04=","IEFwYXJ0bWVudA==","bGlnZQ==","IGFmZmlsaWF0aW9u","KERFQlVH","VHM=","IENvbG9yaW5n","IFdvaG4=","bmljZQ==","KGxpc3Rh","4LE=","cGxveW1lbnQ=","44G+44Gf","5aW9","c3Vic3Q=","J11dWyc=","YWJvbA==","PSdf","4KeN4KY=","b3JwaGlzbQ==","LmxpdGVyYWw=","IFBsdWc=","IG13","b21hbA==","ICInIiw=","dXNp","IHNpZ2hlZA==","aWN1bHR1cmFs","Lios","IFByb3N0aXQ=","KGNvbnNvbGU=","SVBMRQ==","IFRyYXA=","WFI=","IEVkaXRvckdVSUxheW91dA==","X3ZvY2Fi","IGluY29tcGF0aWJsZQ==","IHVuY29uc3RpdHV0aW9uYWw=","LWxh","IGVyb3RpcXVl","IGRlcHV0aWVz","cXVpc2l0aW9ucw==","bmV3VmFsdWU=","YWRpYQ==","IGh3bmQ=","Z2luZ3M=","IFZhcw==","IEluY3JlbWVudA==","IEZsaW50","YW1iaWE=","X1BvaW50","LWRpc3BsYXk=","IEZ1bm55","LnRvYXN0","LmRhcms=","QmluZGluZ3M=","IGRlc2NyaXB0aXZl","YXJlbmQ=","LlJldA==","IHJlY3Vyc2l2ZWx5","IE1r","IFRJTEU=","LmNyZWF0ZVRleHROb2Rl","IFJBVw==","IGluZmx1eA==","54mp","VG9r","LWJvYXJk","UmVjb3JkaW5n","U3RyZW5ndGg=","IHJhaW5mYWxs","KGRk","LmZ4bWw=","bmV0cw==","LkltYWdpbmc=","IEJJT1M=","XSsi","T0U=","IHJlc2lkZW5jeQ==","WkU=","V0I=","LnNwYW4=","X2RlZmluZWQ=","Qk9U","Pm51bGw=","Zm9ybURhdGE=","Q3BwTWV0aG9kSW5pdGlhbGl6ZWQ=","X1VTRVJT","IE5vdmVs","aW5za2k=","PntA","ZXR0bw==","bmF0dXJhbA==","IFN0cmljdA==","Onc=","LnNhZmU=","IHRvd2Vscw==","4bqtdA==","LmdzdWI=","66M=","aW5xdQ==","IGFpZGVz","IGluY29t","Z2V0dGVy","IHdhc2hlcg==","YWN0b3JpZXM=","IGdldHRlcnM=","bWl0ZQ==","X3NvdXJjZXM=","IGhhcm1sZXNz","IHVub3M=","cHJlaGVuc2l2ZQ==","IG5vZG8=","IGdlb2dyYXBoaWNhbA==","IFNlbGVjdExpc3Q=","LlNjcmlwdA==","LkVudW1z","IEVOVEVS","d2FsZA==","IEJhcm9u","IHBhcnRpY3Vs","LmN1cnJlbnRQYWdl","QFRyYW5zYWN0aW9uYWw=","W2xpbmU=","CWRlcw==","SmFzb24=","LmdldENvdW50","IFBlbm55","IFBheWxvYWQ=","c2hhcnA=","W3JpZ2h0","dmVudGE=","IGFwbA==","IHByb2R1aXRz","IG90dA==","VHJhY2tz","LkFuZHJvaWQ=","IHNpbGljb25l","IEVMU0U=","YW5pbWF0aW9ucw==","dWx0dXJlSW5mbw==","IGJsdWVwcmludA==","b2Zzd
HJlYW0=","IFtdW10=","IFNlcnZl","IHRyaWc=","CXNlcnZpY2U=","IFN0cmF0","IFNhdmFnZQ==","IG9ianM=","IE5vdGlmaWNhdGlvbnM=","LHBvcw==","VGhpbmc=","IFJCSQ==","b3BhdGh5","IG5hdWdodHk=","bGJz","ZXByb20=","PiIu","IHBpb25lZXI=","IGphcGFuZXNl","QXVk","IGFsbGV5","IFBldHNj","J10/Pg==","IEtpbGxlcg==","LmdldEFic29sdXRlUGF0aA==","X2NhcHM=","xas=","IHN1YnN0cmF0ZQ==","LmFzc2VydElu","7JWE","IHRoeXJvaWQ=","IERlbHV4ZQ==","IGZhY3RvcmlhbA==","IHByZXNzZXM=","IEFjY29t","PW9wZW4=","LmdldFM=","IGV4cGxvcmVy","IHJlc2lkZXM=","QXNzb2NpYXRlZA==","IHRyYW5zZm9ybWF0aW9ucw==","VHU=","IFJpY2hhcmRz","X2JpcnRo","PSN7","LXNwZQ==","KG5k","IHZpc3VhbHM=","X3N0YW1w","IHRlcm1pbmFscw==","cm91dGluZQ==","KioqLwo=","IEphYg==","S0w=","Q29udHJpYg==","IHNvdXRod2VzdA==","IFBlcA==","CWVudGl0eQ==","IGxpbmVy","LlN0YXR1c09L","IFNjaHVs","KENM","IG1pam4=","YXN0b3M=","X2RpZ2VzdA==","IHBlcnNpc3RlZA==","LWNvbnRhY3Q=","IG9kb3I=","IGRpc2NvdmVyaWVz","X0ZJRUxEUw==","Rmx5","IHJ6","IExpc3Rh","UmVzZXJ2ZWQ=","dGF4b25vbXk=","KXNlY3Rpb24=","LyIpCg==","L3JlcXVlc3Q=","IHNvbWVkYXk=","Y2l0aWVz","L2ZpcmU=","IG9iamVjdGlvbnM=","CURFQ0xBUkU=","Lm5hdmlnYXRpb25JdGVt","LnNldGRlZmF1bHQ=","cmV0dXJuVmFsdWU=","VUNDRUVERUQ=","IG9ibGlnZWQ=","IFFhZWRh","IGh5c3Rlcg==","ZXN0aGVz","ZGlzdGluY3Q=","w6B5","IENvbWJv","CXNm","IOKK","IGRpc2NyZXBhbg==","IGluc2lnbg==","IFJFU1VMVFM=","IFZhbGlkYXRpb25FcnJvcg==","IEh0dHBSZXNwb25zZVJlZGlyZWN0","CVFTdHJpbmc=","IGF1dG9mb2N1cw==","RHVy","IFJFTEVBU0U=","LWRvbGxhcg==","LkNvbW1pdA==","IGtow7RuZw==","IGxhdW5kZXI=","Lj0i","IOaWhw==","IGJ5ZQ==","LkdldEtleURvd24=","IGdpbw==","X3NpZA==","IGdxbA==","LmNt","X1NMT1Q=","LkdldEluc3RhbmNl","cmV1c2U=","LnNodXRkb3du","IGplcnNleXM=","X01Q","cGF0aWJpbGl0eQ==","IOiuvue9rg==","IHJlcGxhY2VtZW50cw==","IHByZWNlZGVuY2U=","IGJ1ZmZlcmVk","LmJz","X0dSRUVO","YnJhaW4=","w6FjaA==","YXZhaWxhYmlsaXR5","IEVURg==","IGZyZXQ=","aXN0aW5l","IGxpZnRz","RXhpc3Rpbmc=","IHN0ZXJlb3R5cGVz","IGVtcHQ=","bW9uZ28=","LnRyYWluaW5n","YWxpc3Q=","LklzRW5hYmxlZA==","ICIh","PD8K","dWlkbw==","IGludFZhbHVl","LmVsYXN0aWNzZWFyY2g="
,"TE9HSU4=","IHJlbGlhbmNl","IHZpZXdUeXBl","IGRpbWluaXNoZWQ=","U2FyYWg=","IEFwcHJvYWNo","X1dFQg==","IGRybQ==","IGNvbHVtbmlzdA==","TWFya3Vw","IGFxdcOt","IERpYW5l","IGN3","IFRpY2s=","Lm9ic2VydmU=","SVJPTg==","SW5CYWNrZ3JvdW5k","IGVib255","IENvdXJ0ZXN5","Om51bGw=","KioqKioqKi8KCg==","L3Jlc291cmNl","SXRlcmF0aW9u","ZGVmYXVsdFZhbHVl","YXR0ZW50aW9u","INGA0LDQsdC+0YI=","IHdhaXZlcg==","IHByb2R1aXQ=","IEdyYWRpZW50","IHBlcmNlbnRhZ2Vz","IFNBTA==","IE1k","KHNuYXBzaG90","CWlv","aWtlcnM=","V2VicGFjaw==","IHNldFBhc3N3b3Jk","IGRlZmVhdGluZw==","IEplZw==","ZWxhcHNlZA==","aG9sZHM=","X3NoYWRvdw==","IG9mZmVuZGVk","IFBhbnQ=","IENhbGxhYmxl","X0lORk9STUFUSU9O","ZmZlZQ==","KGVtcGxveWVl","IFlBTUw=","cG9zc2libHk=","IG1heGltYWw=","ZWxsdWxhcg==","IFNueWRlcg==","ZGVzY3JpcHRvcg==","IFBMRUFTRQ==","RGxnSXRlbQ==","IGFydGlsbGVyeQ==","YH0K","cG9zaXVt","IGxlZXI=","JWM=","IGRpc3Bvcw==","Lm11bA==","IGdlb2dyYXBoeQ==","IGdyYXBoaWNhbA==","IGRyYW5r","IG1vdGlvbnM=","IHJ1dGg=","KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio=","IHByb2R1Y3Rpb25z","IGNyZWF0ZVRpbWU=","IFNjcmlwdHVyZQ==","YmJi","dWNocw==","5LiN6IO9","LkJpZ0RlY2ltYWw=","c2l6ZXM=","X3NvbHZlcg==","X0Zyb20=","X2pvaW50","IHBhdGhsaWI=","IGdlYXJz","INGE0L7RgNC8","IGNvbmNlYWw=","IGRpZmZlcmVudGlhdGU=","PEdhbWVPYmplY3Q=","IGplZGVu","IGFsbw==","Z2xvYmFscw==","ZXJ2YXRpdmU=","IHBhZGQ=","IFBseQ==","X3R5","IHByZXNlbnRl","IHByb3ByaWV0","X2xz","IFB1bmNo","IENyYXdmb3Jk","YmVsb3c=","Q3BwR2VuZXJpYw==","IENPTlRST0w=","IG9jZWFucw==","IFJPVVQ=","IHJhbmRpbnQ=","CWFkZHI=","IEhvbmVzdA==","IGVudmVsb3A=","IHRyYXVtYXRpYw==","IExBVA==","IHRn","7Iqk7Yq4","RXh0ZW5kZWQ=","IHVuY2hlY2tlZA==","IG9ic3RydWN0","X3RpbWV6b25l","UGVyc2lzdGVudA==","IGxsZXY=","LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKgo=","IEZsYQ==","LnBoeXNpY3M=","IGZvcmdlZA==","IExhdXI=","IG1vbm9wb2x5","IGNocmlzdG1hcw==","Z292","IFNtb2tl","W2Rm","IGJpc2hvcA==","bG9jYWxPYmplY3Q=","b3JyaA==","b250dmFuZ3N0","ZHJ5","IGVyZm9s","LWNl","IE9y
ZGVyZWREaWN0","IGh4","IFJFU0VU","U3Vj","IHJlY2tsZXNz","YWxhbWF0","QmlnSW50ZWdlcg==","IGJ1bGJz","IG11dGU=","5pS+","LlVsdHJh","TG9u","IGNsZWFyVGltZW91dA==","PFJpZ2lkYm9keQ==","c3dpcGVy","IENvbWVz","XGRi","CW1w","IHJlc3Rz","TW92ZWQ=","IExvcmU=","LkRpbWVuc2lvbg==","IE1hbml0","Lmh4eA==","PT09PT09PQ==","cGl0Y2g=","ZmZpZWxk","c2tpbGxz","X2FsYnVt","dHJhbnNsYXRlZA==","IFhJ","IHZlaW4=","IERhdmlkc29u","IEF1Y2tsYW5k","eXNzZXk=","IGF1dGhlbnRpY2l0eQ==","IEFzc2lzdA==","IGNvbXByaXNl","Q3JlYXRlVGltZQ==","IHRyZW5jaA==","LndlZWs=","LS07","IFVJQWxlcnRDb250cm9sbGVy","X3JlbGF0ZWQ=","Q01T","cmVtZWx5","IGxleGVy","aXJtd2FyZQ==","RWxlbWVudHNCeQ==","LXVwcGVy","IHN0YWdu","LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ==","X3NuYXBzaG90","L1hNTFNjaGVtYQ==","X09yZGVy","IGFubmV4","X0VOQ09E","IEFsdG8=","YXJpb3Vz","REo=","IGFib3J0aW9ucw==","Q29tYmF0","IExpY2VuY2U=","dWdnZXN0ZWQ=","W0s=","LCkpCg==","KCcvLw==","LkNhbg==","c2Vjcw==","cXVvdGVz","X3RyeQ==","IFNhZ2U=","IE1vdg==","J29u","cmVnaXN0","IFdyaXRlcw==","IERpZ2VzdA==","CWNvbnRhaW5lcg==","LXByb2dyZXNz","IGdvYXQ=","X3NjaGVtZQ==","LkdldENoaWxk","IGFzeW0=","Lm15YmF0aXNwbHVz","YXRpY2E=","cGdzcWw=","X2Fzc2V0cw==","Pks=","IGFmaW4=","TlNT","IE5BVg==","KCcuJyw=","IGAi","IGF1ZGl0b3I=","X01PVVNF","IHdhbGxldHM=","IG1vdQ==","cnVucw==","ZXRlcmFuZ2Fu","IFJlc2VydmF0aW9u","IGV4cGVyaWVuY2lh","CXByb2Nlc3M=","LWltcG9ydA==","X1JldHVybg==","IE1hY3Jv","IFBlbmlz","cGl4ZWxz","IHNldEVtYWls","KE1pZ3JhdGlvbkJ1aWxkZXI=","KHhz","IEVzdG9u","IEJ1YmJsZQ==","QUxMT1c=","CWhhbmRsZXI=","JHJldA==","IGNvbXBsaW1lbnRhcnk=","LWNpdHk=","IGVsbG9z","IFNPVVJDRQ==","IEFkdmlzb3I=","b2xvZ8OtYQ==","IGZhZGVk","LnBj","X1JHQkE=","QUZY","IHJlcGF5","IEZhbGNvbnM=","X2lzc3Vl","b21pZG91","LmJhb21pZG91","IGluZnJpbmdlbWVudA==","dXJuaW5n","L3N0b3JhZ2U=","X3F1YW50","IFF0Q29yZQ==","IG1lbGw=","X2RlbnNpdHk=","IEtub3g=","IFN1cnZpdmFs","LmdldFVzZXJuYW1l","IGNvbW1lcmNpYWxseQ==","Z3Jhc3M=","IG1laXM=","5Lq/","IFBlcm1pc3Npb25z","X1FVT1RFUw==","aXBob25l","IExPVA==","IHRo
cmlsbGVy","IENoYXBlbA==","IFJpcw==","Pmk=","LUlE","IHJpZ2h0bHk=","Q3J5cHQ=","IElzdGFuYnVs","cmVkcw==","X3Jlc2l6ZQ==","UG9wdWxhdGlvbg==","KGZldGNo","IEhPVA==","OmZpcnN0","IGdhZGdldHM=","UHlPYmplY3Q=","IG1lcmdpbmc=","ZHVjZWQ=","bGVnYXRlcw==","dWJlY3Rs","JS8=","YWxsZWU=","IHp1c2FtbWVu","LlByb3BUeXBlcw==","YXN0bw==","Oio=","cmVjZQ==","UmVzcG9uc2VUeXBl","L2dyb3Vw","IGJhcmJhcg==","IENhcm9saW5l","b3VyY2Vk","57uP","IGx1YnJpYw==","aW5zcGVjdGlvbg==","YW1tYWQ=","CUltYWdl","IGllcnI=","IGN1cnRhaW5z","X0FSQg==","IE9yYWw=","IGFsbGllZA==","IFN0YXR1c0NvZGU=","IENsZWFybHk=","UHJlZmVycmVkU2l6ZQ==","cXVpbmE=","IHNwb3M=","IG9wdGltaXNt","IGNvbXByYXI=","IGx1Zw==","IEJvb20=","Y29uZmlybWF0aW9u","X0RVUkFUSU9O","X2Jyb3dzZXI=","IHJlcGV0aXRpb24=","IGtlZXBlcg==","IGFkZFRv","KGpz","LlN0YXQ=","LkNvbmQ=","IEhlcm5hbmRleg==","cGFxdWU=","IHZvbHVudGFyaWx5","IGplcms=","IExleQ==","IGRvY3VtZW50bw==","X2RlYWQ=","IFRFQ0g=","IGluY2VwdGlvbg==","KCJ7fQ==","IG9uTG9hZA==","eGRk","IElTUA==","c3BlY2lmaWVk","IOusuA==","UFJPQ0VTUw==","KGFsZXJ0","Lk1N","IGNyZWF0ZVN0b3Jl","KHVuaXF1ZQ==","LmdldEJsb2Nr","656Y","dW5vcw==","IHRyb3BoaWVz","X2hvdmVy","IERhZGR5","Lk1l","IENPVVI=","T0JK","YXRlbWFsYQ==","IFBzaQ==","IG5vcm1hbHM=","YWNpZXI=","IE1CQQ==","IHBhd24=","z4U=","IHNwb250YW5lb3Vz","IGF1eGlsaWFyeQ==","IGluYXVndXJhbA==","IGZhc3Rpbmc=","IEZpbGVTeXN0ZW0=","IHplbg==","X0JMVUU=","IHN1YnRyZWU=","IHByZXByb2Nlc3M=","LXRyYWNr","Q2hhcmxlcw==","IGRlcG9zaXRlZA==","IHF1ZXJ5UGFyYW1z","0L7Qu9GM0LrQvg==","aWVtYnJl","IHByYXc=","eEZD","IHBhbmM=","X25vbQ==","aGVyb2Vz","Lmphdg==","OjokXw==","INin2YTZhQ==","U0dsb2JhbA==","5o+P6L+w","PXRlbXA=","ZXN0aQ==","IGNvbnN0cnVjdGl2ZQ==","IFNoaW0=","IERpcmVjdGlvbnM=","IEJpbmc=","ZGlydHk=","LXJ1bm5pbmc=","X2ZpbGVwYXRo","b3JkZXJJZA==","Z2FyZA==","X29yaWVudA==","IHNjb3V0","IHBzeWNob2xvZ2lzdA==","7LY=","IOWt","ZGVxdWU=","IEhlcm1pb25l","IFBvd2VyUG9pbnQ=","IGVsbGE=","IFVJQmFyQnV0dG9uSXRlbQ==","U3Vidmlld3M=","QFJlcG9zaXRvcnk=","IiIiCgoK","IHJldG91cg==","IGNpcmNh","R3JhcGhpYw==","IEdyYXR1aXQ=","ZGR5","IHRlY2huaW
NpYW4=","IENsZWFudXA=","IHBlcnNvbm5l","IHJlc2lu","Lk11bHQ=","JG0=","IE9yY2hlc3RyYQ==","IHdoZWVsY2hhaXI=","LlND","CUdhbWVPYmplY3Q=","IG1vxbxl","T3BlbmVk","IGNoaWNrZW5z","b3Rhcw==","X3RlbXBlcmF0dXJl","IGRldGVjdGluZw==","IGFjcXVhaW50","IDw/PSQ=","Pl0=","IG1lbnN0cg==","IGR5ZQ==","Um9ib3Rv","LnVuaXRz","IFZpbnls","Y3VyYQ==","cnlwdG9u","ZWRk","PXRlc3Q=","IHRyb3Y=","Q29uZmlybWF0aW9u","IHRoZW9sb2d5","IEhvbGRpbmdz","dWF0aW5n","UHJlZGljdA==","W3VzZXI=","IDon","IFNlc3Nv","cGFyZW50SWQ=","Q29kZUF0","YWJibw==","IFRyZXZvcg==","IFF1aXQ=","X3NoaXBwaW5n","X1JB","IGtsZWluZQ==","56Y=","X0xhYmVs","IE9tYXI=","IEdSRUVO","LykK","cm9r","IHJvYXN0ZWQ=","X1JU","IOKAjg==","QFJ1bldpdGg=","Pk5O","IHRhbmQ=","Kycu","Y3J1ZA==","LmtleWJvYXJk","YXN0ZXJ5","QkFE","IENvbHVtbnM=","LkNvbXBhbnk=","IHNlbWluYXI=","IGdldENvbnRlbnRQYW5l","IGNhdGFzdHJvcGhpYw==","IGVtYnJvaWQ=","aWF0aXZl","IGNydWVsdHk=","Ymlz","IGluc2U=","IEJyb2tlbg==","CWZz","IG1WaWV3","0LDRhtC40Lg=","LWZhY2Vib29r","IGNhY2hlcw==","44CC44CCCgo=","IE9STQ==","IERpc3RyaWI=","IFNjZW5lTWFuYWdlcg==","X3RyYW5zaXRpb24=","b21leg==","IFNIRQ==","IHdvcmtsb2Fk","U3VwcG9ydGVkRXhjZXB0aW9u","IHJpZXM=","IOWc","KGNhdA==","SGFzTWF4TGVuZ3Ro","QXBwcw==","LlRBQkxF","IEtleVZhbHVlUGFpcg==","ZWRpZG8=","LlJlbmRlcmluZw==","IGVsZWN0cm9t","IGFyYml0cmF0aW9u","IHZhcmlhYmlsaXR5","YXBvbGxv","IHV0bW9zdA==","b3BlbnNzbA==","IGjDpQ==","KCcm","LlN0YW5kYXJk","IGRpc3RyYWN0aW9u","aWZheA==","IOuVjA==","dGhvc2U=","aXNwZW5z","dmFr","IFNVUA==","IElzUGxhaW5PbGREYXRh","LGtleQ==","ZnJhZ2lzdGljcw==","IEpveWNl","IEZpYmVy","LlNlcnZsZXRFeGNlcHRpb24=","X0FsbA==","IGJhY2tlcnM=","IEF0dHJpYnV0ZUVycm9y","ewoKCg==","QHlhaG9v","LWRpcmVjdG9yeQ==","IHVuaW5zdGFsbA==","IGZsdW9y","bGlxdWlk","IGzDoQ==","IGZyaWdodGVuaW5n","YWRhbg==","IEFVVA==","IHRhdHRvb3M=","IHByb3BhZ2F0aW9u","LnRyYW5zbGF0aW9u","0J/RgA==","X3NjaGVkdWxlcg==","44CC4oCc","IGNhaXJv","IEh0dHBDbGllbnRNb2R1bGU=","IE5EUA==","IEhpdHM=","IFRyYW5zZm9ybWF0aW9u","IENhZXNhcg==","c3RpbQ==","IEJ1cnRvbg==","d3lu","IGNvbW1hbmRlZA==","IENsb3RoaW5n","IFJ1bnRpbWVPYm
plY3Q=","cmVhbGx5","Y2xh","LnNh","IFNoYW5ub24=","IGNvbW1pc3Npb25z","IEphbmV0","IGRpc2d1c3Rpbmc=","IG9wdGltdW0=","X3NvbA==","dXJvbnM=","IFNIQVJF","QXR0cnM=","IFNjaGU=","IEJpZ051bWJlcg==","IGNpZ2Fy","KGRlcHRo","IGZyYWM=","IEN1cnZl","TEFTVA==","IFNDUklQVA==","6rO8","TWFsbG9j","Lmdyb3VwYnk=","IExlc2xpZQ==","IHdoaWNoZXZlcg==","U21hcnR5","L3dl","IEFtcA==","LGlu","bG9wcw==","ZGVwZW5kZW5jeQ==","Y2VkdXJlcw==","IGB7","eGljbw==","Q29sbGVjdG9y","IGhhYw==","IERhcmtuZXNz","ZmZmZmZmZmY=","Jz0+Ig==","IHBsZWFzaW5n","Y29ubmVjdG9y","em9z","UENJ","dmFj","IEluY29ycG9y","IG5lZA==","X0ZBQ1RPUg==","LmZi","IG91bmNl","X3NhdmVk","INix","IGRlZWRz","IERvbHBoaW5z","IGJ1ZW4=","RVND","LHRpbWU=","X0FVVA==","ZWNz","IFNlbmF0b3Jz","Lm91dGVy","IFNlbGxpbmc=","IHJpbg==","PmAK","Lm9ic2VydmFibGU=","IGNvc3Rpbmc=","REc=","IHdpbmRpbmc=","IHNrYQ==","IGNpcmN1bGF0aW5n","IGZvcm1pZGFibGU=","YW1wbw==","IFJhaXNlZA==","IHZlZ2V0YXRpb24=","VUZGSVg=","S2lsbA==","cHRpdmU=","KHJ2","IENvdW50cmllcw==","IE5ha2Vk","IEpB","KSkiCg==","dWRhcw==","IGJhcms=","CWxldmVs","IGZvZXM=","PkFkZA==","WW91VHViZQ==","O3Q=","TkNZ","Q2x1Yg==","RWlu","LS0NCg==","IGNvbnN0cmFpbmVk","RVR3aXR0ZXI=","WUc=","RGVzY3JpcGNpb24=","VU5DSA==","IGVucXVldWU=","IGRpc2tz","IFdlbnQ=","IG11aXQ=","CWxvY2F0aW9u","IHJldmlzaW9ucw==","IEFDSw==","LWZpeGVk","dHJhc291bmQ=","XFRlc3Q=","U3RhcnRQb3NpdGlvbg==","LWh0bWw=","IHByb2JsZW1hcw==","X0lOVEVSUlVQVA==","IFNUT1JF","5qih","aWxpYXRlZA==","IFJQTQ==","W3RlbXA=","YWNodGVu","IGNpYw==","IEF1dG9tYXRpb24=","IGhpZ2hz","Lyg/","OicpCg==","c3Bhcms=","cmVscw==","CW1vdg==","VVRFUw==","LkF1dGhvcml6YXRpb24=","IFNjaG5laWRlcg==","IGNoZWVrcw==","YWRkcmVzc2Vz","YXJkaW4=","IHJlbW92YWJsZQ==","LkJhZFJlcXVlc3Q=","aWNpb25hcg==","IERpZXNlbA==","dGhhbg==","L34=","IGRhenU=","UmVnaXN0cm8=","ZmZp","X0RMTA==","IG5pZXU=","IG1vaXN0dXI=","LWV2ZW50cw==","IHRocmlsbA==","LmdldEVudGl0eQ==","IHRvZ2c=","IHdhdg==","KWRpZA==","YXRr","KHN1YnN0cg==","IEluamVjdGlvbg==","X21i","LkRpdg==","IGVuZGVhdm9y","ICjCow==","IGNsdXR0ZXI=","IHVyZ2VuY3k=","IGluc3RydWN0b3Jz","LS
cs","LXN0YW5kYXJk","Y2Vt","CWhhbmRsZQ==","LmZ0","U3RlcGhlbg==","Um9u","44GZ44KL","c2Np","IEF0bW9z","IGNhdGVyaW5n","IGZpYXQ=","LlBlcmNlbnQ=","IENvbmdv","eGRm","Lm1vemlsbGE=","IHNlaGVu","LnNob3dUb2FzdA==","T09U","LXJlc3VsdA==","zIE=","IGdob3N0cw==","IEJ1ZW4=","IFJpZGVy","IERvY3RvcnM=","IHVyYW5pdW0=","IGxvdWRseQ==","IHBvaXNlZA==","IGZhdm9ycw==","KEFQ","TEVZ","IHNpY2tuZXNz","IGNoYXR0ZQ==","IGludGVncmF0aW5n","IFl1cA==","Q2xvc3VyZQ==","IFRhbGVz","IGxpbmVh","IGV5ZWw=","LkNyeXB0b2dyYXBoeQ==","dW5leHBlY3RlZA==","YWxlbWVudA==","Y2l0","ZXRBZGRyZXNz","TGVhZA==","eGNk","X25lZ2F0aXZl","X2NvcnI=","aWdyYXBo","LWNoYW5uZWw=","IGRpc2Nv","U2VlZGVy","YmVhbQ==","X2Rw","Q0ND","IFByb3ZpZGVk","IGpzb25EYXRh","X1dI","RklORQ==","Qlg=","LkRhdGFBY2Nlc3M=","IHRlbXB0ZWQ=","IGZpbmVk","aXNDaGVja2Vk","IGZyYXVkdWxlbnQ=","RnJp","IGRvbWlj","UXVpeg==","IFVuZGVyZ3JvdW5k","YWJyYXM=","IElEaXNwb3NhYmxl","IFBlcnNvbmE=","IHJvZ3Vl","IEJleQ==","Z2V0Q2xpZW50","ZWtlbg==","ICcnJw0K","V2lraQ==","KEh0dHBTdGF0dXM=","U3RyZXRjaA==","IEdlc3Q=","IO2VmA==","IGVudGl0bGVtZW50","IGRvZW4=","YmxvZ3M=","IHZpdHJv","Ik9o","IFN1bW1vbg==","IEJhY2tib25l","IGfDvA==","Z2V0Q29sdW1u","IFdJTkFQSQ==","CXZh","X1JFUVVJUkVE","LnRocm93","IHNldEN1cnJlbnQ=","ZHVjdGVk","KEZ1bmN0aW9u","ZWxzaW5raQ==","X1Blcg==","ZmxpZXM=","IGluY29tcGV0","IGp1xbw=","KCkl","IC0tLQo=","dW1hcw==","IE9sZGVy","IGRpc3B1dGVk","X1JFUVVJUkU=","Lm1hdG11bA==","dW5rZW4=","5LmL","44GL44KJ","IHR0bA==","dW5kZXJzY29yZQ==","IFBhdHJpY2lh","IHRhcGVy","IHNlaW5lcg==","IHNheWE=","5Y+w","aWVyaQ==","LnNlY3JldA==","IHhvcg==","IG1pdG9jaG9uZA==","IGNhcmRib2FyZA==","fWB9","LUJFR0lO","IGRhdmlk","b3Vsb3M=","IFBldGVyc2J1cmc=","ICIiLA0K","c2hlbGY=","LXdhdGVy","LWJ5dGU=","INC+0LHRitC10LrRgg==","IHN0aXJyaW5n","7Je0","IGNvbXB0","IFBvdGVudGlhbA==","UkFGVA==","IGVhcHBseQ==","IHN3aW5naW5n","IGZlYw==","QVJB","IHdhbmRlcmluZw==","IHByZWZlcnM=","SmVzdXM=","IHBpcmF0ZQ==","IElzaXM=","Lk1pbmltdW0=","IFZhbGU=","X0JU","cmVuY2hlZA==","Y29ycw==","KGl0ZW1WaWV3","IGfDpQ==","LkNvbnRhY3Q=","Vmlld0NoaWxk","aW5kc2F5","Y
29uZmlncw==","RHVwbGljYXRl","4oCmSQ==","enlzdA==","KHRvZG8=","LlJlbW92ZUF0","X0RJRkY=","IEJvdHRsZQ==","IHZvbHRh","dHJhZmZpYw==","TGVl","IOyk","IHR1bmVz","IEVjdWFkb3I=","IFl1bg==","IHVuZGVyd2VudA==","aWNvbQ==","ICcnKXsK","LXBvbA==","ZmxhbW1hdG9yeQ==","TXV0YXRpb24=","IHJlY2Fw","X3ZlcnQ=","T1RJT04=","Q0RBVEE=","aWNpbmU=","X2JvdW5kYXJ5","U2NhbGFycw==","IFVsdGltYXRlbHk=","RVE=","bWV0YWw=","a3Nlcw==","bXBs","IGNvbnRlbg==","U29sZA==","RVNTQUdFUw==","IGJpbmRlcg==","IGxpbmVu","IE15QXBw","LW1ldGE=","CXJhaXNl","b3VsdHJ5","CW1vZHVsZQ==","5pi+56S6","bsOt","IHlycw==","IHBoeXNpYw==","LXBsYXRmb3Jt","IHN3aW5nZXJz","KGhlYWRlcnM=","Licp","IEJV","IEluY29udHJp","U2NlbmFyaW8=","QW1i","IHByZW1pw6hyZQ==","L2FydGljbGVz","IE1ham9yaXR5","Q0xVU0lWRQ==","b25vcg==","IGhhYsOtYQ==","5bee","IG1pZGk=","IExhYw==","LmZpbmRJbmRleA==","IFBhaW50aW5n","LmJvcmRlckNvbG9y","Kmo=","IGNvbmdlc3Rpb24=","X0RJQ1Q=","b2xsZQ==","YXJuYXRpb24=","KHRleHR1cmU=","IHVm","IEVpbnN0ZWlu","KFRocmVhZA==","IGluZG9vcnM=","c2NyYXRjaA==","IG1ha2Vu","LlNUQVJU","IEp1ZHk=","Zm9ydW1z","CgoKCgoKCgoK","QklMRQ==","IHZvdQ==","TVlTUUw=","IGdlcm5l","IEltcG9ydEVycm9y","IFN1cnJl","PG5hdg==","IERpZXNl","ZXdhcmU=","IOuqqA==","aW1wbGVtZW50ZWQ=","U0lHTg==","ICd7QA==","cnpl","Lm1pbmVjcmFmdGZvcmdl","LmlubmVySGVpZ2h0","YmVjaw==","IGN1cnJ5","IGZvcm11bGFz","YWdvZw==","ZW5kZXQ=","IFBhaWQ=","IFJvYmVydG8=","IHVucGFpZA==","PWhlYWRlcnM=","LlBvd2Vy","IGJyZWQ=","b3JFbHNl","b3hpZGU=","IGZpbmFsaXpl","c2V0Q29sb3I=","IFN0YWR0","KCdcXA==","aXNtaWM=","IGhlbGU=","LlByb3RvY29s","Lkhvc3Rpbmc=","X01lbnU=","X2NvbmRpdGlvbnM=","IHB1cmdl","LnhhbWw=","YmFyZQ==","RlJBTUU=","IGN1YmVz","IEpvaGFubmVz","b2NyYXRz","LkRpcmVjdG9yeQ==","KWE=","Pyk6","X0xJQlJBUlk=","IGdldFRva2Vu","IGVjaG9lZA==","PWg=","X3NvYw==","IEV2YWx1YXRl","IOq4sA==","IERlbGV0ZWQ=","RXU=","IGNsb25lZA==","c3RhdGlzdGljcw==","LkNhbnZhcw==","IGhhY2tlcg==","IGdhbmdz","LnJlc3VtZQ==","cGVhY2U=","0JLQstC10LTQuNGC0LU=","IFByb2NlZWRpbmdz","56U=","IGphcGFu","ID8+Pgo=","ICR7KHs=","LnJlY3RhbmdsZQ==","Z3c=","IE9yaWVudGF0aW9u"
,"JW0=","LiIpKTsK","IExpZXV0ZW5hbnQ=","LnRydWU=","IGVsdA==","IERJUkVDVE9SWQ==","zq8=","LmRheXM=","dXR0Z2FydA==","IHVuZGVyd2Vhcg==","LCkK","Q0lE","aW1lbGluZQ==","IEJsZW5k","cGhhc2lz","IHBlcnNl","IGdsaXR0ZXI=","IHVuaXE=","IENvbWJvQm94","IHNlc3Npb25JZA==","dXN0ZXJpdHk=","SURHRQ==","0L7QsdGJ","0KQ=","cmVuZGVycw==","X3Bvc2l0aXZl","X3Nsb3Rz","YnJvYWRjYXN0","IE1vbGQ=","L0NvcmU=","IEJhbm5vbg==","VG9vbEJhcg==","YWJlbGxl","X2F3","b2xlY3VsZQ==","IGRlbGV0ZXM=","IMOhcmVh","IHByb3BvcnRpb25hbA==","TVc=","IHdhcnk=","IGludGVybWVkaQ==","ICoqKioqKioqKioqKioqKioqKioqKioqKg==","LlNUQVRVUw==","X3R3","IGFyb21h","IGFjdGl2aXNt","LklzTm90TnVsbA==","dWF0","IHBvc3REYXRh","IHBlbQ==","X2N0b3I=","IFJhcGlkcw==","LW9mZnNldG9m","IGluZWZmZWN0aXZl","IG9uRGVzdHJveQ==","IE1ldHJpY3M=","IHBhZGRpbmdMZWZ0","LWVuYWJsZWQ=","IEdvYWxz","eW5jaHJvbm91c2x5","IHllcg==","SXRlbUF0","IE1ZU1FM","Y2Vzbw==","LktpbmQ=","dGVj","KGJ1bmRsZQ==","IHJlZmVyZWU=","LiI7DQo=","IGNvbmV4","IGJpa2luaQ==","X0FQUExJQ0FUSU9O","IHN3ZWxsaW5n","IGJlYWRz","IGJhcmdhaW5pbmc=","LS0tLS0tLS0tLS0KCg==","IGtpdGE=","KmZ0","TWluaQ==","IFRvbmlnaHQ=","IG1hbmlwdWxhdGVk","TWlycm9y","IFBvc3RhbA==","IG1hcmU=","RFc=","IGNvbXBpbGluZw==","IGZvcmVuc2lj","LmdldFZpZXc=","ZXBpbmc=","Q29z","IGFjY3JlZGl0ZWQ=","IG9iamV0aXZv","Y2FyZXQ=","UGFpcnM=","KT4+","IHNlw7E=","IHF1b3RhdGlvbg==","IEJyYW5kcw==","dWJp","eXB5","IElubGluZQ==","aW1ldGVycw==","V2ludmFsaWQ=","CWxpbms=","IEJlbGZhc3Q=","IE1lYXN1cmVtZW50","X05PVElGSUNBVElPTg==","IHJveQ==","IENHQ29udGV4dA==","IHdlZGRpbmdz","VVJOUw==","IHBvZGNhc3Rz","IFNlcmc=","IOuNsOydtO2EsA==","IGVhcm5lc3Q=","Y292ZXJhZ2U=","aXRlRGF0YWJhc2U=","RW1wbG95ZWVz","IERlbWFuZA==","IGNvbnRlbmlkbw==","IFFWZWN0b3I=","IiwiXA==","IEdlcmFsZA==","KClg","IGdyaWRCYWdDb25zdHJhaW50cw==","UkVTT1VSQ0U=","IFNhZw==","YWJpbGlkYWQ=","IGNvZXJj","b3VuY2VtZW50cw==","IElzbGU=","LmVkZ2U=","IGV4dGVy","KV1b","IFBsYXlsaXN0","IEJsaW5k","IFZpdGFs","IGxhdHRpY2U=","cmF0ZWQ=","ZGVwZW5kZW5jaWVz","IGBgYA==","IEthbmc=","bWFjaA==","LmZhZGU=","IEd1ZXNz","Kls=","TmF0dXJhbA==","Lk9r"
,"IFJlbmFpc3NhbmNl","IHRodWlz","IGxpa2Vu","Kmg=","XCcs","LWNsb2Nr","IE9iamVjdGl2ZQ==","ZmluZE9yRmFpbA==","IERpcnR5","IHNjYW5k","IFZBUklBQkxF","IGNvbXBhcmF0aXZl","eXBhZA==","KFNvdXJjZQ==","ZWNv","IGp1c3F1","CWFwaQ==","QnVpbHQ=","ICMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMj","IGxhYmVsaW5n","IGhlYWRhY2hlcw==","IG11ZmY=","IE9yY2g=","IGhhdGVz","LWJyZWFraW5n","L2J1dHRvbg==","IEJ1eWluZw==","TWV0cmlj","IHVuc3BlY2lmaWVk","L2hlYWQ=","IHN0aW5n","IHJlaW5mb3JjZQ==","IENvbVZpc2libGU=","Ymxpbms=","IEFobWFk","ZGJn","X2xibA==","IGh0dA==","7JuQ","cm9wb2xpcw==","ICgoX18=","IHBlcm1l","IGFwcGFyZWw=","U1RSRUFN","Y2h0cw==","IHNlaW5z","ZmlsbFR5cGU=","7KO8","Uk9XU0VS","dW1waW5n","IE5pZ2VyaWFu","4oCUaXM=","X2xvZ2lj","Lk9yZGluYWw=","bG9zdA==","L3Vzcg==","QWY=","IEl0ZXJhdGU=","aWJz","YWFs","IHN5bW1ldHJpYw==","LGlucHV0","IFBMTA==","dXppb25l","Y2FwdGNoYQ==","IFRhbGU=","RXhwaXJlZA==","IE9iamVjdE1hcHBlcg==","Y2lkbw==","LmdldE5leHQ=","IG1lbmphZGk=","OnNlbGVjdGVk","IHJpZW4=","X3NlbmRlcg==","UHdk","IEZsaWNrcg==","LkphdmE=","X3ZvdGU=","X01vZGU=","LiR7","IGZ1Y2tz","IEFsaWJhYmE=","IGluc2lkZXI=","YWNpbWllbnRv","IGZyYW7Dp2Fpcw==","SlNPTkV4Y2VwdGlvbg==","IEp3dA==","TWl0","bGVpY2g=","IHByYWN0aXRpb25lcg==","L3NvdXJjZQ==","IG9nbmk=","IHBoaWxvc29waGVy","U25hY2tCYXI=","c3RlbGx1bmc=","KGJpdG1hcA==","IGFzdGVyb2lk","IG1hcGxl","dWNoYQ==","aXRlbUlk","IHN0ZWh0","T3JkZXJlZA==","ZW5idXJn","L3Rva2Vu","6YWN","IFdlYmI=","b3dhbmll","IFdBSVQ=","IEhEUg==","IEV2YQ==","QVRUTEU=","KG1hc3Rlcg==","IGVycw==","YWxvYWQ=","IHNtdHA=","dW5pcQ==","IGd1aXQ=","IFJhZmFlbA==","Imlu","KFVJ","KExheW91dEluZmxhdGVy","b3Jhbg==","IHNlcnZp","bmV6","IFRvcnJlcw==","Lk1pZGRsZUNlbnRlcg==","IG1vbGw=","IFRleHRBbGlnbg==","X3VwbG9hZGVk","IE1laHI=","IGhvbW8=","LWxpbmtlZA==","dW5uZXI=","X2xlbmd0aHM=","IGRpZmZ1c2U=","IEF1dG9tb3RpdmU=","WWVhcnM=","IGxpZW4=","W2NvdW50ZXI=","a2xhc3M=","0YHRgtC4","LkVuZ2luZQ==","IG1lbnk=","dWx0eg==","IGluZmFudHJ5","Vmlh","c2VjdHM=","LmRhc2hib2FyZA==","IHNwb25zb3JzaGlw","Lk1vZGlmaWVk","Oy0=","IFZlbG9jaXR5","dHJhY3RlZA==","KG1ldGF
kYXRh","IHBsYWd1ZQ==","TlNVc2VyRGVmYXVsdHM=","YXBwcm92YWw=","cHJvYmFibHk=","LXNpeA==","X1ZJUw==","OicnLAo=","LmVuYw==","Lk1lc3NhZ2Vz","X1BST0dSRVNT","IG5lY2tsYWNl","IFRlbXBvcmFyeQ==","X21hcmt1cA==","IEZ1bmN0aW9uYWw=","IEpp","IHRlc3RDYXNl","ICgpOw0K","X0NlbGw=","IFJlc2lkZW50aWFs","IFJhaWx3YXk=","KCgmX19f","IGRlZmF1bHRzdGF0ZQ==","IGVpbm1hbA==","LmZhYw==","KmY=","IHBpY25pYw==","KGV2YWw=","IGZ1cm5hY2U=","YXNzb2NpYXRpb24=","eyEh","IENvbXBpbGU=","eGVi","RXZhbA==","gOyepQ==","KGNhbA==","IG1hcmtldGVycw==","X2hlbHBlcnM=","bG9jYWxjdHg=","IHlvZ3VydA==","IHZpdGE=","LGxlbmd0aA==","IElucHV0RGVjb3JhdGlvbg==","IGludGVydmVuZQ==","IGNvbXB1dGF0aW9uYWw=","RGVuaWVk","L2Vudmlyb25tZW50","aWlk","LkJveA==","LVRpbWU=","IGV4Y3VzZXM=","dHJhbnNwb3Nl","IG91dHJhZ2VvdXM=","KFNlcnZlcg==","ZGltcw==","Il0pOw0K","kJw=","IEVpc2Vu","KE9w","IGhhc2hsaWI=","KGxp","fiw=","xLFuZA==","IFNwaGVyZQ==","IEJlbGxh","LXRyYW5zaXRpb24=","LnJlYWRTdHJpbmc=","aGVhcmQ=","IFp1Y2tlcg==","IHdhbm4=","IGphaWxlZA==","IFRhbGVudA==","b3Bob2JpYQ==","wrY=","IG9wZXJhbmRz","U29tZW9uZQ==","IExpYnJhcmllcw==","cHJpbWFyeUtleQ==","16o=","VXI=","IG1hdGVz","INGI","LWR1dHk=","cG91cg==","PEVudGl0eQ==","PllvdQ==","Q3JlYXRvcnM=","V2l0aE5hbWU=","J2ludA==","IFJhdGlvbmFs","PUI=","LkF1dG9GaWVsZA==","IEZvdW5kZXI=","IE1lZ2Fu","LmltYWdlVmlldw==","Ym93cw==","IHdpdGhSb3V0ZXI=","IGxpYmVyYXRpb24=","IGZvcmFt","IGNpdGFz","b2NoZW4=","LnN3YXA=","IC4uCg==","LmN2dENvbG9y","IEF3YXJl","IHF1ZWVy","5aSE55CG","IEluZmluaXRl","L3N0cmluZw==","IGJsZW5kZWQ=","LUNvbA==","IHd5cw==","IHNpY2hlcg==","Lkxhc3ROYW1l","X3dhdGVy","X1JlbQ==","IGFydGhyaXRpcw==","LkFQUA==","IEV4cGFuc2lvbg==","eGRi","ZXN0cm8=","ZmF2aWNvbg==","VmVyaWZpZWQ=","IGRlbGl2ZXJpZXM=","YXJrZXQ=","IGdldEltYWdl","IEpQRUc=","IFRSSQ==","IEVsZXY=","ZnVzaW9u","IGpwZWc=","Y29sbGlzaW9u","IGRlc2NlbmQ=","LmZvcmU=","IExvZ3M=","IHBvbGljaW5n","dW50YXM=","Lmhvc3RuYW1l","YWNjZXB0ZWQ=","4KWL","IFdlbmR5","LnJlYWRGaWxl","IFNhbnRpYWdv","IEdvbA==","cmliYm9u","c3RyYXRpb24=","IHB1ZGQ=","IC8vXw==","aXNMb2FkaW5n","X1NFUklBTA==","IGluc
3RhbnRpYXRlZA==","IHBvZHM=","IHdhcnJhbnRz","IGFkbWl0dGluZw==","CWNvbm5lY3Rpb24=","X2J1ZmZlcnM=","IEluY2g=","IFpFUk8=","d2VydA==","IENsYW4=","CWls","KHNoYWRlcg==","IHBpbGdy","IOWK","RHN0","X2JhcmFuZw==","Oicj","QnV0dG9uVGV4dA==","dGVyZQ==","X2FtdA==","IEZvcmV2ZXI=","LkxpbmtlZExpc3Q=","dWFyZHM=","dXJvdXM=","IFNlbmRlcg==","dmFyaWFudHM=","X21hZ2lj","IGFjY29tbW9kYXRpb25z","YXBHZXN0dXJlUmVjb2duaXplcg==","UHJvbXB0","ID8+DQoNCg==","IHJlcHJvZHVjZWQ=","X3ByZWNpc2lvbg==","IHJ1dA==","bW9uZHM=","O3g=","IH0sDQoNCg==","55S7","IFZpdGE=","IHByb3Bvc2Vz","IFBhcnRpdGlvbg==","SElORw==","ICN7QA==","IGVzc2E=","KGJhcg==","IFplbGRh","LmNhdGNo","X2V4Y2VwdA==","IG92ZXJ3aGVsbWluZ2x5","CVRFU1Q=","X0NPTlRBQ1Q=","X187","IFNlbWk=","IHRyYWJhbGhv","cmFkb3Vybw==","X3NxdWFyZWQ=","4LY=","JUQ=","IHByYXQ=","aXRleg==","KGVsZW1lbnRz","UGxhbnQ=","YWd1YQ==","IGlocmVy","LkNvbA==","IE1jTg==","IENvcmV5","T05FWQ==","Q2VsZQ==","cmVtZW50","IG1hbHQ=","IEx1aw==","57uf","UE1FTlQ=","IGFuYWx5emVy","IEhhbms=","X3VuaWNvZGU=","IGJ1cmlhbA==","IENlbHRpYw==","RUZG","TG90","d29u","IE51ZGU=","IE5hdGU=","IFNpbmdlcg==","IFNJVEU=","KGJpdA==","Yml6","IGRldG9u","UkVBRE1F","OkFkZA==","IEhvbGRpbmc=","e3JldHVybg==","bmNpYXM=","Pg0KDQoNCg==","cnVwdGlvbnM=","LnJlYWN0","dXJzYWw=","4Lib","IERPTkU=","aXZhdGVk","Lm5vdGVz","IHN0cmlwZXM=","cmlwcA==","aXJhbg==","IHNsYWI=","IEJ1cm5pbmc=","KGVudA==","LnNlYw==","R1U=","X2dvbGQ=","XSkpLg==","ZWxpbmVzcw==","0L7QsdGA0LDQ","IOKIgA==","IGNvc21pYw==","J10pOgo=","Y2Npb25lcw==","Y2lzaW9u","Y29tcGFyaXNvbg==","IEV2YW5nZWw=","IFNoaXJ0","bGFnZW4=","IGnFnw==","IGZpbGxlcg==","LnByb2Q=","IAkJCQkJ","INGE0YPQvdC60YbQuA==","IFplcm9Db25zdHJ1Y3Rvcg==","QXRB","XSkNCg0K","IGNvbnN0cnVjdG9ycw==","X1NIQVJFRA==","CWRldmljZQ==","IEFkdmljZQ==","OkAiJUA=","Pn0n","LklzRW1wdHk=","IGludHM=","bW9zdGF0","IFNpZ251cA==","Z2Vhcg==","KHBhdGhz","LHsi","L0RvY3VtZW50cw==","PENhdGVnb3J5","VUVTVA==","IGdldERlc2NyaXB0aW9u","ICJ7XCI=","IEpvZXk=","b2Rlbg==","X2d1ZXNz","RVVS","IGhlcnI=","IHNlZGFu","IHJlYWN0ZWQ=","X2Nsb25l","IFJldmVs","IGZvcmI
=","UmVtYWluaW5n","XFNlcnZpY2Vz","IGF2aXM=","YmF0aW0=","emVwdA==","IERCTnVsbA==","Q29ubmVjdGlvbnM=","IGRpc3BvbmlibGU=","cGhpbg==","IHN0dQ==","IHNjaG9sYXJzaGlwcw==","LXNoYXJpbmc=","Zm9ybWluZw==","IEJyaQ==","VmFySW5zbg==","L3Nlc3Npb24=","IGFtYmlndW91cw==","IGFwcmVzZW50","X3Jk","c2l0ZXM=","L2FjdGlvbg==","dHJhY3Rvcg==","IGRpbGVtbWE=","IFNY","XS0tPgo=","IEphY2tldA==","UkFUSU9O","LmdldFNlbGVjdGVkSXRlbQ==","LWluaXQ=","IFJlZ2lzdGVycw==","X3NlcA==","IFRvb2xraXQ=","LmRpY3Q=","IHhsYWJlbA==","XFRhYmxl","dG9j","X2NvbWJv","IENvbXBhY3Q=","IHJ1Z2dlZA==","4KWH4KQ=","LW1hbmFnZW1lbnQ=","Jyl9fSI+Cg==","IFN0YW1w","xLFs","cm94","IGxhbmRzY2FwZXM=","X05PVEU=","bW9uYXJ5","Y2Fi","IG1vZXQ=","eGFm","cmNvZGU=","LWNsaQ==","X2dhdGU=","W2V2ZW50","U1BPUlQ=","Z2lh","IFNVUEVS","L0xvZ2lu","X3NodXRkb3du","aW50ZXJydXB0","IHByZXRlbmRpbmc=","IGZyaW5nZQ==","IFJlZHM=","IENVREE=","IFVOSVg=","dml0","IGJyaWc=","ZHJ2","IENvbm5lY3Rvcg==","VGhlcmVmb3Jl","IGxpYQ==","RGV0ZWN0aW9u","X2FjdG9y","IHRlbXBmaWxl","IGVjY2VudHJpYw==","LXJvbGU=","IHBhZHg=","ZGVudA==","V2VzdGVybg==","IOq3uA==","IEFwcGxpY2F0aW9uUmVjb3Jk","IGNhbXBhaWduaW5n","X3J1bm5lcg==","IENpdmlj","YWxlaWdo","IGRpcmVrdA==","LnN1bA==","ICAJCQk=","YW50ZW4=","IGlzc3Vlcg==","IGFzc2VydGlvbnM=","KG9yaWc=","QVRJTw==","IGxlYW5lZA==","w6Rz","LkRUTw==","ZXhwbG9kZQ==","Lk9ic2VydmFibGU=","IHN0YWdnZXJpbmc=","IGtpZG5hcHBlZA==","IHByb2dyYW1tZXJz","IElubm92","LnBhcmFtZXRlcg==","IGRvbWluYXRpb24=","IHNrZXB0aWM=","IOaYrw==","IGF2b2lkcw==","LlZlcmlmeQ==","dWJieQ==","IEFTTg==","IGZvcm1hdG8=","IEJlYXRsZXM=","X2JyYW5k","IGluc2V0","eW91dHU=","IHRvYw==","LWZpbmFs","U2hvd2luZw==","IERvdWI=","IE1lc2E=","QWRq","X21lZGl1bQ==","Q3JlYXRlcw==","KGVuZHBvaW50","CVVQ","YmJpZQ==","IHN0YWxr","LmRhdGFiaW5k","LlNjYW4=","YWdlbnRz","JCw=","aW5kaXZpZHVhbA==","Kykv","CXZt","KG5vdGlmaWNhdGlvbg==","IGluZXg=","IENsYXNzaWZpY2F0aW9u","cmVubw==","IG9saWc=","LXJhdGVk","IGZvcm11bGF0aW9u","Jyx7","IGFjZXB0","X3VucGFjaw==","X0NB","LlBvdw==","CWlt","IGFsdW1pbml1bQ==","QU5P","IHhu","IGPDs21v","IEluZ3JlZGllbnQ=","I
HNlaXp1cmVz","5YWx","aWZpY2Fkb3I=","IHNpZ3VpZW50ZQ==","IEluZnJhZ2lzdGljcw==","IGR1cGxpY2F0ZWQ=","IERlZQ==","IG7DuA==","IEFDQ0VQVA==","KGNyYXRl","0LjRgtC10LvRjA==","LWxlc3M=","IGluZmluaXR5","QW5hbHl6ZXI=","LURheQ==","cml0dA==","KGNpbg==","IEd5","IG11bHRpcGxpZWQ=","dWNoaQ==","IEJhbGR3aW4=","L2lw","IHNob3J0Y3V0cw==","LkFERA==","IHZpZ29y","X2luc3RydWN0aW9u","KDs=","X2V0YQ==","6L+e","dXRvcmlhbHM=","IGJvb3N0aW5n","YnY=","IGFja25vd2xlZGdlcw==","TGlzdGVuaW5n","RkFR","O2I=","KCgt","IGFyY2hpdGVjdHM=","IHp3ZQ==","IHB1bHM=","IGdldENvdW50","dmVyYnM=","44Cc","KENvbGxlY3Rpb24=","a3Jl","IGp1cmlzZGljdGlvbnM=","X2JyaWRnZQ==","IENyYWNr","IERpZmZpY3VsdHk=","S08=","UmVzZXJ2YXRpb24=","X3JlcXVpcmVz","VG91cg==","44GX44Gf","LnNldEN1cnJlbnQ=","IGt5","IEFsYmFueQ==","IOin","bGxlcg==","YWduYQ==","d29ya2Vycw==","LmJsYW5r","IFByYXllcg==","TUlD","IHJlc2lsaWVuY2U=","VGVY","IExhbmd1YWdlcw==","c3R1ZHk=","CWN1cnI=","IGVuenltZXM=","U2x1Zw==","IO2MjA==","c3RyYWw=","IHR1bW9ycw==","IHNlZ3VuZGE=","PSd7","aW5zdHJ1Y3Rpb24=","IExpc3A=","L2luZm8=","ICJ7JA==","LDopLA==","IGd2","KEVycm9yTWVzc2FnZQ==","ICc9","fS0kew==","LkRvY3VtZW50cw==","IldlbGw=","IHJlbWluaXNjZW50","IGdheg==","aXJvcHI=","ZWhy","IHN1cHByZXNzZWQ=","ZXJzaA==","LnNjcm9sbFRv","IGNhZGVuYQ==","IGdhbWVTdGF0ZQ==","w61t","KGNvbnY=","IFRvbW9ycm93","IENDVA==","TW9uZ28=","dWxn","LkNhbWVyYQ==","LmhhbmRsZXJz","bXBo","IHN0aw==","IGdlbmV0aWNz","QUNJTkc=","VHJpdmlh","IEJhbQ==","KG1hcmtlcg==","LlN0cmV0Y2g=","IFN1bm5p","IEJldHR5","LnRvbGlzdA==","dW5saWtlbHk=","LlJlY3RhbmdsZQ==","b2Jzb2xldGU=","SUxPTg==","aW5uZXJUZXh0","ZW1ib3VyZw==","YU4=","IFZlaGljbGVz","dW5sb2Nr","OnV0Zg==","bm9i","IFNlZWluZw==","IE5FVkVS","IHRscw==","IGZpbGxlcw==","IGJlbmVmaXRlZA==","IENsaW50","Ki8pLA==","LmZvbGQ=","IHBvc2libGU=","QURFRA==","dGhvdXNl","LkRBTA==","IE9kZA==","cm9rZXM=","IFN1bm55","IFBhcnRpYWxFcQ==","X0J1ZmZlcg==","IExldmk=","bG9uZ3JpZ2h0YXJyb3c=","ZWxkb24=","Z2FnZXM=","X3dhcm4=","LkNyZWF0ZVRhYmxl","IERpcA==","X3F1ZXN0aW9ucw==","LmxvZ2lj","ICMi","PXsoKT0+","IHRlcA==","IGp1aWN5","7
IKs","ZW5rbw==","aWFsZWN0","2Yk=","IG9uYm9hcmQ=","IOaP","CXJ0","X1VURg==","IFFBY3Rpb24=","4oCe","KENvbXBvbmVudA==","KGF1ZGlv","LmhpdA==","Z3Rl","IHByb2dyYW1tZWQ=","c3RhdGVQYXJhbXM=","IHBvbHllc3Rlcg==","ZmlyZXM=","Ynlzcw==","XT0o","X3F1YWxpdHk=","T2ZEYXk=","IEZhaXJ5","IHllbGxlZA==","b3Bs","KHVzZXJOYW1l","IERpZmZlcmVuY2U=","IGV2YWx1YXRpb25z","aWZmYW55","IGN5Y2xpc3Rz","IGNpZGFkZQ==","IHRleHRib29r","IHByb2ZpbGluZw==","X18pLA==","ZGVh","LmFjdGl2YXRl","IGluZGljYXRpb25z","0JU=","VG91Y2hVcEluc2lkZQ==","IGludmFsdWFibGU=","IE1BU0s=","IGNvbnRlbmQ=","RnJlcQ==","IHJlY3J1aXRz","KGludGVydmFs","IFVzZXJQcm9maWxl","ICcuLy4uLw==","ZWR1","X0NhbGxiYWNr","IGFuYWxvZ3k=","IFRyb3BoeQ==","YXBwaGlyZQ==","VmlkZW9z","IENoZXI=","IEhhdg==","4oCmIg==","LnZhbGlkYXRvcg==","Z2Z4","IFVPYmplY3Q=","Y2xhc3NuYW1lcw==","dHJpYW5nbGU=","IEVuY29kZXI=","LnNweQ==","IHByZWRhdG9ycw==","PXN0YXR1cw==","LXNhZmU=","OiIsCg==","IEluY2x1ZGluZw==","IHt9Ow0K","KmNvcw==","IGVuZHVyZWQ=","LnN1bGFrZQ==","IG51cnNlcnk=","IGZyYWdyYW5jZQ==","IHJlYnVpbGRpbmc=","IG50aA==","IEZyYXNlcg==","LnNldERhdGU=","IFZpbmNl","X1JFU1Q=","IHZlbnRpbGF0aW9u","5rW3","Y3JpYmVz","LmFzbQ==","bHBWdGJs","IEFiZQ==","dWlzaW5l","LGFycmF5","CWNsYXNzTmFtZQ==","ZXJyYWxz","ICcKCg==","Q2hlY2tvdXQ=","IHNvbGljaXQ=","QXV4","X2NhcHR1cmU=","IHJpYnM=","cmFnb24=","dmlvbA==","dG9waWNz","RnVuY3Rpb25GbGFncw==","IE1hcnR5","YmlrZQ==","IFR1Y2tlcg==","KGtlcm5lbA==","IE9wcw==","Q2xvc2VPcGVyYXRpb24=","L2RlbW8=","aWxkYQ==","IGzDrW5lYQ==","QVBQSU5H","IHN1aXRlcw==","LnZpc2l0VmFySW5zbg==","dXJ1cw==","IE1pbnV0ZQ==","KG1hbmFnZXI=","IGJ1dHRlcmZseQ==","IGFwYXJl","IHdvbHZlcw==","SldU","IFNhbG9u","CWRlbGF5","LWVzbGludA==","aXNhdGlvbnM=","LnJwYw==","KXwo","IFNuYXBjaGF0","L21t","TU4=","Y2VyaWVz","LnRleHRBbGlnbm1lbnQ=","IEZyYW5rZnVydA==","IGFkbw==","KG5ld1ZhbHVl","KGFjY2Vzcw==","KEV4cHJlc3Npb24=","IFNpZ25Jbg==","IEhhaXRp","X3Rw","LnNldFBhcmFtZXRlcg==","TWludXRl","IG1hbnVhbHM=","cmljYW5lcw==","IFBUUg==","IE91dGVy","IGdldGxpbmU=","b2NhdGlvbnM=","X0NE","IEx5b24=","L2d1aQ==","X2xpdmU=","aWRhbg=
=","Lmdlb20=","IGJvcmRlckJvdHRvbQ==","aW11dGg=","X2NoZWNrcG9pbnQ=","IG1ldQ==","IElydmluZw==","IHBldXZlbnQ=","KE1BWA==","IEFSQ0g=","IHBvdg==","LnNvdXJjZWZvcmdl","IGphbWFpcw==","IGFyaw==","IEJhZ2hkYWQ=","IENMRUFS","TWVudUJhcg==","IHRyb2lz","Q0hFRFVMRQ==","ICMNCg==","KENhbGw=","JG9yZGVy","KE1hdGVyaWFs","IGVuY29udHJhZG8=","JGxpc3Q=","IE1FVEhPRFM=","LmJlZ2luVHJhbnNhY3Rpb24=","X01BRw==","U3R5bGVTaGVldA==","IG1ham9ycw==","IGluZGVmaW5pdGVseQ==","Y2xlYW51cA==","IGhvbWVsYW5k","KGR0bw==","RGF0ZXM=","UHJlc2VudGF0aW9u","IERL","PXtgLw==","CUtleQ==","KEJsb2Nr","X2NoZWNrYm94","bmVlZHM=","IG9uQ29tcGxldGU=","cmljbw==","IGdsZWljaA==","IHht","T09E","QmV0dGVy","IFNRTElURQ==","LkJvb2s=","eGFk","IEdvbmU=","CWRw","IGRldm90aW9u","IHN0bQ==","IG9ic2Vzcw==","IEJhY2tlbmQ=","UXVlcmllcw==","SWs=","Ly8qKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq","IGRpdmlkZW5kcw==","LnBhcmVudEVsZW1lbnQ=","fSIpCgo=","IE1hdGVyaWFsUGFnZVJvdXRl","Om51bQ==","IGV4cGxpYw==","IE9M","bGVhc3Q=","T29wcw==","aW1lbnRvcw==","IGluc3VyZXJz","IGhlcm9pYw==","CWZpZWxkcw==","LmltZ3Vy","LmJ0bkNhbmNlbA==","IERldGVjdGl2ZQ==","KHNt","IE11dGFibGVMaXZlRGF0YQ==","LmxhYg==","KChb","IGhhaXJzdA==","IFRyYW5zYWN0aW9ucw==","5byA5aeL","IHN0ZENsYXNz","dWVudG8=","R0lT","X2NvZA==","SW5zdHJ1Y3Rpb25z","Q2FsbHM=","UG9pbnRlclR5cGU=","IFJ3","IGFzc29ydG1lbnQ=","IERJRw==","K3I=","X0NFUlQ=","IGluc3RhYmlsaXR5","IHZpYg==","b25hcw==","IHJva3U=","YXBlbGxpZG8=","IGFuZ2w=","cHJlbmV1cg==","IGZsdWlkcw==","aXNlYXNl","IGRlZWQ=","cXVpc3Q=","X0NPTlNUQU5U","IGVxdWlsaWJyaXVt","X2RlbGVnYXRl","IFF1YW50dW0=","cmVp","Q2FwYWJpbGl0aWVz","cmVjdGFuZ2xl","Pz48","YWxpZW4=","IEp1Zw==","RE5B","VGlja2V0cw==","T2NjdXJz","IEhhd2s=","LnNldEhvcml6b250YWxHcm91cA==","XENvbGxlY3Rpb24=","ZmZpdGk=","IHJlYXJy","LnNldFZlcnRpY2FsR3JvdXA=","IGNhdml0eQ==","IGFkdWx0ZQ==","RmFjYWRl","LXdo","IExPTA==","2LA=","IGdyYW5kcGFyZW50cw==","U3dpZnQ=","CXd4","5omA5pyJ","aWZlbg==","ZmZzZXQ=","QmV5b25k","Ly99Cgo=","IHdhZ2Vy","IGJ1cnk=","IGNvbW1lbmNl","cmVnaXN0cm8=","c2NpZW
50","IFBlcmNlbnQ=","INC00L7Qu9C2","KGlkZW50aWZpZXI=","LnNldE1vZGVs","IHNlbGRvbQ==","bnRvbg==","IGFwcGxpYW5jZQ==","YW11cw==","cnlzbGVy","IHBhbnRpZXM=","ZW5ndWlucw==","IG1pbWlj","IG9uQ2hhbmdlZA==","IGFsY29ob2xpYw==","LnJlbG9hZERhdGE=","Q2hhcmdl","IEZheA==","IGpTY3JvbGxQYW5l","RW1wcmVzYQ==","IHNoYXR0ZXJlZA==","eGJh","Rm9udHM=","P3M=","IHBvc3RzZWFzb24=","cmV0YWlu","X3JhdGVz","IHJlcXVlc3RDb2Rl","LnRvZG8=","wrRz","Q0hL","IEtlZXBpbmc=","ZW5nZWFuY2U=","IHZzY29kZQ==","SVBQSU5H","RGVmYXVsdENsb3NlT3BlcmF0aW9u","X3JhaXNl","IE9jdWx1cw==","b2dyYW1z","cmFq","cGNp","IGNvcnJvc2lvbg==","LmhhbmRsZVN1Ym1pdA==","QWNjZXNzaWJsZQ==","IFBpYW5v","bGl0dGxl","QUNM","xIdl","LnVud3JhcA==","IENvbnZlcnM=","IExlYmVu","aW9uZWVy","IE1lcmNoYW50","IEpvcmdl","IGVtYnJhY2luZw==","IHZlbnRh","w6FzdA==","IHZpZW5l","PFFTdHJpbmc=","IGV4cGxvc2lvbnM=","IGRpc3R1cmJlZA==","LiI8","bWVtbw==","IEFib3JpZ2luYWw=","IGNvbXBsZXRv","VGV4UGFyYW1ldGVy","IHVvbWluaQ==","KGFnZW50","0YPRgA==","IFdob2xlc2FsZQ==","L2Ft","IEJvb2ttYXJr","ZHJhZ29u","IGdsb3Zl","ICIiKSk7Cg==","aXZhcmlhdGU=","bm93cmFw","SW5DaGlsZHJlbg==","LkJy","IGNvbmV4aW9u","IGJhY2tib25l","IGVjbGlwc2U=","IHBlcnNlY3V0aW9u","JzoKCg==","L2xpbms=","IFBlcm8=","YW5kYXM=","IFRlaw==","LiIpOw==","LWFuYWx5c2lz","IGVyYWQ=","TWFyc2hhbA==","IGFuY2hvcnM=","b2dlcg==","IGNvbnZlcmdlbmNl","c3RpY2t5","IG5hdmVn","aW50ZXJu","X0RFU0NSSVBUT1I=","IENvbnN1bHRhbnQ=","ICAgICAgICAgICAgICAgICAgICAgCg==","IEF1Y2g=","IGVycmU=","xZtsaQ==","IEhvcml6b24=","Y29sYQ==","SW5zdGFsbGF0aW9u","aG90bWFpbA==","Q05O","LkNvbGxlY3RvcnM=","Y2hz","KHRyYWNl","IEVuY3J5cHQ=","IC0tLS0tLQ==","IEJhc2VDb250cm9sbGVy","IGFndWE=","IHJlYWN0aXZl","aWRs","IGNsYXNzTmFtZXM=","CVNlc3Npb24=","IERvZGdlcnM=","SGFk","X2x2","SXNWYWxpZA==","IEhFTFA=","dXR0bw==","IFZlcmlmaWNhdGlvbg==","IGdldGVudg==","X3Bh","LmJtcA==","OmY=","IExvdWlzZQ==","KCc7","L3NvY2tldA==","R3JhbnRlZA==","LmNhbGVuZGFy","KElQ","IFBY","LlJvb20=","IHByb2dyYW1t","ZW5zaQ==","IHRhYmxlc3Bvb25z","IGxldmU=","IG1vc3Ry","LnRpcG8=","L2Fu","KGRp","IGJpb2Q=","IGRiQ29udGV4dA==","IEp
TWA==","CXJlc3VsdHM=","LkVORA==","aHRl","bGlmeQ==","UHJlY2lzaW9u","6IqC","QVJTRVI=","KWRpZFJlY2VpdmVNZW1vcnlXYXJuaW5n","YXR0ZW1wdA==","SVNQ","JmE=","X1BPUA==","IFRhYw==","IHByZXBhcmVkU3RhdGVtZW50","INC30LDQv9C40YE=","IG93aW5n","LHN0YXJ0","IHJldmlld2Vy","IHJzdA==","IHByb3BUeXBlcw==","IHJvY2t5","X2xvY2FsZQ==","IFN0cmF0ZWdpZXM=","IFdlYmVy","LkNhc2NhZGU=","X2VxdWFsVG8=","IGNvc2Fz","IERlbGV0ZXM=","IE1heGlt","IHNocmltcA==","cmV0cmlldmU=","LkluY2x1ZGU=","SUdJTg==","IE9F","XSk7DQoNCg==","LmVudW1lcg==","IGNvZWY=","X051bGw=","UmE=","dHlhcmQ=","IFNoYXdu","a2VlcGVycw==","IHFx","X3Ni","b21lbnM=","IEV4ZWN1dGVz","IyI=","VFRZ","IFZhbHVlVHlwZQ==","KTsqLwo=","IEFic29sdXRlbHk=","IFRvdHRlbmhhbQ==","L2FydA==","IGJsZXNzaW5ncw==","IHN3aWZ0bHk=","YnVzdGVy","IGF2aWQ=","Q09NTQ==","LHRlbXA=","IH0/Pgo=","LWdyb3dpbmc=","IGRlZXBjb3B5","QWNr","ZWdnaWVz","IF9fKCI=","IG5vaXI=","dGVycm9yaXNt","IGFudGhlbQ==","YWdlbmN5","X1BBQ0tBR0U=","IENsb3N1cmU=","LnJlZ2lzdHJ5","IG1hbW1hbHM=","PEw=","VUlDb2xsZWN0aW9uVmlldw==","IExFRHM=","IHZvbGxleQ==","KEJ1ZmZlcg==","X05BVElWRQ==","bGliYw==","aW1wbG9kZQ==","U2Nyb2xsQmFy","IE1hcmlvbg==","LkNvbnRyYWN0cw==","X0F0","IFdlaW5zdGVpbg==","Y29tcGFyZVRv","IEhvc2U=","ZW5pdHk=","LmNyZWF0ZVF1ZXJ5","X3JvdXRlcg==","IHN0aW11bGk=","ICsrKQ==","IENoYW1w","IEJheWVybg==","YXNzYQ==","LnZh","IGRpc3RyaWJ1dG9ycw==","IGZpbGVwcml2YXRl","IGRlcGFydGVk","Y2NjYw==","QGNsaWNr","IEx1bmNo","Pkw=","IGJsdWV0b290aA==","LkRlZXA=","LXN0YW5kaW5n","w6FjaWw=","IHJvb2Z0","IFBhdGhz","X2l0ZXJhdGlvbnM=","SW52YWxpZEFyZ3VtZW50RXhjZXB0aW9u","LnNwaQ==","IFVJQWxlcnRBY3Rpb24=","dXll","c2lnbmlu","LnByaW9yaXR5","IEVzc2F5cw==","PSd7JA==","IOi/lOWbng==","X3NpZ25lZA==","LnBlcnNpc3Q=","IHJlZGVzaWdu","VG9Mb3dlcg==","IE5ld21hbg==","PXN0YXJ0","IElzcmFlbGlz","YXNpc3dh","U3BlZWNo","IG51bWVyb3M=","aGFuZGxlcnM=","IFdvbmc=","INC80LXRgtC+0LQ=","V2VpZ2h0cw==","IEd1amFy","dGVpbA==","IE5vbmV0aGVsZXNz","X0VGRkVDVA==","IHZlY3Q=","IE9zYw==","IGNvYXRz","IFdoZWF0","IGdlZWs=","IFBST1BFUlRZ","d29ybQ==","X2NvbnN0YW50cw==","IEJvdWxkZXI=","IFB
hcm0=","Y29sZQ==","IGRlZmF1bHRDZW50ZXI=","IFJvdWdl","OkE=","eGNm","IFZlbmljZQ==","bWVkaWFu","IHJlZGVtcHRpb24=","RnJlc2g=","IGNvc20=","IGZpZ3Vy","IHJlZnVyYg==","Q09QRQ==","LmNk","IGNob3Jkcw==","IFNndA==","xY0=","VlBO","IFNFTkQ=","YWluZW4=","X2FjY291bnRz","IHRlbnRo","IGRpc3NvbHZlZA==","PEFwcA==","IENvdmVyYWdl","dXNlU3RhdGU=","w6lybw==","Li48","IOyjvA==","IGRyZWFtaW5n","IEZvcmVjYXN0","LkN1cnNvcnM=","IHZpc2Fz","L3NjcmlwdA==","X3N0YXJ0ZWQ=","IGdhc3Ry","KFBSTw==","XTsvLw==","LlRpbGU=","KnNpbg==","KEFkYXB0ZXI=","IFNhbmRyYQ==","X1NJRw==","YXJkYXNo","IE92YWw=","IGRlc2NyaXBjaW9u","KHNs","IERlc2NyaXB0b3I=","IGAk","L2ZyZWU=","IEtleXdvcmRz","IHR1ZG8=","aW9uYWxl","KGZvdW5k","Lnh5eg==","IEdlbmVyYXRpb25UeXBl","X0RJU0FCTEVE","KGFyZWE=","IGVsaXRlcw==","IGhvbWJyZQ==","KG1lc3NhZ2Vz","IFJhYw==","IGV4dGluZ3U=","IEVzdGE=","b3Bv","LnZlbA==","bW91c2VvdXQ=","IGNvbnZvbHV0aW9u","IEhhbmRsaW5n","IGNlaWxpbmdz","VGVr","IEFyZWFz","LndyaXRlcm93","PFZpZXc=","IENvcm5lbGw=","X0JJTg==","LmludmFsaWQ=","JycnDQo=","aWXFvA==","X1Bvc2l0aW9u","IGtpZGRpbmc=","UENPREU=","IHdhdGNoZXI=","bG94","IOKX","RGF2ZQ==","X2FsbG93","IGJpc2V4dWFs","IHVub3JkZXJlZA==","IFNjaHdl","X3NlZ21lbnRz","IHRlYXJpbmc=","SU5MSU5F","IHVuZGVz","Lmdvb2Rz","LmNhbQ==","IExX","CXdoZXJl","Q2FsY3VsYXRvcg==","LXRocmVhdA==","LWFsZXJ0","IFN1enVraQ==","IElQQQ==","IEF0dGFjaG1lbnQ=","QUNDRVNT","KGR0eXBl","T3Bw","X3N5bWJvbHM=","IGRhbnNrZQ==","bGFnZQ==","b3JnZXQ=","cmVzb2x1dGlvbg==","0LXRhw==","IFFDb2xvcg==","IEJhcnJldHQ=","0LDRhtC40Y8=","PVwn","IE5hdkNvbnRyb2xsZXI=","L3JlZg==","KGNvdW50cnk=","X0hEUg==","IHRlcnNlYnV0","cGV0aXRpb24=","IHN1Zg==","Y3JlZGl0cw==","4LmM","eG0=","IERhdmllcw==","LnJlZGRpdA==","IHdvdmVu","IE9ibA==","IEtN","IENvbnNpZGVyaW5n","ZW5zb3JlZA==","LnBlcmlvZA==","IGRkbA==","JHdw","IGV4dHJlbWlzdA==","O1wK","IGtpbQ==","YWxlcnM=","IHNwYW5uaW5n","IGNvaGVyZW50","IGNvbnNlZ3U=","LnRleHRMYWJlbA==","LmdlbmVyYWw=","X2Rhc2hib2FyZA==","0LvQtdC90LjQtQ==","a2ljaw==","X1BJRA==","IEV4dGVuc2lvbnM=","cmVnZXhw","IENsYXVzZQ==","X21vdg==","ICAgICAgICAgICAgIC
AgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","IFJld2FyZA==","IExFR08=","QWs=","PS09LT0tPS0=","CXBhcnNlcg==","IG9uemU=","6YCA","4oCd44CC","X2JhbGw=","KHJocw==","IGNob3J1cw==","PGNvdW50","YXN1cmFibGU=","IHdpcmtsaWNo","IEVyaW4=","IE1TTkJD","IGV0dGVy","IENyb24=","X0ZMT1c=","ICwNCg==","IGNhbGlkYWQ=","IEZpbGVXcml0ZXI=","CXN0bXQ=","KEJ5dGU=","X3BhdA==","IHRlbGVzY29wZQ==","IGdyZWVk","IFRvcnQ=","KHdyaXRl","XGFwcGxpY2F0aW9u","CVJUTFI=","IENvbmZpZ3VyYXRpb25NYW5hZ2Vy","VW5peA==","RW5kVGltZQ==","SW5jbHVkZXM=","IEhhcnZlc3Q=","ZW5iZXJn","IEF1c3RyYWxpYW5z","IOuT","IHJu","IHJlcHV0YWJsZQ==","IGJsZW5kaW5n","VUxBVElPTg==","IEJyZW5kYW4=","ZGFk","IG3DuA==","IFdvbw==","X2Rj","VW5l","IHJ1ZQ==","d2l0aGlu","YW5nZXA=","IHBvdWNo","XCIiLA==","IFNpYw==","4oCdKSw=","YWx5emU=","IEdlZg==","Y292ZXJz","IGRibw==","cmVwbGFjZUFsbA==","CUxvZ2dlcg==","VHJ5aW5n","W3N0YXRl","LXBpZWNl","6ZaT","YmVoYXZpb3I=","YWxsb3dz","bHJ0","X3B5dGhvbg==","ZXJ0dXJh","LWNvdW50cnk=","IFRH","LlVJTWFuYWdlcg==","YmVucw==","YWxleA==","IEJyZWl0YmFydA==","YmFj","IHByZWRpY3Rz","IGdhYg==","IGNhcmRpbmFs","LlRpbWVVbml0","IFZpc2l0b3I=","IE1pbmc=","IGxpdnJl","IHBhcmVudElk","cG9ydHVu","IGRpbWVuc2lvbmFs","IFZlc3Q=","ZW5pYw==","4LM=","INmH","IEJMVUU=","IGl0ZW1Db3VudA==","IGZlYXRoZXJz","CXBzdG10","IFBvbGFy","ey8v","dW5kaQ==","0YPQtg==","emFy","RXJyb3JSZXNwb25zZQ==","7IOB","UmVwcmVzZW50YXRpb24=","Kl8=","K10=","cHJlcGVuZA==","ICc+","IGxlZ2l0aW1hY3k=","IG9v","U2xpbmt5","IG5hdGlvbmFscw==","LndvcmRz","O3A=","dHJhcA==","b21hbmlw","IGN1ZXM=","IGdyYWR1YXRpbmc=","IHNlbWFwaG9yZQ==","Il0pOwoK","YWNleQ==","UkVFVA==","R3JhYg==","IEZlbGl4","KElk","X25laWdoYm9ycw==","IG1lYW5pbmdsZXNz","KGRlbA==","IGplZGVy","IENvbnRlbnRWYWx1ZXM=","LmFic29sdXRl","L2Ns","IHhi","ZGF0dW0=","IHRvcnR1cmVk","IHJ1YmJpbmc=","U2NvcmVz","IPCfmIk=","IGF2b25z","IGFtc3RlcmRhbQ==","RU9T","SGFs","IHRydXN0d29ydGh5","Iz0=","LkVYVFJB","IG1hbm8=","aXNpY2luZw==","LXN1cHBvcnQ=","CWN1cnNvcg==","IFNwbw==","YWltYXNzYWdl","TWlzc2lvbg==","W117Ig==","IHByaW50
ZXJz","R1JFRU4=","IHRlZw==","IGFiZG9taW5hbA==","IQoKCgoKCg==","LlNob3J0","0LDQt9Cy","IEdpZnRz","fSIp","KGJpbmRpbmc=","eGNl","4oCR","aW5mb3M=","Rm9ybURhdGE=","IGRhcnQ=","IGVsZW1z","KGludg==","WUw=","dGlu","R0VORVI=","4buv","IFRha2Vu","dWNrbGU=","OmU=","IHNwZWN0cmFs","LmJhaWR1","LycpOwo=","IGdyZWVkeQ==","ZXNpb24=","LCwsLCwsLCw=","IC8+LAo=","SW50ZXJuYWxTZXJ2ZXJFcnJvcg==","TlNOb3RpZmljYXRpb25DZW50ZXI=","IEFp","IHNwaXQ=","IGF1Z21lbnRlZA==","IHN0YW5kYXJkVXNlckRlZmF1bHRz","RklOSVRZ","UmFjZQ==","OkM=","IFJFQ09SRA==","IEhpZ2hsaWdodA==","ICdg","IGRlZmljaXRz","IG5laQ==","IHJlc2VhcmNoZWQ=","VGE=","IGNvcHA=","LkdldEhhc2hDb2Rl","KToNCg0K","T25DbGljaw==","IFdlbGxpbmd0b24=","IHJldml2YWw=","5q+U","6Zeu","IE5TUw==","IGZvcm4=","IGludMOp","IEt1d2FpdA==","X2ZsaXA=","X2Jv","X1w=","IG9jY3VycmVuY2Vz","IFNjaWVudGlzdHM=","U1JD","b2dlbnM=","aWdyYW50","UkVNT1RF","IFNJRA==","Lm9wdHM=","dXZl","KCldKQo=","IGxpYmVydGFyaWFu","IEdsaWRl","bGVzZW4=","IGZvcm1l","b3dhbmlh","IGFubm95ZWQ=","RGVmcw==","IEV4ZWN1dG9y","IGNhc3Rz","LnNldENoZWNrZWQ=","IFNoYXJpbmc=","LlNlcmlhbGl6ZU9iamVjdA==","IHNlbGVjdG9ycw==","X09USEVS","66+4","KHN1cGVy","KE9T","X1ZFUklGWQ==","aWR1bnQ=","PGhlYWRlcg==","IC8+JzsK","IHZpZMOpbw==","IE5lZ3Jv","IExvcmRz","IFRvdXJz","IHNvZnRseQ==","LnJlY2VpdmU=","IEVSQw==","IGRhdGFTZXQ=","QmFkZ2U=","CUV2ZW50","IHBlcmw=","IHt9XA==","KHNlbnRlbmNl","T3JVcGRhdGU=","IGRpbWluaXNo","UElO","KGRyYXc=","LlRvRGF0ZVRpbWU=","LkVxdWFsVG8=","KHBpbg==","LXBlbmNpbA==","bHVlbnQ=","IENhbGxlcg==","IHBsYXlmdWw=","LScr","eGNh","c3dpY2s=","KXt9Cg==","fTokew==","IE1ldGg=","LmdldENlbGw=","LmJyZWFr","IHltYXg=","PSc8Pw==","LWpzb24=","IHByaW1laXJv","IGluZGljZQ==","44Kj","IFVOSVRZ","KGFi","0YbQuNC4","X0hBVkU=","LXllYXJz","IEVyZG9nYW4=","LXN0YWNr","IGRpc2NoYXJnZWQ=","IGJyZWF0aHRha2luZw==","IGdyYXNzcm9vdHM=","IEFzaWRl","aGVsbA==","IHNuYWtlcw==","L2xvZ291dA==","IG1pbldpZHRo","IEhlYXI=","IFN0b25lcw==","IFdpc2RvbQ==","IEV2ZW5pbmc=","X2JsYW5r","IFByb21vdGlvbg==","IE1NTQ==","IEJhcnM=","44K3","bmo=","X1RJ","IFNvY2lhbGlzdA==","IEVH","LW9
wdA==","PVwiJA==","KGRpYWxvZw==","IGJlaG9sZA==","IGludHJpY2F0ZQ==","IGVyZWN0aWxl","RXh0cmFjdG9y","IHNjbA==","IGNsYXM=","KGhpc3Rvcnk=","aWRlbnRhbGx5","IHBuZXVt","UmFuZA==","IExhcHRvcA==","Y2FsbGVy","IEZsb29k","b3BlbmVk","dWRkZXI=","IEdldHRlcg==","X3dhbGs=","KHdlaWdodA==","IEFsZXhhbmRyaWE=","IHRhYmxlYXU=","VmFyaQ==","IC0tLS0tLS0t","6Iez","ZXdvcnRoeQ==","U3BlY2lmaWNhdGlvbg==","IHRocmVzaG9sZHM=","KCIiKTsKCg==","X2ZvdXI=","IFNhZGx5","IChfKQ==","aXNtYXRpYw==","IEphaWw=","dG9IYXZlQmVlbkNhbGxlZFdpdGg=","Lm1hcg==","IHByZXZpZXdz","IHNjYWZm","aW5kaWNhdG9y","IGNvZGVjcw==","IGF1dG9j","KHJ0","LmdldEhvdXJz","IFJI","IFN1cmdl","aXZhbWVudGU=","IGNvbnRlbmRlcg==","Q3BwR2VuZXJpY0NsYXNz","IDs7Xg==","OjoqOwo=","LXJlY29yZA==","IG1hbWE=","IGltZ3M=","LmlzTG9hZGluZw==","IG5lZWRsZXM=","IGVuY3VlbnRyYQ==","b2RhdGE=","IEJ1ZmZlcmVkSW1hZ2U=","CWphdmE=","IFRvbWI=","VU5JVFk=","IGxpbmdlcmll","IEphbWFpY2E=","YnVncw==","KioKCg==","IE1hbw==","LmJlZ2luUGF0aA==","IHByb3N0aXR1dA==","IFBoaWxpcHBpbmU=","X3Nm","X3Bvdw==","IFNjaG8=","eGRl","J8OpdA==","4oCZYXV0","YWlzb24=","IEZpbGVJbmZv","dHVybnN0aWxl","ZHJlYW0=","IGlWYXI=","c3ludGF4","aWxsaXNlY29uZHM=","cHJvZmlsZXM=","X1JFR0VY","INC00L4=","IENvbW11bg==","QmV0","aXB6aWc=","IE1lbW8=","Lmlkcw==","IHBob3RvZ3JhcGhlZA==","IGFwcHJveGltYXRpb24=","OnZhcmlhYmxlcw==","IG1vZGlmaWNhcg==","X1NNQUxM","IEhlbXA=","IGRpc3Jlc3BlY3Q=","IGNvbnRlc3RlZA==","IGlubm9jZW5jZQ==","aWxsaXM=","U3ltYm9scw==","IGluc3BpcmF0aW9uYWw=","IGRpc2NpcGxpbmFyeQ==","IFBlcm1hbmVudA==","IGRlc2Ny","IFVOREVS","0YHRiw==","cHJlc3Nvcg==","SU1FUg==","IG1vdW50cw==","IG1vcmFsbHk=","X1NFQ09ORA==","LmZpbGVOYW1l","44OX","IGNvbnN0cnVjdHM=","IFNVTg==","RVNQ","RmluYW5jaWFs","IE51cg==","w7RsZQ==","cmljdWxhcg==","IFVzZXJNYW5hZ2Vy","aWJpbGlkYWQ=","IG9uUmVzcG9uc2U=","IGZpbG1tYWtlcg==","IGFsb3Q=","X1RIUkVBRFM=","IGVudmlyb25tZW50YWxseQ==","Li4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u","IHJhc2g=","IEx5cmljcw==","IGlwYWlycw==","QmFja3Vw","U2lnbnVw","IEB7Cg==","SlVuaXQ=","d29ya2Zsb3c=","IENvbXBsZXRpb24=","IGludHVpdGlvbg==","8J0=","IG1pYQ==
","IFNuYWNrYmFy","IFRpbg==","CWluc3RhbmNl","IE11c2ljYWw=","IHdlbGNvbWVz","IHJlZHJhdw==","X2NvbG91cg==","X1JFQUxUWVBF","X3NpbmNl","IEJ5dGVBcnJheU91dHB1dFN0cmVhbQ==","LWRlbWFuZA==","YXJldGg=","LnBhZA==","c2Vr","JywuLi4K","LWZpcmU=","Lnw=","IG51bWI=","IERPVUJMRQ==","QU1BR0U=","Y2htb2Q=","LWls","IGFsYXJtaW5n","Q29w","5aSH","aW52aXRl","X0lURU1T","IGxldWs=","IHJlZWw=","IGZ1bGZpbGxtZW50","UmVzdG9yZQ==","X3Jy","KGNsYXNzZXM=","IHBhZ2luZw==","eW1heA==","cmFwcGVk","7ZmU","fWB9Pgo=","IEhpcm8=","KFRSVUU=","YXN1cmVy","IGN1ZXI=","VWJlcg==","Lk9wZXJhdGlvbg==","IG9sYW4=","IHRocmlsbGluZw==","PFJlc3BvbnNl","IEZlbWlu","IHRyYXZlcnNhbA==","IHBvYw==","IHNldFN0YXR1cw==","ZGVjbGFy","c3RkYWZ4","IGFkZGljdGl2ZQ==","IEJ0bg==","IGV4cGxvc2l2ZXM=","IENvb2tpbmc=","IFBsYWludA==","IGFjY3VtdWxhdG9y","IEFwcG9pbnRtZW50","LHBhc3N3b3Jk","IEZBUg==","bHVldA==","RnVydGhlcm1vcmU=","ZGVjbHNwZWM=","X1N0YXRpY3M=","LkRpY3Rpb25hcnk=","Ij4nLg==","CXZhbGlk","IiIs","SW5zdHJ1bWVudA==","Pko=","IG5vc3Ry","IFJpZnQ=","X1BvcnQ=","IHZlY2Vz","W1sn","IHJhbGxpZXM=","LXNlcmllcw==","IHZ2","LnVj","IHJ0bg==","U3RhdGVDaGFuZ2Vk","KGlucw==","IENsYQ==","LS0tLS0tLS0tLS0tCg==","Y3Vz","IFJlbG9hZA==","Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0=","LnNlY29uZHM=","X2Rlc3RpbmF0aW9u","IHNjcmV3ZWQ=","PmM=","VGhpY2tuZXNz","RGVzaWduZXI=","IGdyaWRz","bsSF","KGNvb2tpZQ==","VHJpcA==","LU1vYmlsZQ==","IHZvbGw=","IGdlbml0YWw=","IGNvbmZpc2M=","IENvbmZlZGVyYXRl","IHdlYlZpZXc=","IG1pc2U=","IGNsZXI=","KHNlbGVjdGlvbg==","JGRhdGU=","IHNoYXJwZW4=","cmFnZW4=","QW5kVXBkYXRl","IHJlbWl4","IGh0b25z","Ulc=","TVBJ","IHJldHJpZXZhbA==","IHJpY2hlc3Q=","LkRlY29kZQ==","OmluaXRDb21wb25lbnRz","IFRWYWx1ZQ==","U2FpbnQ=","QGluY2x1ZGU=","IFBFUlNPTg==","LnNlcA==","IExEQVA=","Z2Jh","IGdyb8OfZQ==","IHJlbGlhYmx5","IERGUw==","LmdldEl0ZW1JZA==","IHByw6lzZW50","LmdldFRva2Vu","IGNoaW5lc2U=","IE1lYWw=","WU9V","Ij48Pz0k","KGNob2ljZQ==","IHBoZW5vbWVuYWw=","IFN0ZWVsZQ==","wqI=","IFBhY2thZ2VNYW5hZ2Vy",
"IFN5bmRyb21l","RGlyZWN0b3JpZXM=","aXZhcg==","LnVuc3Vic2NyaWJl","bGllw58=","bW9ubw==","X2Nvbm5lY3Rpb25z","X3ByZXNlbmNl","eW55","S25pZmU=","IGdyb292ZQ==","IHNjb29w","VEVNUEw=","YXNha2k=","LmhhbWNyZXN0","IGhhcmJvcg==","Y292","Kno=","IFh1","IHByb3Bvc2luZw==","IEZSQU1F","Q2hpcA==","IEVlbg==","IOyghA==","IHNtYXNoZWQ=","VW5zaWduZWQ=","KC4u","X2ZpbmlzaGVk","IGdldFN0YXR1cw==","IGZpYnJl","QXhlcw==","ICcvJyw=","eWFyZHM=","TURC","LWJz","aW50ZW50","IGJvb3N0ZXI=","LmRzdA==","LkRpYWxvZ1Jlc3VsdA==","IE1ldHM=","IGJlYXN0cw==","aW5jcmVtZW50cw==","LmthZmth","VUlBbGVydEFjdGlvbg==","LWV2ZXI=","X2JhbA==","IGhlbHQ=","IGZyZW9wZW4=","IFJlY3J1aXRtZW50","bGljdHM=","Zm9yZ2V0dGFibGU=","RGlzcGxheWVk","X1ZFTkRPUg==","Q29sbGVnZQ==","QVNDSUk=","IFNpbms=","IE1hY2Vk","IGN0b3I=","IGVzdMOjbw==","IFdpbmRzb3I=","X2NoZWNrZWQ=","X2RldGVjdA==","YXR0ZW5k","IHhtaW4=","IGluZGlzcGVucw==","L3BlcnNvbg==","X0RFVEFJTFM=","UkVESVQ=","SGF5","YWJvbGlj","IGZ1bmN0b29scw==","aWFpcw==","RlRQ","X1JlY3Q=","IEluZHk=","LXB1YmxpYw==","b2hhbg==","X21hbmFnZQ==","Q29tcHV0ZWQ=","7JeQ7ISc","IFNsaWNl","IGdheXM=","IGFsZXg=","YWl0cw==","IHJlY2VpcHRz","U1BFQw==","IEJFRk9SRQ==","IFByZWZpeA==","X3Zpc2l0","IHNwdW4=","TEVURUQ=","IGRvdw==","IGxlZ2FsaXphdGlvbg==","YWJiYWdl","IGNsYXc=","IFRjbA==","eGltYQ==","IGNvdmVydA==","Tmk=","IHRoYW5rZWQ=","IGFsbGVyZ2lj","bG92ZXI=","IEJyZWFzdA==","LmlzQWN0aXZl","IGdlYmVu","VkVSU0U=","Wk9ORQ==","CVJlc3VsdA==","JykuJw==","IGdlZQ==","IFNlcmlvdXNseQ==","cHVycGxl","IEVzcGHDsWE=","aWZpZQ==","LXBhY2s=","UGFydGljbGVz","ICcvLi4v","IG11bHRpbWVkaWE=","YXV0b2NvbXBsZXRl","IFRIUkVBRA==","IHJlZmVyZW5jaW5n","cmVldGluZ3M=","IHF1b3Rpbmc=","IGFzc2lzdGFudHM=","amVuaXM=","aGFwcHk=","IGxheXM=","bGliZnQ=","eGRh","IGZvdQ==","cGlhcg==","UmVjb21tZW5kZWQ=","IEJpcmRz","IFdhcnJhbnR5","w7xybGljaA==","LklOVklTSUJMRQ==","X2FuY2hvcg==","4oCdOg==","RmFudA==","X2RlZnM=","IGRyZWFtZWQ=","IF9fX19fX18s","cGxh","w6RmdA==","b2RrYQ==","xLFz","IGRhZGR5","c2NoZW1hcw==","PXplcm9z","IHJhdHQ=","CQkgICAgCQ==","aWVq","IGRyaWxscw==","LTw/","QUJB","Lmxpbm
tz","IERlcGVuZGVuY3lQcm9wZXJ0eQ==","Lmxvdw==","aGVlZA==","X0JMQUNL","L0FkbWlu","IGFtaWdvcw==","aW5nZWQ=","IE1pY2tleQ==","LkdldEF4aXM=","IE5lZWRlZA==","IEVuY29kZQ==","w6lyaWV1cg==","IE1hbmlsYQ==","IENvbGxlZw==","YWRhc3Rybw==","IGNoaWNhcw==","5L2g","IG9uZXNlbGY=","eGVh","ZHVr","IGd3","dXJnaWNhbA==","IENlbnRybw==","IGFlcw==","ZmVlbA==","IHRyb3Q=","IGVsZWN0cm9ucw==","IHJpdHVhbHM=","IEJpbGRlcg==","IGRlY29yYXRl","IFRva2VuVHlwZQ==","IGx1cmU=","QXBpQ2xpZW50","Z3JwYw==","IE9yYw==","Q29udGV4dE1lbnU=","UFJFRklY","LXRoZW1lZA==","X2ZpZm8=","LklucHV0U3RyZWFtUmVhZGVy","X3NwZWNpZmlj","IERTUA==","PXN1YnByb2Nlc3M=","L3NoZQ==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAo=","IGRhdW50aW5n","IGNsZWFycw==","IE1vdmVz","IG15c3Rlcmllcw==","LWJlc3Q=","IFZ1","b2xpYg==","IElzaA==","IGNhcmFjdA==","KExhYmVs","IERlYmlhbg==","IEV4cGVyaW1lbnRhbA==","IGNhdg==","LlRvRGVjaW1hbA==","IFJob2Rlcw==","IEhhd2tz","IGZvdW50YWlu","X1BFTkRJTkc=","X1NV","IHd4U3RyaW5n","IFBldw==","LmNsaQ==","0YTQvtGA0Lw=","LndlYmtpdA==","X0NO","IDs7PQ==","CW5hbWVzcGFjZQ==","IHdQYXJhbQ==","IHB1cHBpZXM=","IHRlcm1pbm9sb2d5","IGFkZGljdGVk","IGZvcmdl","IEdhcmRuZXI=","IHBlc3NvYQ==","CVJlc3VsdFNldA==","IGF0dGVudQ==","YW5nZW1lbnQ=","X2luZHM=","Q2hp","YXJpdGg=","RW5jb2RpbmdFeGNlcHRpb24=","bW91c2Vkb3du","IEJFVFdFRU4=","d2VpZ2g=","IkZvcg==","LmRk","aXRlbA==","WU8=","IERpY2U=","dW5peA==","IE9idA==","IENlZGFy","IHNwZWNpbWVucw==","cG9ybg==","IHVub2ZmaWNpYWw=","6buR","c29tZXRpbWVz","IEJ1bGxk","dHJ1c3Q=","Z2V0UmVzdWx0","IHNtb2tlcnM=","IHNhbmR3aWNoZXM=","IGV4aA==","IEZhZGU=","X0RD","IG1hc3R1cmJhdGlvbg==","Zm9ydGF3ZXNvbWU=","VEhJTkc=","X2FuZHJvaWQ=","IGRlZGlj","LXNlbnNpdGl2ZQ==","IG5hY2t0","TElCSU5U","IGFnb24=","IERJU0FCTEU=","b25lc2lh","Ymllcw==","IFpJUA==","IGhhdW50ZWQ=","IGN1aWQ=","L2NhcnQ=","a29z","CVJUTFU=","IGhpbmRlcg==","IGFkaXBpc2ljaW5n","SUVOQ0U=","LmJhbms=","IEN5cHJ1cw==","bWl4ZWQ=","LmN5","LXNpbmdsZQ==","PGxlbg==","Q29taW5n","IGZhdWx0cw==","IGZvcmVzZWU=","Z2V0bGluZQ==","ImE=","IGJyYWc=","IGRpc2Nz","IHJpcGU=","IG7DpnI=","IEdH"
,"U0hPVA==","ZGVyYWJhZA==","KGVkaXQ=","VG9MZWZ0","W10pOwo=","IGRvR2V0","dmF0dXJl","TmVlZGVk","IENoZW5n","Y2Np","RUZJ","IGZldWQ=","IGx1bmFy","LlNoYXBl","Tm9ib2R5","X1RSSUdHRVI=","Q3k=","Z3JvdW5kQ29sb3I=","IFJlbW92YWw=","KGJvdHRvbQ==","JG1zZw==","U0NJSQ==","cml0eg==","IGZyZW50ZQ==","IGNvbXBvc3Q=","YW5zd2VyZWQ=","IFJvZHI=","X0hUTUw=","IHNpbGhvdWV0dGU=","IFFVRVNU","IENhdGhlZHJhbA==","LkNvbW1lbnQ=","IE1u","LW5ldHdvcms=","LmdldEZpbGU=","LmdlbmVyYXRvcg==","IENoZWNrb3V0","X3pvb20=","IGVuY29kZVVSSUNvbXBvbmVudA==","X1RD","c29t","IFNlcmll","IGJhc2VVUkw=","CXJ1bg==","IGh1aA==","LnNlbGVjdGVkSW5kZXg=","IFNUQVI=","fi1+LQ==","YWJjZGVmZ2g=","Lm1hcHBpbmc=","PWRhdGV0aW1l","Q29vbA==","bmlt","IERpcmVjdGl2ZQ==","RmVkZXJhbA==","IG1lbnVJdGVt","INCQ","QW5uYQ==","IFJlY3JlYXRpb24=","cnlhbg==","LWFnZWQ=","emVyYmFp","4oCm4oCdCgo=","Y2FtcG8=","IG1pbmlhdHVyZQ==","ZGV0YWNo","bWVhbmluZw==","X2VtcA==","UGVhaw==","IGJjbQ==","IEh1bmdhcmlhbg==","IENhc2NhZGU=","IHNhY2tz","IHRydW5jYXRl","IOKWiOKWiA==","IHdoYWxlcw==","IHNvcnRhYmxl","IGFzc2VydHM=","IHNlYWxz","b2N5dGVz","XSkpKQo=","YWxhcm0=","cmVzc2luZw==","KHNpZ25hbA==","IGVtcGVyb3I=","CU9O","Y29tbWl0dGVl","IHRyaWxvZ3k=","LlRyYW5zYWN0aW9uYWw=","R3Jvdw==","X3VhcnQ=","IHN3aW5ncw==","IHNwZWN0YWNsZQ==","4oCZYXY=","IFNlbnRpbmVs","INmE","IFRvdQ==","IHdpZG93","Z2VyYWxk","LHVpbnQ=","IHVudXN1YWxseQ==","PENhcmQ=","IFJlc3RhcnQ=","bW9y","44GC44KK","aXhlZFJlYWxpdHk=","IGhhbmRndW4=","4pSA4pSA4pSA4pSA4pSA4pSA4pSA4pSA","IGxpdGhpdW0=","UmVzb2x2ZQ==","Z2V0Qnl0ZXM=","L2Z1bmN0aW9ucw==","IHRhY2tsaW5n","T3V0bGluZWQ=","IH08Lw==","IFNleG8=","IEFuaw==","IHJhdGlvbmFsZQ==","cmVtb3ZlQXR0cg==","IG11bmljaXBhbGl0eQ==","IGFzc2F1bHRz","Q0hPT0w=","IFJlZQ==","IGJhdWQ=","pqw=","IGVuaGFuY2Vz","INC/0YDQtdC0","IGNvbmNlc3M=","Lmluc3RhZ3JhbQ==","LmdldFJlc3BvbnNl","c2VnbWVudHM=","IHdlbGxiZWluZw==","fTsKCgoK","aHVuZw==","44OG","IHJlbm92YXRlZA==","LmV4cGVjdGVk","IHJhZGlhbA==","IGNvbW11bmFs","dXNlck1hbmFnZXI=","K2E=","IGZ1bmRhbWVudGFscw==","LlRI","6II=","IHJhbnQ=","IFN0cmF3","IE9sZURi","YXppbw==","IG
hhbWJ1cmc=","IHBhaW50cw==","IHRodW1icw==","IE51bGxQb2ludGVyRXhjZXB0aW9u","IGdyb3VwZQ==","IEhvbWVDb21wb25lbnQ=","IGJhbGxv","IElOSVRJQUw=","X2FyZQ==","IFBlcw==","dXJzZXM=","IGJhcmR6bw==","LmdldExlbmd0aA==","YW1vdG8=","Lm5vdGlmeURhdGFTZXRDaGFuZ2Vk","aWVuZXM=","ZW56aWU=","X2VtYg==","dW1uaQ==","c21vb3Ro","IERybw==","cGFzdGU=","IE5hcnI=","LS0tLQoK","z4k=","IEF1dG9y","IG91dHJvcw==","IExBQkVM","LnBh","LlN0dWRlbnQ=","KFhtbA==","IGV0aG5pY2l0eQ==","IEl2eQ==","44KI","X2Zha2U=","Pyg6","dXBsb2FkZWQ=","Z2V0TWFuYWdlcg==","LVFhZWRh","b2RpYWM=","Q29ubm9y","aWhhbg==","TUFU","KG1pZA==","IEFsYmFu","IHNvaXI=","Q29tYm8=","IFB1YmxpY2F0aW9u","b3BvdWxvcw==","cGlz","IHRlbXBsZXM=","b25neWFuZw==","X2NsaWVudHM=","IHJvZHM=","IHhj","aWprZW4=","IHJlYXA=","IOS4i+WNiA==","CWNvbm5lY3Q=","Rm9jdXNlZA==","LGNvdW50","aWV0ZXQ=","IGhhY2lh","X2FsbG9jYXRvcg==","IHRveGljaXR5","KHNlcXVlbmNl","IG51ZXN0cm9z","IFByaW5jaXBsZXM=","IGxsZQ==","YWxhcmlh","LndyaXRlU3RyaW5n","IEFGTA==","aWZuZGVm","IERvcw==","xZtjaWU=","IEFnZ3JlZ2F0ZQ==","IHNhY3JpZmljZXM=","X29mZnNldHM=","bGRi","IGxhdGNo","IGZ1bGxzY3JlZW4=","bWlzc2l2ZQ==","T1BUSU9OUw==","IFRlbGVwaG9uZQ==","IGFyc2VuYWw=","amVqZXI=","IEhvc3A=","IGZhdm91cml0ZXM=","cml2ZQ==","LmluY3JlbWVudA==","IGJ2","IEZhbnRhc3RpYw==","LnNheQ==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","IG1lZGljaW5hbA==","IERST1A=","IHBpdHk=","bWV0aXM=","IHdvbGxlbg==","IGJlZg==","X0Js","ID4+Cgo=","Ym93ZXI=","IHN3YXBwZWQ=","L2luc3RhbGw=","IHNpbmtz","ZXRyaXpl","IGRlY2xpbmVz","CW15c3Fs","IENTdHJpbmc=","IE1vdGlvbkV2ZW50","Lkxhbmd1YWdl","Um9hZA==","0YLQtdGA","YXNjaW1lbnRv","JykpLT4=","LmFib3V0","KGVkaXRvcg==","IFJhdGluZ3M=","aW5jb21l","xaFl","LmRlcXVldWVSZXVzYWJsZUNlbGw=","IEF1c3RyaWFu","IHN1bGxh","IFRyaWJ1bmFs","IERpZG4=","0L7QstCw0YA=","IGluc3BlY3Rpb25z","Qm9zcw==","IGNvY2t0YWlscw==","IGFwb2xvZ2l6ZWQ=","X3N1YnBsb3Q=","b3BhbA==","Kz0o","IHJlc29uYW5jZQ==","aWJ1","IOumrA==","cm9tYQ=
=","cmVzZXJ2ZQ==","cGxz","IFRhaA==","YXhpZXM=","T1BMRQ==","IERhcnJlbg==","IFpvbWJpZQ==","X01hcA==","IF0pCgo=","IFFp","IFNhaWw=","IHJlc3RyaWN0aXZl","IGVyb3Npb24=","LXBhcg==","V0hJVEU=","IG9sZHU=","IGFwZXJ0dXJl","IGJpdGNvaW5z","dGV4dG8=","IENvbWNhc3Q=","IHRpbWVsZXNz","ZW5raW5z","IGZlZWRlcg==","L3RtcA==","cmVzZGVu","Kydf","LkRlc3Ryb3k=","IMOnb2s=","IERPQ1VNRU5U","LmxuZw==","LnRhZ05hbWU=","IGt1bGxhbg==","ZWdyYXRl","ICgqLg==","57yW6L6R","IGhhbmRzaGFrZQ==","c29j","X2dlb21ldHJ5","IERhbWFzY3Vz","TWlub3I=","IEthZmth","7Jes","RmxvcmlkYQ==","X2NvbXB1dGU=","LmV4cHI=","IHBhcmFsbGU=","IERpYXo=","Y2ly","W3RhcmdldA==","IGpva2luZw==","IGdsb3I=","KHNldHE=","X2hhbmRsZXJz","SGFuZw==","IGZlcnI=","cmltaW5hbA==","CSAgICAJCQ==","ZW50aWVz","ZGVmaW5lcw==","LXRheA==","anNvbnA=","IFVQUw==","bWV0cm8=","X187Cg==","IFVnYW5kYQ==","XSkpOgo=","X3Rk","eGFl","bHc=","Lk9T","IExvZ2dlZA==","YWNpZA==","IE1heW8=","YXNwZWN0","IHZhZ2luYWw=","IGluaXRpYWxpemluZw==","IHN0ZXJvaWRz","ZmljdGlvbg==","R1JF","Z2VuZA==","IGxpYWJpbGl0aWVz","IExldHM=","TWVjaA==","KG5j","KGNoYW5nZQ==","IGNvbm5lY3RvcnM=","Oms=","IHRhc3Q=","ISIpOwoK","dGhpbmdz","cm9waHk=","bHVldG9vdGg=","IFNpZ25VcA==","LmN0cmw=","IHRoZXJlaW4=","b3JkYQ==","LmVzY2FwZQ==","aWdhdG9y","IHBldHJvbA==","IHNwZWNpbWVu","IGRlYnV0ZWQ=","LVBybw==","IGNyaXNlcw==","LmFkZFZpZXc=","64+Z","LWRvb3I=","IG1vbmV0","IG1pbGxpcw==","IHZpZXI=","SW50ZXJuYWxFbnVtZXJhdG9y","IGFkbWlucw==","IExhaXI=","emlu","Z2V0UXVlcnk=","dW1ibGVz","TElNSVQ=","IFZpZw==","X3Nvbmc=","PENoYXJhY3Rlcg==","Ojou","X2hvbQ==","X2Jw","IFN1cGVydmlzb3I=","c3VibWlzc2lvbg==","YWJpbGU=","IG5vaQ==","T3JDcmVhdGU=","IHBlZWw=","IG9uU3RhcnQ=","IHNlbnRpbWVudHM=","dmVoaWNsZXM=","IGNsYXNzcm9vbXM=","IHN6ZXI=","IGJlbmRpbmc=","IGxvbmdldml0eQ==","IGFjbA==","IEFsZXBwbw==","IFVN","IFJpY2h0","IG11bHRpcHJvY2Vzc2luZw==","RE9NQUlO","IiwiKw==","X1lFQVI=","IHNjcmFwZQ==","IHNvbGl0YXJ5","ICJdIjsK","L2Vycm9ycw==","7J6s","nOugpQ==","YmV0dGVy","CW51bWJlcg==","IExG","IEFjcm9zcw==","UHViTWVk","XCIi","IEV4Y2VsbGVuY2U=","IHVzYW5kbw==","IFVJUA=="
,"QWN0aXZpdHlJbmRpY2F0b3I=","X1ZPSUQ=","IGJyZWVkcw==","772l","dWVzdGFz","IFRyZWFzdXJl","dXN0cmFsaWFu","KGZhY2U=","IFRlbm5pcw==","CUludA==","IEhhbnNlbg==","57U=","Okk=","IOKclA==","R1JBWQ==","T1VTRQ==","IGhlcGF0","oO0=","QUlS","w7PFvA==","IHF1ZXVlZA==","dmluY2lh","IENocm9taXVt","IGNvbXBldGVuY2U=","dW5nYWw=","aWxsaQ==","IGdldEJ5","IEZpbmRlcg==","IGluY2FwYWJsZQ==","IHNhZGQ=","IGNpdGVz","IENodXJjaGlsbA==","U2Rr","TW9yZW92ZXI=","QXNwTmV0","KEZsb2F0","JHBhc3N3b3Jk","IENvbm5vcg==","LXNlc3Npb24=","X2Rt","Kikp","IGRldXRzY2g=","IE5Y","IHBlcmtz","X1NPUlQ=","X1RPT0w=","X1ZJU0lCTEU=","LmFzcA==","5oiW","IEJyZWF0aA==","RGV0ZWN0","IER1ZWw=","LmNtYg==","W2l0","LlNldEJvb2w=","IG5hcmNpc3M=","IGFiaWRl","IGVqZW1wbG8=","IOKElQ==","IG1vcm5pbmdz","IGNvbXB1dGVz","LnNzbA==","anQ=","IG11Y2hvcw==","X1NT","W2VuZA==","IGJhc2lu","IGFsZ3Vub3M=","IENyb2F0aWE=","bGluZXdpZHRo","KHRhZ3M=","KGhpZGRlbg==","w61jaW8=","IGFwYXI=","INC2","5LiO","LmZvb2Q=","IFJ1cmFs","IGJyZWFkdGg=","5b2x","KHNlc3M=","KyIp","IFBhc3Rl","IHNlcnZpZG9y","IEJpdFNldA==","IFRyYW4=","bGF1cw==","dmV0dGU=","ZXllcw==","IENMSUNL","IFZJSUk=","IFR1cm5z","IExlQnJvbg==","IE11ag==","IERlZw==","IEFkdWx0cw==","X3N1aXRl","cHJvY2Vzc2FibGU=","IFBIWQ==","Z2hlc3Q=","LkZhaWw=","IFNsYWNr","Y2Vq","XENhcmJvbg==","IHN1cGVyc3Rhcg==","IGhvbGRpbmdz","KGZvcm1z","ICcjJw==","TXVsdGlw","KCJbJQ==","LXNvbGlk","L3VybA==","LXRpZXI=","W2xlbmd0aA==","IFN0cmVhbVdyaXRlcg==","IE1hcmtldHBsYWNl","Z2V0dGV4dA==","X1RJQ0s=","IEZvcmdl","IGJsYWNramFjaw==","IERPRVM=","IE1hdHRlcnM=","d2F2ZXM=","IHdoaXNwZXJlZA==","IGx1c2g=","7Jik","ZGlnaXRhbA==","IHdyaW5r","IEhvZ2Fu","IHJ1c3RpYw==","LkFwcGx5UmVzb3VyY2Vz","IEhhcmR5","b3NvbWVz","QVVU","LlNUQVRF","IG5hcnJhdGl2ZXM=","CXN0b3Jl","Ymli","CVNjYW5uZXI=","IENvZHk=","XFJlcG9zaXRvcmllcw==","IHJldW5pb24=","YW5kdW0=","4oCZaA==","IHNuaWZm","TlNCdW5kbGU=","IGNvbXByZWhlbmQ=","X1VTQUdF","X29jYw==","VVJSRU5DWQ==","Sk5J","IHNwZWNpYWxpemluZw==","IHZpc2lvbnM=","IGRvbG9yZQ==","IHbDoQ==","IENoZXZ5","IFN0eWxlZA==","aW1wYWN0","YWxsZW4=","IGthcnQ=","IFRhYm
xldA==","c3R1ZmY=","cmVlc29tZQ==","0LDRgtC+0YA=","Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0K","X0FkbWlu","IGNlbGxwaG9uZQ==","IGF1dG9wbGF5","IGNhbWJpbw==","IG1hcml0aW1l","X0JPT1Q=","LXF1YXJ0ZXI=","IGxhdGluYQ==","IEFKQVg=","ZXF1aXY=","IEZyb250aWVy","IFhZ","fV0K","IFJvdWdo","LnByb3Rv","IGNvcnJlY3RuZXNz","IGZhY2ls","IFJlYWNoZWQ=","44Gd44Gu","VklT","LnBz","IHN0cm5jcHk=","IGRpZmZ1c2lvbg==","LnN0YXJ0QWN0aXZpdHk=","77+977+977+9","IGFjY29tcA==","QU1FU1BBQ0U=","aW1vbmlhbHM=","IEJsYXN0","YWJ5cmlu","IGRvbWU=","IGV4dHJhdg==","IHllbg==","IGN1bGluYXJ5","UFJJ","IENvbW11bml0aWVz","bmlk","X29wZXJhdGlvbnM=","Lmhz","IE1pbHRvbg==","IG5vaXNlcw==","QXV0b3Jlc2l6aW5nTWFzaw==","KGNpZA==","fQoKCgoKCg==","XX0sCg==","IERldGVjdGlvbg==","dGFibGE=","IGxpYmVydGllcw==","X0RZTkFNSUM=","d2dldA==","IFTDvHI=","IFBhc2NhbA==","VHJhbnNwYXJlbnQ=","RGVsYXllZA==","XSgp","IEhlcmJlcnQ=","PEFjdGlvblJlc3VsdA==","Y2hhbGxlbmdl","IG11c2hyb29t","Lmluc2VydEJlZm9yZQ==","IFJpbg==","IGh1bW91cg==","IGbDuA==","YXBpS2V5","YWxsb2NhdGVk","IGNvbmZlc3Npb24=","LiIsDQo=","CWFzc2VydFRoYXQ=","IFNPUlQ=","IExPUkQ=","IGV4cG9ydGVy","LnNldExldmVs","cG9rZW1vbg==","YXNodHJh","IGbDqQ==","dXJhdG9y","KE1TRw==","IHR1cA==","IEh1bGw=","IHlpZWxkZWQ=","LlN1YmplY3Q=","XFJvdXRl","IT8=","INGD0LTQsNC7","XFNlY3VyaXR5","LWFy","IGFsbGVnYXRpb24=","KFNldHRpbmdz","w6RuZGVy","IGVsbGlwc2U=","IFJldHJvZml0","IHJlZ3VsYXRpbmc=","IE1vbGx5","IExvaw==","X0N1c3RvbQ==","IFByb21v","aXNpbg==","IHJlc3VtZWQ=","IG1ldHJvcG9saXRhbg==","LmVycm9yTWVzc2FnZQ==","Oi0tLS0tLS0tLS0tLS08Lw==","Lm1s","c2NvcGlj","LnJlZnM=","YXB0b3Jz","IEluc3RydW1lbnRz","IHByb3BhZ2F0ZQ==","fS0+","IHBhc2Fkbw==","dGhhbms=","X0RlbGV0ZQ==","IEJyaWdodG9u","LHVuc2lnbmVk","5L2c6ICF","IGFzcGlyYXRpb25z","LWhvdw==","Um9zZQ==","PSgo","X25lZWRlZA==","X3BsdXJhbA==","PEFwcGxpY2F0aW9u","IFdFRUs=","IFVubG9jaw==","IFRFTVA=","U291","IHNjaGl6b3BocmVuaWE=","IHRyb2xs","IGNvbXBsZW1lbnRhcnk=","IE5FVFdPUks=","IGJsaXI=","IHByb2dyZXNzRGlhbG9n","IiUo","IEF0dHJpYnV0ZVN
ldA==","CXRz","Lml0ZXJpdGVtcw==","6K+d","IGVzY3JpdA==","dm91cw==","X3BsYWNlcw==","SEs=","IHNlZ3Vpcg==","X2Z3","IFJvdW5kZWQ=","IGRpc3Bvc2l0","6KeG","cGFybQ==","d293","U1RSVUNUSU9O","LmFsbG93","IENoYXJTZXF1ZW5jZQ==","CWV4dGVybg==","IHByb3NlY3V0ZWQ=","IG1vcnRhcg==","IEp1ZGE=","LW1zZw==","IGVzdHVk","LmdldERlc2NyaXB0aW9u","IHNvdw==","YW1icmU=","IHJvbWE=","RW5o","Ym9udXM=","IHNxdWF0","IGRpc3RyYQ==","ZWRJbWFnZQ==","IHBlcHBlcnM=","LXBlcmZvcm1hbmNl","LAoKCg==","LGZpbGU=","IE1JTUU=","X2NvbmNhdA==","QUJT","LWZhc2hpb24=","IHVuZGVyY292ZXI=","T25lVG9NYW55","IHJlY2xhaW0=","Q09QWQ==","IGJpbmRz","IFRhcGU=","IGdvc3NpcA==","IEVxdWl0eQ==","L0NhcmQ=","LmFjdGl2","J2Ft","IGRyYWluYWdl","PFNjYWxhcnM=","IG9uQmluZFZpZXdIb2xkZXI=","KCk/Lg==","IHNvcnJvdw==","IEli","dXB5","X1VVSUQ=","IENoYXJt","IEVsZWN0aW9ucw==","Lm9uRGVzdHJveQ==","IEludGVyZXN0aW5nbHk=","b3VuZGluZ0JveA==","X2RldGVjdGlvbg==","LWhlbGQ=","X3Vua25vd24=","IHJlZnJhaW4=","IG3DqXRvZG8=","IGVCb29r","RU5PTUVN","IGRhbmc=","UHJvZmVzc2lvbmFs","IGRpY3Rpb25hcmllcw==","L215c3Fs","IFNUVUQ=","IG1hc3Nl","c2NhcGU=","IGRyZWk=","Om5hbWU=","LmxvZ28=","U2lnblVw","IHRhaHVu","KHRoZW1l","IEZlbW1l","IGJvbWJlcg==","IEphZGU=","IFRheQ==","IHN1Ym1hcmluZQ==","X2NsYXVzZQ==","enljaA==","IHNpbXVsdGFuZW91cw==","IGNhc29z","LmJvb2xlYW4=","KGxocw==","IGNvbnRpbmVudGFs","LXNhbGU=","CWVudg==","IEN1dGU=","IEZhY3RvcnlHaXJs","YWJ1cw==","L3ZhbHVl","IGphZHg=","IHN0ZXJu","Pj4KCg==","IHN1cmZhY2Vk","IOyggOyepQ==","cGxhdHo=","CWVtYWls","Y2VwdG9ycw==","Ij4o","IGVwaWxl","6K+7","IERlYnQ=","5ZGK","Tk9Q","Imh0dHBz","Omo=","Rm9ybUl0ZW0=","X0xJQ0VOU0U=","LmdldERvdWJsZQ==","IEFnZW5kYQ==","CWZpbmFsbHk=","KGZpbHRlcnM=","KGF2","576O","QVBFUg==","IGxhdmE=","0LXRgNC2","KSkpKQoK","IGZhdWx0eQ==","X25t","IHRyYXZh","KEJpdG1hcA==","IHNwZWVkaW5n","PicpLg==","IHNjcmVlbmVk","X3JvbGw=","IE1hY0Jvb2s=","IEFVRA==","IGRpYWdub3Nl","LkdlbmVyYXRl","IF5e","IHN0cnM=","W1Rlc3Q=","IHJhbnNvbQ==","IERIQ1A=","ZWxkZW4=","IGludGVycHJldGF0aW9ucw==","KCldLg==","ZmxhdE1hcA==","IGxpbmVIZWlnaHQ=","X21vdW50","IFdpemFyZHM="
,"IHNsdXRz","ZWhsZXI=","b2RhbA==","IG1pbGl0aWE=","5bI=","ZWFybmVk","IG1pc2VyeQ==","aW50dmFs","ZnVuZA==","IGhpZGVz","IGRpYXJy","IFdlc2xleQ==","IHhtbQ==","IHF1ZW0=","IEFyYWJz","aWZ0aA==","YXRlZ29yaXplZA==","RGlzcG9zYWJsZQ==","UHVyZQ==","X05PVElGWQ==","c25pcHBldA==","IEdhcnJldHQ=","LnJ1bm5pbmc=","LndlaWdodHM=","ICgtLQ==","IGludmFyaWFudA==","5LqL5Lu2","IEFsbG93ZWQ=","ZGlycw==","IHBhc3Npb25z","IGxhZA==","IEZsdXNo","bWVudXM=","OmJsb2Nr","IGNvbXByYQ==","LmNob21w","YWxsb2NhdG9y","IGN1cmF0ZWQ=","IEtub3dpbmc=","IFBhdHRlcnNvbg==","IHRlbGFo","J2V4","IGRvb21lZA==","IHBoaWxhbnRo","b3R0eQ==","LnN0eWxlcw==","T3duZWQ=","IGFsbGVyZ2llcw==","PXBhcmFtcw==","b2Nlc2U=","aXRlbGlzdA==","IFNlbmRpbmc=","YmVm","b3JyYXI=","IE7Do28=","IEZhcmdv","IEx1Yg==","IENvbWJpbmVk","X2dpdmVu","CQkJCQkgICAg","IHJlY29uY2lsaWF0aW9u","UGF0dGVybnM=","YXphcmQ=","IGJpb21hc3M=","IEhvdXNlcw==","cmVzcHVlc3Rh","Y2Nv","L3RvcGljcw==","IFl1aw==","IHdlYWtlbmVk","X2NhbGVuZGFy","IG11bGhlcmVz","IE1hcmw=","IHNpbmU=","IFRpbA==","IFNvdWxz","IERldXRzY2hl","IEZPTExPVw==","IHBpcGVsaW5lcw==","IEJldmVybHk=","X0RJUFNFVFRJTkc=","IiM=","IFByb3Rv","LmJpZw==","IFNhdmluZ3M=","IFRhbno=","anVu","IEdhbW1h","IFNhZGQ=","IGFkdmlzb3Jz","IHJvYXN0","IHVudGVycw==","dWRpZXM=","X2xvbg==","LXBvaW50ZXI=","IEVsZW1lbnRSZWY=","XEJ1aWxkZXI=","ZXhhbXBsZUlucHV0","LndlYmRyaXZlcg==","ZGF0YVR5cGU=","IFF1aXRl","IENlbHRpY3M=","dWls","LWRlZmVuc2U=","YmlzaA==","IFVJV2luZG93","IFN1ZGRlbmx5","LmhvdA==","LnJlYXNvbg==","IGfDtnI=","QU1E","Lk11bHRp","YXV0aGVudGljYXRlZA==","cmVnaW9ucw==","Oyg=","0LDRgNCw0Lw=","IEtpcmJ5","JHJvdXRl","UFJFQ0FURUQ=","IER1cmhhbQ==","b3dv","IFBlcmZvcm1z","IGRpc3JlZ2FyZA==","bnN0","IFBvbHM=","IGdldFA=","Il06","LWNvbG9yZWQ=","KEtleXM=","IEFsbGVn","X21vZGlmeQ==","X2xvYWRpbmc=","c3RyYWluZWQ=","IGF0cm9j","X3Bocg==","PFNwcml0ZQ==","IHNhdGlzZmFjdG9yeQ==","bWFuc2hpcA==","LnBpcGVsaW5l","VG9ueQ==","IHRoaWVm","cG9sYXRvcg==","KGxvY2s=","YnVyc3Q=","IE9wdGltaXphdGlvbg==","IHN1cmZpbmc=","Illlcw==","IGRlc2NlbmRlZA==","5pI=","X0NsZWFy","IGNyaWVz","IEZyb3plbg=="
,"RElSRUNU","LUNvbg==","IExlaWNlc3Rlcg==","5aWz","T09N","PWRi","IGdldE1lc3NhZ2U=","PFN0dWRlbnQ=","X2JhdGNoZXM=","Lk1hc2s=","X2V0aA==","XCk=","IHNvbWE=","Q2F0Y2g=","W2No","T3duZXJz","aW5kbGU=","OmF1dG8=","LnZlcnQ=","aXZy","LnNldExvY2F0aW9u","IGZsdWVudA==","X0VORElBTg==","IENhcmxv","Y2VwdHM=","YWRkQWN0aW9u","Lm9hdXRo","PFVuaXR5RW5naW5l","cmVlbWVudHM=","LlNraXA=","PykKCg==","LmRlZmF1bHRQcm9wcw==","IGNhYmU=","IFNoZW4=","ZXJvc2lz","IFByb2ZpdA==","IHBvaXM=","X0NSRUFURUQ=","IHJlbW92ZUZyb20=","KHdz","P2FjdGlvbg==","KEZpZWxk","IGVycm9uZQ==","Lm1pbmltdW0=","IFJldHJpZXZlZA==","IGRhZG8=","IFBSSVZBVEU=","LXNwZWM=","IGd6aXA=","cGRhdGE=","IHBvc1k=","KGxvdw==","IHF1YWxxdWVy","L2Nsb3Vk","6rKM","KGNvbW1vbg==","IEFyYmVpdA==","b3JnYW5pc2F0aW9u","IHRpZHk=","IFJvbGFuZA==","KHBo","LnpvbmU=","IGdlbnRsZW1lbg==","xrDhu6Nj","5bGx","IGVuY2xvc3VyZQ==","IE1hbmFmb3J0","CUNvbG9y","U3RlbmNpbA==","Tmlj","IHRoZW9yZW0=","IFZH","IGNvbG91cmVk","VkJveExheW91dA==","dWxzaXZl","RHJhZ29u","Y2Zm","ZXRlc3Q=","ZW5zYQ==","b2ZkYXk=","LkF6dXJl","OlVJQ29udHJvbEV2ZW50VG91Y2hVcEluc2lkZQ==","X3VwZGF0ZXM=","IHRyZW5keQ==","dWdhcw==","d2Vha1NlbGY=","IHJpZGdl","aWJyaQ==","IOy2lA==","KENH","IE1vbmtleQ==","LndyaXRlSW50","LnRpbWVkZWx0YQ==","Vmlld0NvbnRyb2xsZXJBbmltYXRlZA==","IFByb3ZpZGVuY2U=","44GI","IGJsZW5kcw==","L1N1YnRocmVzaG9sZA==","IEFwcGw=","IGF0YW4=","IHJlbG9hZERhdGE=","dW1ib3Ryb24=","c3TDvHQ=","T0F1dGg=","IEdpdmluZw==","IOyEpA==","IEZpbm5pc2g=","Y2hlY2tpbmc=","LkVtYmVk","c2VxdWVsaXpl","IGluaXRpYWxpemVz","IE9zbG8=","2LY=","Z2V0RXh0ZW5zaW9u","X0FMVA==","KGJsYW5r","IGZhdGFsRXJyb3I=","IGRlbWlzZQ==","KioqKioK","IFhT","KEFG","IEVucw==","YW50aGE=","IFBPUg==","IG5pY2g=","Lk5hbWVk","IGdpZ2FudGlj","IE9ic2VydmF0b3J5","LlJlc29sdmU=","IFBheW1lbnRz","Z3VpbGQ=","IGN1cnJlbnRTdGF0ZQ==","PT09PT09PT09PT09PT09Cg==","IFNleQ==","cERhdGE=","IGRlYWRsaW5lcw==","IGNlbnRyYWxpemVk","IFNjaG9sYXJzaGlw","X3N1cHBvcnRlZA==","LmNocm9tZQ==","KCldKTsK","IGN5YW4=","IENhZ2U=","QXV0aG9ycw==","Xw0K","L29z","a2lt","ZGVl","LnRleA==","IHlvdXJzZWx2ZXM=","IG1n
cg==","IGFsaw==","LWluc3RhbGw=","IGRyYWZ0aW5n","IHJ1bW9y","IHN0YXR1ZXM=","UG9vbGluZw==","b2xpbmE=","QUFBQUFBQUE=","LyotLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t","IGV4dHJlbWlzdHM=","Q2FsY3Vs","aWdodGhvdXNl","SW5zZXQ=","KElOUFVU","IHN5bmNocm9uaXphdGlvbg==","aXZpcnVz","LmF4ZXM=","IEdhcA==","LUFu","X1RlbXBsYXRl","IGdhbWVy","IENyaWNrZXQ=","IGxpbnQ=","IGF1dGhvcml0YXJpYW4=","TlNVSW50ZWdlcg==","IHJlZG8=","IGFkaXBpc2Npbmc=","X0ZFVENI","Y2hlaWQ=","IEZhbmc=","LmluZGljZXM=","dG9uZQ==","0LTQtdC7","IHt7LS08","YnJhaGlt","IHNhbGE=","Z2V0Q29kZQ==","IGNvbW11bmljYXRlZA==","c3RhcnRzV2l0aA==","ZXJ0eg==","UmVhZGFibGU=","SXRlbUlk","b3JlZmVycmVy","Y3JlZGlibGU=","w6FyaWE=","IGNvbWJpbmVSZWR1Y2Vycw==","KiovCgo=","IGJsaXNz","IGFkb3Ju","ZGVwZW5kcw==","IFJPT00=","IGZyYW1pbmc=","ID8nLA==","YXV0eQ==","X3BvdA==","X3RhYnM=","RXhhY3Q=","LCIs","ICd9JzsK","IGFyYml0cg==","YWhyYWlu","LmdldFN0cmluZ0V4dHJh","ICRc","IG91dHB1dFN0cmVhbQ==","IGNvbW1lbmM=","YW51cw==","Y2h5","PEVtcGxveWVl","IGhleGF0cmlnZXNpbWFs","IG5hY2lvbmFs","KHNlcmlhbGl6ZXJz","X3B1dGNoYXI=","X1NBRkU=","ZW50aWFsQWN0aW9u","SXRlbVNlbGVjdGVkTGlzdGVuZXI=","LkRpc3BhdGNo","Q29uZmxpY3Q=","X2Fib3V0","b3NhdXI=","Qm91bmRhcnk=","IGNsZWFyQ29sb3I=","KExvY2F0aW9u","IE1PTlRI","IFRhc3Rl","LUdlbmVyYWw=","IFdBUg==","IGVyaGFsdGVu","LXNhdmluZw==","IGNvdXBsaW5n","LXRyaWdnZXI=","bW90b3I=","IHl5eXk=","IFBhdGVudA==","cHRv","IG1pc2RlbWVhbm9y","dmFzaW9u","IEFkbWlyYWw=","4LmJ4Liy","X1BXUg==","IGRldmFzdGF0ZWQ=","Zm9saW9z","SVRVREU=","dXJyZWN0","IHJvYm90aWM=","IFNhbmN0","IEhhd2FpaWFu","LlJvdXRl","LWNvbmRpdGlvbg==","IHJr","LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioK","Y3JlYXRlRWxlbWVudA==","IEtvcA==","aWduYW50","LnJvbGxiYWNr","IHNhbHVk","Xycs","IEFOU0k=","RXhjZXB0","IERyYXdhYmxl","LlV0Y05vdw==","Ijpbewo=","IGtvbGU=","THVh","IEJlbGlldmU=","Q29tcHV0","IGhhbGx1Yw==","IFNpZ25z","cnN0","Lmh1","IEtOT1c=","V2k=","IEJyYXNz","IFJhcw==","QGhvdG1haWw=","IHNlZGltZW5
0","IGFwaw==","IOyDgQ==","X3JlZ2lvbnM=","IHBvZGl1bQ==","PEJvb2s=","0LbQtQ==","IHNpeHRlZW4=","IEFsaWFz","IGluZnJhcmVk","IFZhbmRlcg==","IExlYWRpbmc=","dWNpbmc=","LDosOg==","X2hvcg==","d2F0","IGTDqWNvdQ==","X1dpZGdldA==","U291bmRz","X25hdmlnYXRpb24=","IHNjaG5lbGw=","KGdlbmVyYXRvcg==","dWNlbmU=","IHJlbWFrZQ==","SVB2","IHLDqWFs","X0lOQ1JFTUVOVA==","IGh5cG90aGV0aWNhbA==","X2FuZw==","IG9mcw==","ICEK","LmNvbXBsZXRlZA==","R2V0VHlwZQ==","IGtvbW1lbg==","w6FsaWRv","YWRkT24=","IHrFgg==","VUxB","X2luZGljYXRvcg==","J10KCgo=","YXBhY2hl","X1NlbGVjdA==","IEdyZWVuZQ==","V2hhdHM=","X2FuaW0=","IHJlcGV0aXRpdmU=","bXVjaA==","IFRocmVzaG9sZA==","IGxm","KENhdGVnb3J5","Y29uZQ==","TWl4","X01FVEFEQVRB","YXlzaWE=","TmVpZ2hib3Jz","CQoJCQo=","SVBIRVI=","IEZyYWc=","IENlbGxz","IG5hbWVzcGFjZXM=","KGJhY2s=","IFJlc3RhdXJhbnRz","c3Zj","INC70Lg=","b3RlY2g=","LXNs","pb8=","IFdU","IFJlZHVjdGlvbg==","IGRvdHRlZA==","CWZvdW5k","IFRFQU0=","Qm9ybg==","IE11c2g=","IENvbXBhcmFibGU=","IGhpdGNo","QVRP","IG1heEhlaWdodA==","YmVnaW5UcmFuc2FjdGlvbg==","w612","X2Ju","IGhlcmQ=","IHJldmVyc2Fs","IEhvbmQ=","ZGVsaW1pdGVy","IGNvbmZ1c2U=","IGhvcHM=","IGNlbnRyb2lk","IGNvdXJ0cm9vbQ==","LmRlY29yYXRvcnM=","IG1waQ==","IEltcHJvdmVk","SU5ORVI=","IEJhbmdhbG9yZQ==","IFRhbWI=","IGJvYXN0","KCkpKQ0K","IGlsbGljaXQ=","IE1vcm9jY28=","Z3JlZ2F0b3I=","X3Jlc3VtZQ==","IGNyYWNrZG93bg==","IHBvcnRyYWl0cw==","L2hpZ2g=","KFwn","IGF5dWQ=","X2ZlZWRiYWNr","IGNhdGU=","L2F2YXRhcg==","IGhlYg==","UG9pbnRDbG91ZA==","IOWSjA==","IDwhWw==","IGdldFJlc291cmNlcw==","fTp7","T3BlcmF0aW5n","IEZvZw==","CXRhYg==","IFJlc2VhcmNoZXJz","IGZhYnJpY2F0aW9u","LmRhdGFzZXRz","IENhbXBv","IEthdWY=","IGRsbA==","bGlndA==","XSkpOwoK","c3RlbGxlbg==","QUNLRVQ=","bHZs","IEdsb3J5","LmRhdGVUaW1l","IGNvbW11dGU=","IG9uQ3JlYXRlVmlld0hvbGRlcg==","IFhFbGVtZW50","IFRva2Vucw==","PHRoZWFk","X3BpY2s=","7KQ=","dm9u","ZGVwYXJ0dXJl","KHJlbmRlcmVy","cGhvbmVOdW1iZXI=","KFBlcnNvbg==","Z2VuZXM=","IExhcnM=","ICl7Cgo=","IEpzb25SZXN1bHQ=","IG1ldG9kbw==","Vk9LRQ==","LmdldFVzZXJJZA==","QWNjZWxlcg==","CXJlcXVpcm
Vk","IGNoYW1waW9uc2hpcHM=","QnVpbGRDb250ZXh0","L3Rhc2s=","L3JlbGVhc2Vz","Q2F0ZWdvcmlh","X292ZXJsYXk=","IHNjYXJjZQ==","X2xpbQ==","bmdy","YWhsZW4=","IEFydGlmaWNpYWw=","c3ByZWFk","IGJvd2xpbmc=","LmFuYWx5c2lz","U01UUA==","CXBhc3N3b3Jk","IGJhdGhz","XSkpewo=","Y3VycmVudGx5","YWNpZW50ZQ==","X3NlcGFyYXRvcg==","IGRlYmVy","IERpc2FibGVk","acOocmVz","IOKV","X3Byb2Nlc3Npbmc=","IHByb3Rlc3Rpbmc=","IFJPVA==","Z3JhYg==","INC30LDQug==","IHByb2FjdGl2ZQ==","d29yZHByZXNz","IFNldmVy","aW5kZW4=","IHdpa2lwZWRpYQ==","KXsNCg0K","X3dpbmRvd3M=","aXNsYXRpb24=","IHVucmVzdA==","IGRpc21pc3NhbA==","Lk5VTQ==","X0ZBU1Q=","aXNzdWVk","IEZBQ0U=","X3VuZGVy","IHBsdWdnZWQ=","IOWw","IGLEmWR6aWU=","IElDQw==","IGNvbWJ1c3Rpb24=","IGtpc3NlZA==","IHN0YXJyZWQ=","IFdhdHRz","IHNwaWVsZW4=","LXB1cnBvc2U=","IEV2YWw=","YXJnZXM=","LHJlc3VsdA==","dGVjaG5vbG9neQ==","IG5hdGlvbmFsaXR5","aWN1cw==","IE51Zw==","INGC0L4=","CQkJCQkJCSAg","Y29sbw==","IGdhc3Rybw==","YW50ZWVk","T0xJRA==","LmJpYXM=","X3RlbGU=","Lmluc3BlY3Q=","IHZlaWw=","LmZvb3Rlcg==","IG5lZ2xpZ2VuY2U=","IGp1ZGdtZW50cw==","Um9vbXM=","eW5u","CWNvdW50ZXI=","b2NjdXBhdGlvbg==","IOeUnw==","dW5hcw==","ICheKSg=","TGFtYmRh","ZmVs","LlBhcmFtcw==","INC00L7QsdCw0LI=","c2V0TGF5b3V0","IGRlcG9ydGF0aW9u","IGxvY2FsT2JqZWN0","IFBoYXJtYWNldXRpY2Fs","Y2VwdGl2ZQ==","IE5vbWU=","RXF1aXBtZW50","RmFu","VW5pdmVyc2Fs","CXNvY2tldA==","IGdyaW4=","IGV4cG9zZXM=","IGhhYmVy","IHNpbmNlcmVseQ==","IGNhbXM=","IG3DvA==","ZW5pYQ==","RW1lcg==","Q3J5cHRv","U2xvdw==","KHhocg==","IT0o","LXNlcnZpY2Vz","IFBX","IHByZW5kcmU=","IG3DpGRjaGVu","ZW1vbnM=","0L7Qt9Cy0YDQsNGJ","Lk1hbmFnZXI=","7Jk=","IGdyYWY=","LXJh","bWV0cmljYWw=","L2Zs","IGNlbWV0ZXJ5","Z2Vucw==","IHDFmQ==","IE15U3FsQ29tbWFuZA==","LVRv","IHbDpQ==","IGFpcnN0","b21lbnR1bQ==","IHNlcnZv","bWlsbGlvbg==","IE1pcmFuZGE=","IlNoZQ==","IGFkdm9jYXRpbmc=","LWNhcHRpb24=","IEF0dHJpYnV0aW9u","IHdlbGNoZQ==","X3ZlbmRvcg==","CVN0YXR1cw==","YXJyaXM=","IHByaW50aw==","IiwiIw==","IHJlbGF0aXY=","aWZmZXJlbmNlcw==","aXp6ZXM=","IGRlY2ltYWxz","IFByb3Y=","Lm1heGltdW0=","QXJu","IGh
lbGljb3B0ZXJz","X0JPVFRPTQ==","Y2h1cmU=","b2Rpbmdz","Jyg=","IikpKTsNCg==","KGJlYW4=","LmZk","RnVuZA==","IGhhbmdz","YXBwaWQ=","L2tlcm5lbA==","LnBvaQ==","Lk1pblZhbHVl","LXZhbGlkYXRpb24=","THVrZQ==","Y2Rm","IEZ1bmVyYWw=","IFNhbXBsZXM=","CWRl","IHRvYXN0cg==","IHRheGFibGU=","IGNsdXN0ZXJpbmc=","ICdcJw==","IHJlc3RyYWludA==","ZWNlZA==","Y2hhaW5z","44CC77yI","X0dSQVBI","IGZ1ZWxlZA==","6ZyA","SHA=","5aSN","VGlsZXM=","IGF1bnF1ZQ==","SkM=","IGhvc3RhZ2U=","IEVzaw==","IG1hdg==","IGdlc3Rpb24=","IGJhbm5lcnM=","fXsk","LmludFZhbHVl","LiciCgo=","X01BVFJJWA==","IGNlYXNlZA==","IEdPRA==","X0NBTUVSQQ==","LkFsbG93VXNlcg==","dHJhY2tlZA==","Q29vaw==","YmFpcnJv","KGNvbXBhbnk=","IHZpZXdwb2ludA==","LmdldFdyaXRlcg==","IE5ldHM=","d2l2ZXM=","ICgpKQo=","ZXhhbXBsZU1vZGFs","CWNoaWxk","IG15dGhvbG9neQ==","IC8vIg==","X2F4ZXM=","aWJvbGQ=","LkRhcms=","IE1heHdlbGw=","IGdwb2ludGVy","b2xpY2l0dWQ=","QmF0","dWxuZXI=","YmFsYW5jZWQ=","bWFpbGVy","IGNvbnRlbXBvcg==","5omL5py6","KCJfXw==","ICIpIg==","cmVhcg==","IEh1YW5n","XScpCg==","16k=","RlRB","IENhbGxpbmdDb252ZW50aW9u","IE91dHB1dHM=","UGs=","LlJlZmVyZW5jZQ==","bGVjdHVhbA==","ICk6Cgo=","IGJyYWNlbGV0","dWdlcg==","CUVycm9y","U3dlZXQ=","KCIvIik7Cg==","aHg=","IHVucmVhc29uYWJsZQ==","SW50ZXJwcmV0ZXI=","IGxvZnQ=","X3Byb2R1Y3Rv","IHNvY2lldGFs","LlBhcnNlcg==","IEFkYXB0","LmZvbw==","KHdoZXJl","LkZlYXR1cmU=","IFlhbWFoYQ==","Z2xhc3M=","Rm9yZ2U=","IHByb2hpYml0cw==","IGNhcGFjaXRpZXM=","IO2VqOyImA==","IHBlcm11dGF0aW9u","IGlobQ==","Rmxk","ZWxpYWw=","PT09PT09PT09PT0K","QENvbmZpZ3VyYXRpb24=","IGdlYXJlZA==","aW9zbw==","aWVzdGE=","dHJhbnNsYXRpb25z","SW5wdXRDaGFuZ2U=","UG9wdWxhcg==","IFBMVVM=","IHZm","X0ZyZWU=","YmJveA==","IGNhdXNhbA==","UElMRQ==","IHNjaMO2","IGlyb25pYw==","TWly","LkA=","5Y2X","IOiH","UmV3","dWxlbmNl","Zmxlbg==","IGNhbkFjdGl2YXRl","LXJlc3BvbnNl","IGFjY2VudHM=","aWdub3JlZA==","wrBG","LkRlcGVuZGVuY3lJbmplY3Rpb24=","CXBvaW50","IGNvbnRpbmdlbnQ=","IHNxdWFzaA==","IHBhcm1z","IENlbWV0ZXJ5","IGRlbHRhVGltZQ==","IERPUw==","IHZhbmlzaGVk","0LDRgNCw0LzQtdGC","IERQUw==","dGZvb3Q=","
IFp1cw==","X0lOU1RBTEw=","R0FO","IGFyYg==","IG11bmljaXBhbGl0aWVz","SW50b0NvbnN0cmFpbnRz","QXV0b3Jlc2l6aW5nTWFza0ludG9Db25zdHJhaW50cw==","LGltYWdl","X2lnbm9yZQ==","IGRhbmdlcm91c2x5","cXVpc2E=","cGx1Y2s=","IGhhcnVz","dXBwZQ==","SHR0cEV4Y2VwdGlvbg==","QnJhY2tldA==","LicnCgo=","IFRvbA==","IFZpZXdlcg==","emJvbGxhaA==","LkNvZGVBbmFseXNpcw==","w6xuaA==","IGNvcnJlY3RhbWVudGU=","LmRh","IEFsZ2Vy","15A=","YmF1bQ==","IFBhbnRoZXI=","cGFydGljaXBhbnQ=","5b+F","LXN1cA==","IGVtdWxhdG9y","IGZhZGluZw==","IFdvbHZlcg==","Y3JlYXRlcw==","IGJvb2tpbmdz","LlF1ZXN0aW9u","p+ihjA==","IHN0cmVzc2Vz","IHJld3JpdHRlbg==","LlBJUEU=","ZWRlcw==","IGNiZA==","IjoiLw==","IGVuaGFuY2VtZW50cw==","X3N5","QklO","IFNsaXA=","SW5zcGVjdA==","IFdlZw==","IGNvbmdyZWdhdGlvbg==","IF86","X3Jt","RnJhbWVidWZmZXI=","ICcmIw==","IEZhbGxvdXQ=","SXNSZXF1aXJlZA==","IFBlYXJzb24=","IEZBQ1Q=","IHJlbGll","CWJveA==","IFNoZXBoZXJk","IFdpa2lMZWFrcw==","IENvbGxlY3Rvcg==","IHJlc2l6ZWQ=","bWV0aG9kTmFtZQ==","IGV2ZW50VHlwZQ==","IEF0aGVu","RGVzY3JpcHRvcnM=","IGJlcnM=","LW9wZXI=","IEluaXRpYWxseQ==","5aE=","X0JUTg==","ICAgICAgICAgDQo=","w6Fi","X2NhbXBhaWdu","X3dhdGNo","Rm9yZA==","LWRhdGVwaWNrZXI=","IHZpc2M=","IHNhdHU=","X3Ntcw==","IGNvbnRhZG9y","LXN2Zw==","IERPSQ==","JGFyZ3M=","IGtub2I=","LkJPTEQ=","IGRlYmF0ZWQ=","aW1ncw==","c29ja29wdA==","dHJ1dGg=","IEZlZXM=","IGhXbmQ=","X2Zvb2Q=","IGFicmFz","IG5vdGlvbnM=","IFRvZA==","OmNyZWF0ZQ==","IENvbmZsaWN0","VXN1YXJpb3M=","T1RPUw==","IG1zbQ==","S0hUTUw=","KFso","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","IH1d","d2l6YXJk","IG1pZW50cmFz","IGRhdGFMaXN0","IGVtZXJnZXM=","xINuZw==","LlJlYWRJbnQ=","UEdB","SUxMSVNF","SUVudW1lcmF0b3I=","KHR1cGxl","Q2hyaXN0bWFz","TG9va0FuZEZlZWw=","b2dlbmVyYXRlZA==","ICMKCg==","Y29udHJvbGxlZA==","IGV4cXVpc2l0ZQ==","IGFjZXN0","UmVhZFdyaXRl","R2Fpbg==","44CN44CM","IGNvcHlyaWdodGVk","IGRvb20=","LlRhYmxlTGF5b3V0UGFuZWw=","IERvcnQ=","IGNoaWxp","IHdlcms=","IEVWRU5UUw==","IEJlYWNvbg==","IHNoaXBtZW50cw==","IHNlYmFnYWk=","dX
Bvbg==","dXRvbQ==","LmNvbnZlcnRlcg==","LkRyb3BUYWJsZQ==","PXt9Cg==","Zmlj","fgoK","IGxlc2JpYW5z","X25h","Rm9yZWlnbg==","CXRoZW4=","L21z","IG9yaQ==","Z2V0UHJvcGVydHk=","CXNucHJpbnRm","aGVzaW9u","44Gk","In0sIg==","IGFjcnlsaWM=","UGVycw==","QEVuYWJsZQ==","SXNs","KENhcmQ=","LlN0YWNr","TGljZW5zZWQ=","X0dVSUQ=","OnRpdGxl","IGh1c3Q=","IHByaW5jaXBhbFRhYmxl","YW5pdGl6ZQ==","L2VtYmVk","IGVuc3VyZWQ=","IEVHTA==","2YjYsQ==","IOWIhg==","LywK","IGZ1bmRyYWlzZXI=","S2V5TmFtZQ==","IG1hcmNoZWQ=","X1ZBTFVFUw==","IFNjZW5hcmlv","IG1ldGlj","X2Fzc29jaQ==","IFBhc3Rvcg==","CQkJCQkJCQkJCQkJCQkJCQkJ","ZXJhdGU=","IGludml0YXRpb25z","cXVvaXNl","IGJsYW1pbmc=","IGRhcmluZw==","VU1NWQ==","IHJpY2hlcg==","ZW1ha2Vy","IElkZW50aWZpY2F0aW9u","IOyduA==","IEJpbmRpbmdGbGFncw==","Y2hhcw==","IHJlc2lsaWVudA==","X3Bn","IHJlbGVn","IElSQQ==","U1RF","IHRyYWN0b3I=","LWxvYWRpbmc=","IFByZXZpb3VzbHk=","IFZhY2M=","L2Jl","IG7DpXI=","IHVybGVuY29kZQ==","IE5vcmZvbGs=","LlJlbGVhc2U=","IE5ldXRyYWw=","5Lit5Zu9","IEFybGluZ3Rvbg==","IGFsbGVnZXM=","IFdyaXRlcnM=","VGVzdGVy","IFJhbGx5","IGPDoQ==","CVByaW50","IOKHkg==","IFVzZXJDb250cm9sbGVy","IFNlZWtpbmc=","LlZBTA==","TGlzdE5vZGU=","X2Zm","IFBoaWxsaXA=","RkFDVA==","IGNhcmFtZWw=","IE11bHRpcA==","IENvbXBhcmVk","IFNlcmJpYQ==","n7M=","IHJldml2ZQ==","IEthbnll","IHZlcmdl","IEJ1bGdhcmlh","Z2V0Qm9keQ==","IHw+","Y2VwaA==","LkRhdGVUaW1lUGlja2Vy","LiI7Cgo=","IFRpZQ==","LGl0ZW0=","IG1lbm4=","R2Fz","b2NoYQ==","X3ZpcnR1YWw=","IG1hc3RlcnBpZWNl","X3NlcXVlbmNlcw==","TFRF","IFN1Ym1pc3Npb24=","Q2FsbGVy","JFw=","U3BvcnQ=","YWd1cw==","Q29uc3RyYWludE1ha2Vy","IGNvbG9j","IHdpZw==","INCj","CUFycmF5","TG9va3M=","IEdUQQ==","LnN0ZXBz","YXRjaGV3YW4=","X3Jhbmdlcw==","ZXh0QWxpZ25tZW50","IEJyZW5uYW4=","IGFic3RyYWN0aW9u","dWxlckFuZ2xlcw==","Lm1pc2M=","IGFudGlib2RpZXM=","IGV4cG9uZW50aWFs","IENIQU5ORUw=","ZXhwZW5zZQ==","J3k=","IGRldGVjdGl2ZXM=","IHB1cnBvcnRlZA==","WVNURU0=","IHJhZGlvYWN0aXZl","IExhdGluYQ==","LkVuY29kaW5n","LlRBRw==","eGlu","RGVncmVl","dXJhY2lvbg==","cHJpY2Vz","IFJlZmVyZW50aWFsQWN0aW9u","IHJhcml0eQ==",
"IHBpbGVz","Z2VuZGU=","X3Byb2plY3Rz","X2dsb2JhbHM=","LnN0YXJ0VGltZQ==","IOq1rA==","U0VDVElPTg==","X3B1Ymxpc2g=","RmF1bHQ=","RERM","X3ByaW9y","TW9t","IHRoaWNrZXI=","IHNlcXVlbGl6ZQ==","IGVzc2VudGlhbHM=","c3RyYXM=","aW50cg==","PigoKQ==","Lm1hbmFnZW1lbnQ=","ZWls","6Zet","QXdhcmU=","LkNpdHk=","IEFyYml0","X0RN","X2tleWJvYXJk","TE9iamVjdA==","LXdlYnBhY2s=","IE5ld3BvcnQ=","IHByaW5jaXBhbENvbHVtbg==","bGVnYW50","IHBhbGxldA==","IGZyYWN0dXJl","IGdtYWls","Lk1ldGE=","QWJvdmU=","LktleUV2ZW50","aml0","X21hY3Jv","X1BVU0g=","4bup","L2NvbnRyb2xsZXI=","5Yqg6L29","IHN1cGVyZmljaWFs","ZXh0ZXJpdHk=","IG1lbnNhZ2Vt","V2luZA==","aXN0b24=","Lm9wZW5hcGk=","0LjRgNC+0LI=","IFNlcmlhbGl6ZXI=","dWN0aXZl","IHphcg==","UGxhY2Vz","LlN0YXRpYw==","QmE=","IGluYWR2ZXJ0","IEluZG9uZXNpYW4=","X0lQVg==","KGhvcml6b250YWw=","IGdldFRpdGxl","aWRlcHJlc3M=","IENvbnNvbGVDb2xvcg==","aXBlcnM=","JG91dA==","IGZlc3RpdmU=","IGV2ZW5pbmdz","LkdldERhdGE=","dWl0a2E=","IE1hbnVhbHM=","dXNzZWQ=","X01heA==","LkNoYXQ=","IEFpcmNyYWZ0","PWNvbQ==","Rk9VTkQ=","YXBybw==","IHRyZWFzdXJlcw==","X2FsaXZl","IGdhZGdldA==","ZWtpbmc=","QnV0dG9uRG93bg==","QnJvd3NhYmxl","LlBFUk1JU1NJT04=","UEFTU1dPUkQ=","IEhBU0g=","ZsOp","XFRlc3RDYXNl","TE9TUw==","b3RoZXJz","LEo=","IGFzc2hvbGU=","d2Vyaw==","IG3Dow==","Lmll","ZXZpbA==","a29udGFrdGU=","Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8K","PXN5cw==","CWxvY2s=","LS07Cgo=","X0ZVTg==","RmlsbENvbG9y","w7Nh","cHJlbmQ=","IGNvbXByZXNzb3I=","TW90aGVy","IEFyY2hlcg==","LmdvdG8=","IHfDvHJkZQ==","IGJhbWJvbw==","77yO","IFRyZWVz","IGJ1bXBlcg==","IHNhdXNhZ2U=","IEVsYXN0aWNzZWFyY2g=","IGhvcml6b250YWxseQ==","IEd1bA==","SW1tdXRhYmxl","IGxvc2Vy","IGFib3J0ZWQ=","LWRlbW8=","IEhhdGNo","IHVuZGU=","IHByb2Nlc3Nv","LWNhbGw=","SW5jb21l","5YM=","X3JldHVybnM=","J10uIic=","KHN3","Q0JT","YW1pbGllcw==","IFlvdXJzZWxm","IEhvbHQ=","Lk1PTg==","4KeH","0YjQtQ==","YW5vbg==","IEZvbnRBd2Vzb21l","cHJvZHVjZXI=","anI=","IG1hdQ==","CWludGVy","IGRpc2hvbmVzdA==","IG1hZ25h","IENvbGxlY3RpdmU="
,"IHZyYWltZW50","IGNob2l4","c3RheQ==","IHdlbGRpbmc=","cmlzaW5n","LG1pbg==","IEZhdGU=","Z2xvYg==","UkdCQQ==","IGRldHRl","VmVu","IGVtYmFycmFzc21lbnQ=","LkRFTEVURQ==","Z3JlZ2Fy","LXJlbmRlcg==","KGJ1Y2tldA==","Ij4KCgo=","LndhaXRLZXk=","QnVzeQ==","IGRpZmZlcmVudGlhdGlvbg==","IENTVA==","LkNvbnN0YW50","IGxpbmVOdW1iZXI=","KG1hdGNoZXM=","IHdlYnNvY2tldA==","IGJhcnJlZA==","IHB1ZWRlcw==","TW9ubw==","Q09SRQ==","SUlE","ICAgIA0KDQo=","IHDDumJsaWNv","bGVhbmluZw==","IGNsZWFuc2luZw==","IGNyaXM=","IERldmlscw==","X1NFVFRJTkc=","dW50YXJ5","Lik7Cg==","CiAgIAo=","W2N1cnI=","dHN5","IEFsZXhpcw==","cml0ZWw=","IHBldHJvbGV1bQ==","LnByZXByb2Nlc3Npbmc=","bWF0dGVy","Rm9yUmVzdWx0","LWxpY2Vuc2U=","IHRyYXZlbGxlcnM=","IERpc3BhdGNoZXI=","ZW5uaWZlcg==","IGRpZ2VzdGl2ZQ==","UEVE","aGliaXRpb24=","TUFTQ29uc3RyYWludE1ha2Vy","IFdhdHQ=","QmVuZWY=","LnNldFZpZXc=","ZHRv","VEVF","IFBlbG9zaQ==","X0VYVFJB","IG1lZGFscw==","eGhy","Zm9yZWNhc3Q=","IG5hcmdpbg==","b3Vucw==","LWZpbGw=","X0NVUlNPUg==","IHN1cGVydmlzZWQ=","IHR1cmY=","IEVkZ2Fy","UE9TSVRJT04=","IGNhdGVnb3J5SWQ=","4ok=","X0VS","4bunYQ==","U2hvd24=","Lmxs","X1BPTElDWQ==","KCksJw==","IFByZXY=","IFN0cmluZ0ZpZWxk","CUdsb2JhbA==","YXNzZWQ=","VGhyb3VnaG91dA==","b3N0cmluZ3N0cmVhbQ==","LmF3dGV4dHJh","IHNsb3Blcw==","IFNlcXVlbnRpYWw=","IGdpb3Ju","IHplbGY=","IHZlcnNhdGlsaXR5","bGVuZWNr","LmNnaQ==","IGRvdWJsaW5n","IEJhbmdrb2s=","IGJ1dXJ0","IHVzdcOhcmlv","c3R1ZGlv","IGpldW5lcw==","IG11dGVk","IGlwcw==","X2ZyYWN0aW9u","JiYo","IHN0dW50","Jyk7Pz48Lw==","IExpZ2E=","IHF1YWxpdMOp","QXNzaWduYWJsZQ==","IHdvcmthcm91bmQ=","IHNwdXI=","IHNsZXc=","X0dF","IEFncmljdWx0dXJhbA==","IHJlbGVudGxlc3M=","KFF1ZXJ5","IFNlY3Rpb25z","IHJldmlld2Vycw==","UmFpbg==","ZGxn","YXNzZXJ0RmFsc2U=","IG5vbWluZWVz","X18pLg==","LmR5bmFtaWM=","IFBCUw==","Q2hhbmdpbmc=","IHNsaWdodGVzdA==","IE1hbmc=","fT4NCg==","IGV2YXBvcg==","YmFibGU=","IFBSSUNF","IOaz","bHVjZW50","IHZhbXA=","IFRlY2huaWNpYW4=","IHVuaXF1ZW5lc3M=","TWVz","dXJiYW4=","LnBhcmFtZXRyaXpl","IFJlcGxheQ==","U2Vzc2lvbnM=","ZW1icg==","LUFtZXJpY2Fucw==","X1BST1hZ
","IHBpYW4=","IHRyaWU=","IERlc3RydWN0b3I=","R2FtZVN0YXRl","IElNRg==","Y2hpbg==","IHBvcnRl","IFN3YWw=","5Z+O","U3Vic3RyaW5n","aW1pbmc=","L0xpYnJhcnk=","IGZyaWdodGVuZWQ=","d3JpdGVz","IHJlY3Vyc29z","YXJSZXN1bHQ=","X0lOSVRJQUxJWg==","IEJhZGdl","X2NyYw==","RWlnaHQ=","IERJU1RJTkNU","IHRocm8=","QFhtbA==","IExlZ2VuZGFyeQ==","LXR3aXR0ZXI=","X2Vhc3k=","ICsrKw==","KERBVEE=","LkxvY2FsZQ==","IGvDpA==","IG51cnQ=","IGNydWlz","X2lvcw==","IHNlbnNpbmc=","X0xpbmU=","CiAgICAgICAgICAgICAgICAgICAgCg==","cG9uZw==","b2xlb24=","IHdpbGRjYXJk","55So5oi35ZCN","IGJlZ2dpbmc=","Um9k","IMOO","X0NFTEw=","UmVzZWFyY2hlcnM=","LnNlbGVjdG9y","X2luZw==","IGFzcGlyaW5n","IGltbW9ydGFs","IHltaW4=","X3JvYm90","IHBsdXI=","QlRD","IERJRA==","IHBpZXJjaW5n","KnU=","X0RFRklORUQ=","IFRoaQ==","aXRhaXJl","KG1lZGlh","LW9ucw==","IGNoZWZz","ICIqLg==","L0FQ","IHJhem9y","IHNlYXJjaERhdGE=","ID0m","IOOAgg==","IG1vdXJu","dGluZ2hhbQ==","IG9saQ==","IFZlcm5vbg==","X1JT","nuaApw==","IGbDoWNpbA==","YW5nZW4=","Y2VsYWlu","IGFpbA==","bGVzdA==","IFFDT01QQVJF","Z2Fpbg==","IM61","IEtvYg==","IEZhdWx0","X2NvbmZpZ3M=","57uT5p6c","Lis=","Y2FsYXI=","KGNvbG9ycw==","TXVs","X0FSVA==","IGV4cGVyaW1lbnRpbmc=","ZXJtZW4=","IEFuZ2xv","LkZpeGVkU2luZ2xl","U2Vh","IGN0eHQ=","LnNsaWRlcg==","Q29sbGFwc2U=","R3JleQ==","IGZsZA==","LXByb29m","LmNhcGFjaXR5","Z2V0UGFyZW50","IENvbXBsaWFuY2U=","IGJ1cmds","LXJlYw==","IG92ZXJ3cml0dGVu","TVU=","IHJvdXRlcnM=","CU1vZGVs","IGZhbnRhc2llcw==","YXZpYW4=","X3ByZWM=","IFNjYW5kaW4=","IC8vPA==","L29jdA==","IGNlcmVtb25pZXM=","TW9udGhz","dW5keQ==","IHF1ZWQ=","IE5vdQ==","IFZpYnI=","LnJnYg==","IGNpdHJ1cw==","IGJyYWNlcw==","LXVwcGVyY2FzZQ==","Z2V0VGFibGU=","IGRvcG8=","IEtlcnI=","X0NISUxE","LWNsb3Vk","CU1hdHJpeA==","IGdhcmRlbmluZw==","U2luZw==","YWxtb3N0","UmVxdWlyZW1lbnRz","dWd1YXk=","KFByb3BlcnR5","c3Vic2NyaWJlcg==","RkFTVA==","cmVhY3Rpb24=","KGxw","KX0pCg==","YCku","LndhbGxldA==","X2V4Y2hhbmdl","Lk1heGltdW0=","IFZlcmI=","4pSB","KCk8","77ybCg==","Uk9U","Q0FSRA==","dWJpdA==","e0A=","X2tlbA==","IFRvb2x0aXA=","TXlTUUw=","TWFpbkFjdGl2
aXR5","YXJm","IG1hbGlnbg==","IHNlaW5lbg==","YXBpc3Q=","IDwl","TWV0aG9kSW1wbA==","TWls","IE1pY2s=","LmRlcGVuZA==","PElE","IHByZWRpY3RpdmU=","IEFQUExJQ0FUSU9O","bGVm","ZGltZW5zaW9ucw==","IGNvbm9jZXI=","L2NvbmY=","IFRyYWN5","Rm90bw==","X3JlbWFpbmluZw==","PWZpbGU=","IHBhZ2VJbmRleA==","IFBhcmlzaA==","IHRleGFz","IE1BR0lD","IEhldw==","ZGlmZmVyZW5jZQ==","IGFsdHVyYQ==","Y3Vt","CWRhdGFUeXBl","IGNhcmFjdGVyZXM=","YXZpb3Vycw==","IFZPSUQ=","6L+R","UFVCTElD","Qmlv","IHN0cmluZ0J5QXBwZW5kaW5n","UGFyc2VFeGNlcHRpb24=","IFN1ZmY=","IE5vcnRvbg==","L2RldGFpbHM=","Lm51bGw=","Pj4m","CW9r","LWxvdw==","LnVzdWFyaW8=","bmVzdGVk","WEI=","T1VSUw==","LkJvcmRlckNvbG9y","IGJyb3c=","INCV","Y29ycg==","IFJlZHNraW5z","LmdldFRhZw==","LmdldFRyYW5zYWN0aW9u","IHN0aWdtYQ==","aGFyZHQ=","IFBsYXllclByZWZz","YWxzeQ==","dWNzb24=","TGFuZ3VhZ2Vz","IE9saXZpYQ==","IHRhYw==","IGJsaQ==","IGNhdmFs","IGNvbnNvbGlkYXRlZA==","IHBlcmls","IGRlbGU=","IGZvcm11bGF0ZWQ=","IGhpZ2h3YXlz","LnNwYXdu","PT0k","IE5pZXQ=","IHZlZ2dpZXM=","eXBv","LXJ1bGU=","IFZpZQ==","L2VwbA==","IGVuZmFudHM=","c3RyaW5nTGl0ZXJhbA==","IHRvdWdoZXN0","YnV5ZXI=","IGNvdmFyaWFuY2U=","IGlsaQ==","IFNvcGhpZQ==","IEJBQg==","ICIpLA==","IFVr","Y3VycmVudEluZGV4","X3VzZXJkYXRh","LmNvZGVj","IFB1bmphYg==","IFNOUA==","bG9s","YWR2YW5jZQ==","IGNvbWZ5","SnNvbklnbm9yZQ==","IGZhc2hpb25hYmxl","IElDT04=","IG9yYQ==","IFByaWNpbmc=","PG51bQ==","IElSQw==","RVJW","IE1laW4=","IElEaWN0aW9uYXJ5","QURPVw==","aXNOZXc=","IERldm9u","YXRs","KHJlcXVlc3RDb2Rl","CVByZXBhcmVkU3RhdGVtZW50","SU1QT1JU","IG1hcml0YWw=","X1NFTEVDVEVE","Z2V0UmVzcG9uc2U=","YXJEb3du","QlY=","aWJOYW1l","IFBBVENI","w6TDpG4=","IGRhYXI=","IEZpbGVNb2Rl","IG1hcnR5","LlNwcmluZ0FwcGxpY2F0aW9u","Y2VuZQ==","YW1wb2xpbmU=","Z2V0U2l6ZQ==","UmVzdGFydA==","5pWI","LnByb2plY3Rz","IEV0aGlvcGlh","IHN0YXR1c2Vz","VElPTg==","KGJn","IFh1bml0","VGVtcG9yYXJ5","IEVuZ2FnZW1lbnQ=","IHhm","IHByb3hpZXM=","IGdlbmVzaXM=","UGFnZXJBZGFwdGVy","IFNsYXZl","IHN1bmdsYXNzZXM=","IENobG9l","IGtvamk=","YWRlbQ==","CUpTT05PYmplY3Q=","zrM=","IGhvcnM=","Knc=","w7Ny","Z
XNjaA==","IGNyaXRpY2lzZWQ=","emlhbA==","IFNhbGVt","LlZlcnRpY2Fs","IFJhc2g=","PkU=","dGVyaW5n","L3NjcmVlbnM=","IGhlaWdodGVuZWQ=","0LDRgNGC","QXV0aG9yaXRpZXM=","X2Jib3g=","w7xuc3Q=","LmZvbnRTaXpl","IEJPT0xFQU4=","ZGl2aWRl","IFNsb3Zlbg==","dWNlcg==","2ZI=","c3R1Yg==","IG5hdmlnYXRpbmc=","OmFuaW1hdGVk","X05PVw==","X3ZlY3Q=","fXsK","QCg=","IHRlbGVjb20=","IGNvbnRyYWN0aW5n","IEFzc2FuZ2U=","IGV4dHJhY3Rpbmc=","IGdyw7Y=","Y29icmE=","LkRJUw==","IGNyYWI=","IHR3aXRjaA==","IHZlcnRz","IHJlamVjdHM=","CWZvcm1hdA==","IHJlZ2VuZXJhdGlvbg==","LlN5cw==","c29sdmU=","CWRpYWxvZw==","c2hp","bWV0ZXI=","KGJlc3Q=","dmFsaWRhdG9ycw==","IG9ud2FyZHM=","IGd1cnU=","IG1vZGVyYXRvcg==","b3dpZWQ=","ZXhwZXJpbWVudA==","cnVi","IG1xdHQ=","IENhdWNhcw==","IG5hdGlvbmFsaXNt","IG1hbmdl","CUltR3Vp","L0VkaXQ=","IGluaA==","IGludGVsbGln","ZXJva2Vl","CWV4cG9ydA==","IGRpc2NyaW1pbmF0ZQ==","c3VidHJhY3Q=","IE1vb2RsZQ==","ZW5zZXI=","IEd1aWRlcw==","UkFQ","LWhvdA==","X2dycA==","LnBpY3R1cmU=","WEE=","IGluaXRWaWV3","X0NvbW0=","IG92ZXJkb3Nl","ICsKCg==","IFNpbGVudA==","c2hvd3M=","IGludGVycG9sYXRl","Rm9ybWF0aW9u","IGJpc2M=","bWFya2V0cw==","KFND","WmU=","IE5ldHdvcmtpbmc=","IGFkcmVuYWw=","IEd1bnM=","ZXRlb3I=","RGVjbGFyZWQ=","b3JnZXRvd24=","IGthcmVuYQ==","L3Bhc3N3b3Jk","X2FkZHJlc3Nlcw==","SVRFUkFM","QnV6eg==","IENvbndheQ==","KGNhc2U=","UFdE","aGVpcm8=","KGFjdA==","KioNCg==","KCkpOwoKCg==","IGFudg==","IC4uCgo=","KE1lbnVJdGVt","KG1haWw=","X3NlY3Rpb25z","CW5ldA==","IHBsdXQ=","IHdyZW5jaA==","L29iamVjdA==","IElzdA==","IFZJUw==","L3B1Yg==","YWx0ZW4=","IGd1aXRhcnM=","IGFudGliaW90aWM=","77yW","wrk=","ICIrIg==","Zm9ybXVsYQ==","IGJhYmVz","IFByb21wdA==","IGVuaW0=","L3BsYXllcg==","CXJlZg==","IGJ5xIc=","IGNvbnN1bWVz","IEhhc3Q=","IFRhbw==","ICcpKQo=","IGNsYW0=","IHRoaWdocw==","IG1vdGlm","QXBpT3BlcmF0aW9u","IFdM","Z2V0Qw==","CWZsYWdz","b2ludG1lbnRz","IGVjb25vbWljYWw=","bmVlZGxl","eGxz","cHJhY3RpY2U=","dXR6ZXI=","dGltZW9mZGF5","LW91dHB1dA==","IGZpbmRCeUlk","IEJ1ZGR5","0J7Rgg==","U2V2ZW4=","IEJhcms=","IGVudm95","X2FsZ29yaXRobQ==","5Yip","IGJhbGxpc3Rp
Yw==","56e7","cmFkZXM=","CWRvYw==","cm9kdWNpbmc=","IEVhdGluZw==","VW5tb3VudA==","L2RhdGFUYWJsZXM=","X2JvbnVz","IGxpdHQ=","cHBz","KWxvY2FsT2JqZWN0","cGVyZg==","IEhlbHZldGljYQ==","c2h1dGRvd24=","L21s","LnRva2Vucw==","IEhhcmRjb3Jl","LHJvdw==","L2Jn","U2NhbGVy","4oCUYXM=","X2xvZ2l0cw==","4oCZaW50","CUFwcA==","SW1wbGljaXQ=","LkZwcmludGY=","RVRP","IHRlcnJh","IHBvc3Nlc3Npbmc=","LnJzdHJpcA==","LCks","PXllcw==","IFN0cmlwZQ==","Pz0=","bmV1dHJhbA==","Lmdvb2Q=","IGtlbm5lbg==","IFN1bmc=","ZmF1bHQ=","eXN0YXRlY2hhbmdl","Q2FuYWRpYW4=","JywnIi4k","IE1pdHM=","w6ZuZA==","IFNUUlVDVA==","IFVSTFdpdGhTdHJpbmc=","IENvbXBhc3M=","IC0tCgo=","IE5TTGF5b3V0Q29uc3RyYWludA==","fG1pbg==","LWFkanVzdA==","IHJlYnVpbHQ=","TElHSFQ=","L3Nl","LW1vdW50","dnBu","dmFsaWRhdGVk","KFFPYmplY3Q=","IGlnbml0aW9u","IENoYXJnZXJz","UllQVE8=","XWluaXRXaXRoRnJhbWU=","IEZsdWlk","IGNhZHJl","IG5vbWluYXRpb25z","TmVpbGw=","IEhvdQ==","IGN1cnJlbnRz","X2dlbmU=","KGlucA==","UGFyaXM=","esSZ","YWdncmVnYXRl","IGFzc29j","d2VldGVk","ZXJyYXQ=","4oCTCgo=","ICcvJywK","Zml4dHVyZQ==","IEhpZ2hlc3Q=","YW1iaWVudA==","IGNobW9k","IGNvbnRl","IHNlbnN1YWw=","IGdhcm1lbnQ=","emVycw==","IFBvd2VyZWQ=","ZG9tYWlucw==","UmV3YXJk","aW9tYW5pcA==","IGNvY2twaXQ=","b3V0ZmlsZQ==","IGJ1aWx0aW4=","IGluc2lzdGluZw==","LnZhcnM=","emlwY29kZQ==","IO+/ve+/ve+/ve+/vQ==","ZmFpbHM=","IGNvbnNvbGlkYXRpb24=","X29pZA==","UGxhbmV0","ID0iLA==","CWVs","VUlMVA==","w6R0eg==","YWZhcmk=","IE1jQ2w=","VGltZWxpbmU=","RXN0YQ==","IGZyYW0=","WUU=","IGNlcmVicmFs","T2ZNb250aA==","IFByZWdu","INC60LvQsNGB0YE=","ICAgICAgICAgICAgICAgIAogICAgICAgICAgICAgICAgCg==","IEZyZXM=","QXBwcm92ZWQ=","LlNwZWNpYWw=","IFByb3Rlc3RhbnQ=","IGFsbGVyZ3k=","X3BjbQ==","CUNvcHlyaWdodA==","IHN1cGVyQ2xhc3M=","InN0cmNvbnY=","IE1vaGFtZWQ=","ICcvLw==","Rm9yZUNvbG9y","QXJ0aHVy","IEp1bmdsZQ==","IHZlaW5z","U2Fk","IGJhY2t1cHM=","IE9waW5pb24=","w7t0","IGludGVybWl0dA==","b2R5bg==","IENocmlzdGluYQ==","IGFuZHJl","IGV2YWN1YXRpb24=","cGFsZXR0ZQ==","aG9yc2U=","IFJlc2lkZW50","IEhhc3Nhbg==","Lk5pbA==","IGFpc2xl","IEdyb3dpbmc=","IGJs
b2dpbmZv","L3NxbA==","X2lvY3Rs","U2NhbGluZw==","IE1vbmFk","X2NwcA==","IEh1dGNo","IEFwcGxlV2ViS2l0","RXhwZW5zZQ==","X0pPQg==","IHBvaW50bGVzcw==","RnJvbUJvZHk=","YW50YWw=","IGRlcGljdGluZw==","IENFTEw=","IHJlZmlu","IENOQw==","7LmY","X2RpbWVuc2lvbnM=","IFNBTg==","IGFmdA==","IGZvb3RzdGVwcw==","Y2NvbGk=","X1BIT05F","L21hdGg=","LWtpbmQ=","IE1lYW5z","aWNoYWVs","Lmd1bmE=","IGluYXVndXJhdGlvbg==","LWRyaXZpbmc=","KGRlbGV0ZQ==","IHRvdGFsQ291bnQ=","X01D","LkV4dGVuc2lvbg==","Q29tbWVyY2lhbA==","IHpJbmRleA==","PEN1c3RvbWVy","Imc=","LXNoYXJl","IHBhY3Q=","YWdhcmE=","IFNJTA==","X21vZGVz","IE1vbGVjdWxhcg==","IHN5c3RlbWF0aWNhbGx5","PEc=","X3Njcg==","IE9ybw==","YXNlcnM=","IGJpYw==","IGRlc3Ryb3lz","UElQRQ==","LlN0YXJ0UG9zaXRpb24=","IGPhu6dh","aXJleg==","LkJ1bmlmdQ==","X0Z1bmN0aW9u","IHPDvA==","X2Z1dHVyZQ==","IFdlYWx0aA==","IE5hdHVyYWxseQ==","5oC7","X3llcw==","IGFicnVwdGx5","U3RyaW5nRW5jb2Rpbmc=","IENHUG9pbnRNYWtl","IHpo","IGltcGVyc29u","IHBpdm90YWw=","IFNvbWFsaWE=","IHNlZ21lbnRhdGlvbg==","X0FOQUw=","IExvZ2luQ29tcG9uZW50","Q29uc3VsdA==","IHRydW5jYXRlZA==","XSI7Cg==","LmdldENvbmZpZw==","IGludGVybnNoaXA=","QmFieQ==","6rCc","IHN0cmVuZ3RoZW5lZA==","X01J","YmFza2V0","IG5pY2h0cw==","IFRWcw==","IFNoYW4=","44K1","cmFjdXNl","LlJlTFU=","L2ludGVyZmFjZXM=","IGdldEl0ZW1Db3VudA==","IHJldGlyaW5n","IHNwZWNpYWxz","IGVudGl0eU1hbmFnZXI=","YmVsaWVm","IHNvbGRlcg==","ZGF1Z2h0ZXI=","aWprbA==","IHV0aWxpemVz","LmZpeGVk","U1U=","IGRyYXN0aWM=","IGhhY2tz","Z3J1bmQ=","IE1V","IFN0YXJ0ZXI=","LkNvbXBvbmVudHM=","X21vdG9y","R29sZGVu","IGxvZGdl","ICkpOw==","IENvcmludGg=","0LjRh9C10YHRgtCy0L4=","w7NuaWNv","Z3JlU1FM","IEZsdWVudA==","IG1hcmM=","LkxvYWRTY2VuZQ==","Lkdyb3Vwcw==","IGVyaA==","IEF1dHVtbg==","U3RvcHBlZA==","IGl0YWxpYW5v","IG1pbmlvbnM=","IEFzc2VydGlvbnM=","IG11eA==","QnU=","IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ==","CXVw","cmVhZHlzdGF0ZWNoYW5nZQ==","X01ldGE=","IGN1cnJlbnREYXRl","IENoYXBtYW4=","VW5kbw==","U2Vhbg==","YXBy","IHBhc
m0=","X2ljb25z","IFN0YQ==","w6F6","IHN1YmRpdmlzaW9u","IGFsdGVyaW5n","UE5H","cG9uZW50aWFs","IHBvc3RncmVz","IEJEUw==","LWV4aXN0ZW50","IEJyYWRmb3Jk","IE9NWA==","X1dISVRF","X1BST0dSQU0=","cWM=","IHR5cGluZ3NTbGlua3k=","IFBpY3M=","X01FVEE=","SVRURVI=","X3N1YnNjcmlwdGlvbg==","SVJPTk1FTlQ=","IEh5dW5kYWk=","KCk7CgoKCg==","INiz","IGphYw==","IGVsaW1pbmF0ZXM=","KX0pOwo=","IGNvbXByZW5k","CWluc2VydA==","X2ZhY2Vz","Ij4k","IGViYXk=","IGNhcHRpdmU=","cGxpYW50","IENhbGN1bGF0ZXM=","b2x0YQ==","ZXN0aW5n","X3JldmlzaW9u","IG3DunM=","K20=","IiwiIiwi","V0hBVA==","IGNvbXBhc3Npb25hdGU=","aGFyZ2E=","W3JhbmRvbQ==","IG1vZHVsbw==","KHNu","IG9jY3VwYXRpb25z","Ly8vLwo=","CWJvYXJk","IEJhbGs=","d2nEhQ==","IFdpZmk=","LlByb2ZpbGU=","Om1hag==","CW1hdA==","TE9DS1M=","KGpCdXR0b24=","ICgnJA==","TXVy","5oyJ","YmJsZQ==","IGZyb2c=","LWhpZGU=","IGJyb2FkY2FzdGVy","4Lie","aGFsZWQ=","IGFtdXNpbmc=","X3ByZWRpY3Rpb25z","X2ludHI=","IGVhZ2xl","0LDRgtC10LvRjA==","IGdldExpc3Q=","cHNpbG9u","IGNoYXJhY3Rlcml6YXRpb24=","QVJEUw==","IHJlbG9jYXRpb24=","IHJ1bGVycw==","UEFZ","IERlZmluaXRlbHk=","X0FjdGlvbg==","IGNsb3N1cmVz","IGZhY3R1YWw=","b2R5bmFtaWM=","IHByZWNhdXRpb25z","bmllag==","IFBhcnRpZXM=","IFN1YmFydQ==","IGNvdXNpbnM=","YXJiZWl0","Lm1vbmV5","Z3VudGE=","KGFuZA==","Z2V0aXRlbQ==","LlN0eWxlUHJpb3JpdHk=","IHNsaWQ=","c2luZ2xldG9u","IGdhcm4=","IFBBUw==","IGRheno=","YcW8","IGJvZ3Vz","IE1vZw==","IHJpdmFscnk=","aXNvbA==","IGxhbmRtYXJrcw==","w7Fhcw==","QmVybg==","IFNhY2hz","ICIpCgo=","IGhvc3RpbGl0eQ==","X21leA==","bWVyZQ==","TW90","cGljdHVyZUJveA==","RGVmZW5zZQ==","IGFmZmlkYXZpdA==","b3RoZXJ3aXNl","LmRpcmVjdG9yeQ==","X1VuaXR5RW5naW5l","LWJsb2c=","LnNraW4=","cGhlbQ==","QXBlbGxpZG8=","ZXJjaGFudA==","W2NsYXNz","IHdhcnQ=","LiJb","YWxldXI=","L2JhY2s=","ICAgIAkgICA=","IHByZWNpcGl0YXRpb24=","IG9ic3RydWN0aW9u","IHBPYmo=","IHJ1cHQ=","VUNLRVQ=","YXll","5o6S","Z3g=","IGVjbA==","IHNlY3JlY3k=","L0hlYWRlcg==","IExlc2I=","IGxlaQ==","IEJ1bGxldGlu","IGdpdmVhd2F5","LkhvbWU=","X1JPT00=","Ilc=","IGNvd29yaw==","X3Jh","IEN5Y2xpbmc=","IFBhdw==","IHB1cGls","
L2FyY2g=","IEZpbGVVdGlscw==","6aaW","cnNw","IGZyZWVkb21z","IExlYXI=","fWApLg==","IGJvd2xz","L2Jsb2Nr","X2xvZ2dpbmc=","IG1ldGhhbmU=","IGhvcm5z","IHdvbmRlcmZ1bGx5","IGFsdGVyYXRpb25z","IGV4aWxl","bHNlbg==","X3BhdXNl","X0xBTkdVQUdF","IFVTREE=","X215c3Fs","X0FNT1VOVA==","IExJRkU=","IHlvdW5nc3RlcnM=","IHJpb3Rz","W0U=","IHVuZm9yZ2V0dGFibGU=","LH0sCg==","RGlzcG9zZWQ=","IEFzc2Fzc2lu","VU5H","IE5ld3Nw","VXNlclNlcnZpY2U=","OmFsb2Fk","Kycs","IHNldHRsZXJz","IHNjcmVhbXM=","IGluY29udmVuaWVuY2U=","LlJvdGF0ZQ==","IGphcnM=","IFB1enpsZQ==","IG1lc3Q=","YXJzaQ==","IFNoYXJtYQ==","fCg=","LmRz","IFNhY3JlZA==","X2V2dA==","IGV4cHJlc3Nlcw==","IGhvY2g=","IER1Y2g=","LmNhbGxz","dGhy","IFNoZWZmaWVsZA==","LkFsZXJ0RGlhbG9n","IHJhZGljYWxseQ==","IHRyb3Vz","IHByZXZhaWxpbmc=","IFdXSUk=","4oCZbg==","ZW5zZWx5","IFllc3RlcmRheQ==","IFNpcml1cw==","IGtpbGxlcnM=","IEZGVA==","IG92YWw=","Jyk6DQo=","IOygleuztA==","b3VyYWdl","IENoZWNrYm94","V29ya2Jvb2s=","LmRlZmVy","X2Zsb29y","IGNvdW5jaWxs","IG5vcnNrZQ==","bW9pbA==","b3JlYQ==","IG1hcmtldGVk","X1NVUg==","eEFB","IHN0YWluZWQ=","ZXV0","IE1lbmc=","IGllZWU=","LmV4dGVybg==","ZWdpZQ==","IHJhcHA=","IFB5b25neWFuZw==","J2NsYXNz","TW9i","IGluaXRpYWxWYWx1ZQ==","X3dhdmU=","IGphYg==","IG1hc2N1bGluZQ==","IGFtcGxpZmllcg==","IHR0eQ==","UGF0aENvbXBvbmVudA==","X3h0","IEdGUA==","L3NlYw==","CWRpc3BhdGNo","bWFya2Rvd24=","IFNjaG4=","Ym9sZQ==","wrfCtw==","bW91c2Vtb3Zl","IGVyck1zZw==","IGFzaWdu","X21vbm8=","VG9TZWxlY3Rvcg==","IFp1","KFJlY3Q=","IEVycm9yQ29kZQ==","bGF0aW4=","YW5naWJsZQ==","dnRr","Q0dTaXpl","UG9rZW1vbg==","IGNsYXNzbWF0ZXM=","IGF0dHJhY3Rz","IFRhdHRv","dWx0YW4=","b2zDs2c=","IGhhbHRlZA==","4KSo","IEthcnQ=","IHVl","X0luaXRTdHJ1Y3R1cmU=","VGVzdENsYXNz","IEFpcmJuYg==","XyIs","IGNoYXJjb2Fs","IGlwYw==","IFN0cmV0Y2g=","LmdsaWRl","bGF0ZXNBdXRvcmVzaXppbmdNYXNrSW50b0NvbnN0cmFpbnRz","IHBvdGlvbg==","SVRUTEU=","IGNvdW50ZXJ0","X2hk","cHJlcGFyZWQ=","QWRz","IFZhbXBpcmU=","cm9ib3Rz","LkNyZWF0ZUluZGV4","U3RhdHVzTGFiZWw=","IHR1Y2tlZA==","YWbDvHI=","VXQ=","IHN3ZWF0ZXI=","X0ZO","ICAgICAgICAgICAgIC
AgIAk=","YXRha2E=","IGV5ZWJyb3dz","YWNvZXM=","dWRlbg==","LkxpbmVhckxheW91dE1hbmFnZXI=","IHN3YXk=","IG11bHRpbg==","KCkpKSkK","IE5TVUludGVnZXI=","IE15QmFzZQ==","UGFydG5lcg==","dXRzY2hlbg==","IENhdGVy","LnNldEJhY2tncm91bmRDb2xvcg==","IGFjY29tcGxpc2htZW50","X3Byb2JsZW0=","LmR0ZA==","IHBhZ2VOdW1iZXI=","IGphY2tldHM=","IGNyb3BwZWQ=","dWVscw==","IEhlcA==","IGNhcHBlZA==","Kk1hdGg=","X2NhbGxiYWNrcw==","IHB1YmI=","IEJydW5zd2ljaw==","LnJlc3BvbmQ=","WyJf","IGJlZGRpbmc=","aHl0aG0=","T1g=","KHNwZWVk","IHBlc3RpY2lkZXM=","IC0tLS0tLS0=","LkJsdWU=","IG5vb2RsZXM=","IEdvZXM=","IHNhdmVy","b3h5","X2NvbXBsZXRpb24=","IFN3aW5nZXI=","IGdldERhdGU=","IG1pbmRlZA==","aW50ZWdyYXRpb24=","IExvdHVz","KHN0b3A=","KCcsJyk7Cg==","IGZsb29kcw==","IFdvcmtmbG93","IGVydXB0ZWQ=","TWFjcm8=","IFNhdWNl","IGV2ZW50TmFtZQ==","XElucHV0","QnJlYWtpbmc=","CXdoZW4=","X3B3","SU5ERVI=","IFdlbGxuZXNz","IHZveGVs","IE1lbGw=","IE1FRElB","U0VOUw==","IEZ1bmRz","IE1pbGQ=","PEFycmF5","LXRoaXM=","dW1wZWQ=","L2Z3","IERiQ29udGV4dA==","V0k=","Z2lybHM=","SE9X","Jyk7Pz4K","IHRlbXB0aW5n","IHRlc3RhbWVudA==","IGJpYmxl","IGNvbnN1bHRlZA==","IEluZGV4RXJyb3I=","6KiY","IGtleXBhZA==","aXp6bw==","KG9r","IHdoYXRzYXBw","IFJlbW90ZUV4Y2VwdGlvbg==","IHRlYW1lZA==","4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU4oCU","wrss","IGdldFRpbWU=","ZGlhZw==","aXNzeQ==","IGhlZA==","IGtub3Rz","am9t","IGZ1bm5lbA==","LW1haWxz","IGV4cG9ydGluZw==","IFZM","IEthcm4=","IEJ1ZGRoaXNt","IEFsbGFu","X1JBRElVUw==","IHdvcmRpbmc=","IEZvcmdldA==","IENvcm9uYQ==","aXBoeQ==","IGxpbWJ1cmc=","dWdneQ==","IFVzZXJSZXBvc2l0b3J5","aW1pbg==","KGVsZQ==","IGxhYmVsbGVk","56S+","IEhlcm1hbg==","LnFx","ICIpKTsK","aWViZXI=","LlRyYW5zbGF0ZQ==","cnlu","IGRlc2Vudg==","dW1k","U2ltcGx5","CW1vZGU=","UnBj","IFZhbGVuY2lh","IHN0YWZmZXJz","IHNlbHY=","IFNwaWtl","IGRlbGlj","IGVydQ==","X0RU","SnVkZ2U=","4buV","IEJhc2lu","Lm11dGFibGU=","InVybA==","IHRhcmlmZg==","IFNsZWV2ZQ==","IGZsYXJl","LmRyb3BvdXQ=","IGJyaWRlcw==","KSksDQo=","X2NvbnN0cmFpbnRz","ZGVzdHJ1Y3Q=","T3V0bGluZQ==","IGRpc2FwcGVhcnM=","X2
xvY2tlZA==","IE5TTG9jYWxpemVkU3RyaW5n","Y2tl","CW51bGw=","YWRyZXNzZQ==","IHRvcHBpbmc=","IEpva2Vy","YmlzaG9w","0L3QvtGB0YLRjA==","YW5kZXJpbmc=","X2FtcA==","PXRpbWU=","X1NwYWNl","X1BVTEw=","Jz0=","IGFudGlxdQ==","IGNhY2g=","X19fCgo=","T05FUw==","0L7Rjw==","IHVucmVhZA==","LnBvbGljeQ==","b29vb29vb28=","65+s","IHVzdGVk","IFJlY2U=","IGFsbGVt","44O844K5","IFRob3VnaHRz","dmVpbGxhbmNl","aXN0cmF0ZQ==","X2xhbmU=","IGZhbWVk","LkdldE5hbWU=","IHNtb290aGVy","IFF1YWxpZmllZA==","YXplcnM=","X2dlbw==","RmF4","IE1pbmRz","IFJhaXNlcw==","IHRyYW5zY3JpcHRz","Q29udmVyc2F0aW9u","IHJlbWFya2Vk","64KY","ZGxpbmc=","IGRlcGxveWluZw==","IHNoYXJlZEFwcGxpY2F0aW9u","IGtw","Rm9udEF3ZXNvbWVJY29u","X2R1bW15","cmVpYmVu","IEphbmVpcm8=","RGlyZWN0aW9ucw==","LmdldEJlYW4=","c2Fzcw==","IGNvbW1hbmRlcnM=","dmF0aW9u","ZXJyb3JDb2Rl","IEFsbG95","LmxvY2FsaXplZA==","0JE=","IGRpc2h3YXNoZXI=","IFNvdXA=","TnU=","X0RlZmF1bHQ=","IHVuZXZlbg==","IC8+IjsK","LUJhc2Vk","IHNlYW1sZXNzbHk=","LW51bGw=","IFhD","IHN0ZXc=","KGRlbGF5","QVRPUlM=","IFdoZWVsZXI=","Ijw/","IENoYW5kbGVy","IHJldGFsaWF0aW9u","IGJ1ZGRpZXM=","LXNpemluZw==","IEVpbnM=","IC4uLiw=","cXVldGU=","IERPQw==","IGZhbHNlbHk=","IGZsYXRz","TklDQUxM","IGxpYnI=","QmVOdWxs","aW11bGF0aW9u","CVF1ZXJ5","X3V0","IHBsYXF1ZQ==","YmlsZA==","IHNjcmVhbWVk","Lm12Yw==","LldpZGdldA==","IGRpZmZlcmluZw==","L3N1cHBvcnQ=","X1ZPTFVNRQ==","Lm5vZGVUeXBl","CVdyaXRl","IHLDs3du","Ym9va21hcms=","X0NPTk4=","IENyZWVk","IGluaGliaXRpb24=","IFJlaGFi","dXZyZQ==","IGR1bXBz","b3dlag==","X3BsYWNlaG9sZGVy","IEhXTkQ=","IGRlcm1hdA==","LmRldGFjaA==","IGZpbmFsaXplZA==","Z2VyaWVz","aWRhaw==","X3Byb2c=","IHVwZGF0ZVVzZXI=","bHlz","Lkdvb2dsZQ==","IGx1ZWdv","IGFudHM=","5qCH6aKY","IERSTQ==","0LvQtdC9","LWRi","ZXJyaWNr","X2xu","Li5c","aWtpdA==","IERpZW4=","IHBhcmFtZXRyb3M=","a2V5cHJlc3M=","IEtlcmFsYQ==","IGRyYWluZWQ=","ZsO8Zw==","IGNhcGl0","X2F1Zw==","dGFudA==","TmF2QmFy","IHJvbGxiYWNr","IGxleQ==","4LiI","IEJTUA==","IFByZWRpY3Rvcg==","IHdhZ29u","ICJ8Ig==","U2VydmU=","LkRvbmU=","IER1cmNo","UHJvdmlkZQ==","CXNjb3Jl","X09E","Ln
dlYXBvbg==","IHVuaXZlcnNhbGx5","IGluanVuY3Rpb24=","X1NDUk9MTA==","Lk1hdHJpeA==","IE1vbmdvQ2xpZW50","YnVmZmVycw==","IGJhZGdlcw==","IHNoYXJrcw==","IFNoYXJr","TU9ERUw=","LlJFQUQ=","CXRhZw==","IHN0cnRvdXBwZXI=","RVJHWQ==","Ymlhcw==","IGFjY291bnRJZA==","IEVtbWFudWVs","IHJlc29ydHM=","IHN2bg==","d2FybmluZ3M=","X0lF","TEFT","IG51bGxh","CWFz","IGRlbWVhbg==","4oCcQXM=","QXV0aG9yaXplZA==","IHRlbmRlbmNpZXM=","LXNldHRpbmc=","IHByZWxvYWQ=","IGNubg==","4oCcTm8=","JSkKCg==","PVQ=","dXN0bw==","IEZJUkU=","cmVzZWFyY2g=","INCT","IExlc3NvbnM=","LkFwcGVuZEZvcm1hdA==","IGluaXRpYXRpb24=","IENvdXM=","YXJlcg==","cHJvamVjdGlvbg==","IFNoZWV0cw==","IEZvbGQ=","UmVkZGl0","RGVsZXRpbmc=","IHphbQ==","IE5ldXJhbA==","IEZlY2hh","IMKu","IHRhc3RlZA==","IEVuZW1pZXM=","IEpvaG5zdG9u","IGRhbmNlcnM=","IGRpc2FibGluZw==","IHBldHR5","IFdlbGQ=","Ly0t","KHNwcml0ZQ==","SUdP","YXJnb3V0","IHF1YXJ0ZXJiYWNrcw==","ZGlzcGF0Y2hlcg==","IFN1c3RhaW5hYmxl","ZW5hcmlvcw==","IFNraQ==","IGZhY3Rv","aWxsaW4=","X2V4dGVuc2lvbnM=","ybU=","Pkg=","ZWFzdA==","LmFpcg==","4oCcQnV0","T2JqZWN0Q29udGV4dA==","c3VjY2Vzc2Z1bGx5","X2xhbmQ=","IGZvbGRz","X0NPT1JE","IHN1YnBv","LmdldEFkZHJlc3M=","aW5zdHI=","TWF0ZXJpYWxz","0YPRgdGC","ZGVwb3NpdA==","LWxhc3Q=","X0dSQVk=","PWZpbmQ=","IG11dGFudA==","IGxlc2JpZW5uZQ==","bGV0Y2hlcg==","Uk9VR0g=","dXJla2E=","LmNhcHR1cmU=","IGVubg==","IChbWw==","IEZsdQ==","IHRhc2tJZA==","IEh1c3NlaW4=","LmZvbGRlcg==","IGF1c3Rlcml0eQ==","SVNUUkFUSU9O","X0ltcGw=","5rOo5oSP","IGRlY3JlZQ==","LWNoYXQ=","IGltcGxpY2F0aW9u","IGd1ZXNzZXM=","dWxrYW4=","QW5hbHl0aWNz","LnBsdXM=","Q09NTUFORA==","0LXQu9C4","wrsKCg==","X1NJVEU=","IGVxdWFsVG8=","U3VwcG9ydEZyYWdtZW50TWFuYWdlcg==","IFJlY29yZGluZw==","5a6M5oiQ","IGJhZ2dhZ2U=","IHBpdGNoZXJz","IEVo","b3F1ZQ==","CWNudA==","ID0+JA==","L2Zvbw==","SVJB","IFNhdGVsbGl0ZQ==","Ym9yYWg=","IH19Igo=","IEVuZHM=","IFNwcmF5","LHBhcmFt","LkNocm9tZQ==","KnE=","dGhvdWdodA==","aWJyYXRlZA==","IHRoaWV2ZXM=","IGJlbmVmaWNpYXJpZXM=","RW50ZXJlZA==","b3R0ZXN2aWxsZQ==","IHZldGVyaW4=","QnlJRA==","cXVpcGU=","dW1wdGlvbg==","LX
VuaXQ=","RXhlY3V0aW9uQ29udGV4dA==","QHM=","IEdpb3Y=","LlRvb2xUaXA=","X2ZyaWVuZA==","KGF0dHJpYnV0ZXM=","IGR1bXBpbmc=","IEpD","X0RPQ1VNRU5U","IEFybW91cg==","KGluc2VydA==","Lkhvcml6b250YWxBbGlnbm1lbnQ=","IFFlZA==","44GE44G+44GZ","L2dpdA==","IFlZWVk=","IENhcmRpZmY=","IGFwYQ==","b3JnYW5pYw==","IFdoZXJlYXM=","IOad","IE1pYQ==","IGRlbW9saXRpb24=","IHNjYXJz","IHBhaQ==","IHJldHJpZXM=","IHJx","IERlbmlz","KFV0aWxz","IGFsbGV2aWF0ZQ==","IFBJQw==","aWR1ZQ==","IGFja25vd2xlZGdpbmc=","IC8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8=","56Gu5a6a","xKs=","XEpzb24=","LmJpbmFyeQ==","IHh0eXBl","c2lnbmFscw==","IEFwcGVhcmFuY2U=","JnI=","fXM=","Q2k=","IElsbHVt","cG9yYXRl","aG9n","IGluZGV4T2Y=","XENvbW1hbmQ=","X3BhcmFsbGVs","IFNoZXJsb2Nr","7YM=","ICIiKQ0K","Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8v","IGNyaXRpY2l6ZQ==","IFNvYXA=","IE1hdGNoZXI=","IGdyaWxsZWQ=","KlQ=","IGFkb3Jl","dWxsaW5n","IGplZG9jaA==","X3JlZnM=","bGVhbnVw","IEpBWEI=","IHJvc2Vz","IExpYW0=","c2l6ZWk=","IGdldGNoYXI=","IHRhcmRl","LXRvb2x0aXA=","IHF1YWxpZmllcg==","IEludGVybWVkaWF0ZQ==","X1dpbmRvdw==","IE1hbHRh","RGlzY29ubmVjdA==","ZXdoZXJl","Q2FtcG8=","IGlycmF0aW9uYWw=","bGVkbw==","IERO","QVJHVg==","IG91dHJv","IHRoaXJ0ZWVu","Sm9zZXBo","TUFS","L2ds","SmVzcw==","IFBzeWNoaWF0","IHBhZGRpbmdCb3R0b20=","LWxvb3A=","L2ZvbnRz","X3NlZW4=","VGVhbXM=","UmVhY3RET00=","KG1hbg==","KHhwYXRo","LmdldFNpbXBsZU5hbWU=","Pigq","IFB2dA==","IGVsZGVycw==","IHBpZXM=","LnVzZXJBZ2VudA==","LXJlZ2lvbg==","IEdyZWVrcw==","KGZyYWdtZW50","c3R1","IGNvdW5jaWxz","IHN0YW1pbmE=","IEdvZGRlc3M=","6KW/","IHBoaWxvc29waGVycw==","IHBlcnNvbmU=","IExvc2U=","IENMUg==","IERvY3M=","IHNvYWs=","IEhPTERFUg==","IGJlbGxz","aGFzaENvZGU=","UkFURQ==","X1dFSUdIVA==","aW5vdXM=","ZW5kcmE=","b3Bob2JpYw==","IHByb3Nl","IGZpbmVseQ==","L29hdXRo","KHNwYWNl","YWRnZQ==","IE1hbWE=","IHN0cmluZ0J1ZmZlcg==","IHN0aW50","IG1pc21h","IHZpbGxhaW5z","IENyaW1lYQ==","IGRpcGxvbWE=","INC/0L7RgdC7","IEJlYQ==","KGpvaW4=","IO2VtA=
=","Q0hBVA==","cGVyaW5n","IENyb3M=","IG1vbmtleXM=","IHByZWRz","eWxh","LCws","IHZpYnJhdG9y","IE5V","5YWI","ZmFudA==","emV0","IGJpZXRldA==","dW5mdA==","c3dvcnRo","LkZsb3c=","IHBzeWNoZWQ=","IENvbnRpbmVudGFs","PnQ=","IHF1aWx0","LlVQ","IGV4cGFuc2l2ZQ==","RGlzcG9zZQ==","KGxhbmd1YWdl","Q2Fwcw==","X1pPTkU=","IHJlY3ljbGU=","IE1hbmFnZWQ=","Y3VycmVudENvbG9y","LmJyb2FkY2FzdA==","c2lnbklu","LnByb20=","bGx1","dWVibG8=","IHB1bmNoZXM=","IGF1dG9tYXQ=","IGFzc2lnbmluZw==","IGNyZWF0ZVVzZXI=","IEFsbGllZA==","IGNvbmR1Y3Rvcg==","gqg=","IHNhZGRsZQ==","IGRuaQ==","b21lZGljYWw=","LVdlc3Q=","UG9zaXRpdmVCdXR0b24=","IGl0YWxpYw==","P1s=","KHRyaWdnZXI=","IGVsZXBoYW50cw==","IjoiIiwi","IGNhbGliZXI=","cmFmdGVk","ZGlnaXRz","IG1hcnNoYWw=","bWlsbGlzZWNvbmRz","bWFya2Vycw==","bW9t","L3BsYWNl","IGhvbGlzdGlj","OnQ=","Iyw=","IGJvdG8=","IG5hdXNlYQ==","IFNob290aW5n","aXRlY2g=","IHRleHRTdGF0dXM=","PENsYXNz","IERlc2NyaWJl","IGJ1ZmZldA==","Z2ls","IGxvZ2l0cw==","c3RkY2FsbA==","bW9kcw==","IFNrdWxs","IEJhcmU=","aG9wZQ==","IEludHI=","RmFpcg==","CXB0","IGFjb21wYW5o","IGZraw==","X3JwYw==","SW5zdGFsbGVk","X2Fucw==","LmdldE1pbnV0ZXM=","4oCmIgoK","LXRocmVhZA==","IHByZXNjaG9vbA==","QUlMUw==","IGRpZmZpYw==","KGNvbnZlcnQ=","IE5hdGg=","IERPSg==","IHJlZ2ltZXM=","IGVudGh1c2lhc3Q=","IHdhcnJhbnRpZXM=","IGZhc2NpbmF0ZWQ=","X2JpbmRpbmc=","X05vdA==","b2Z0ZW4=","X1JX","L21haWw=","IHRpdGxlTGFiZWw=","IHZpbGxhZ2Vycw==","IEppYW5n","IHN3YWdnZXI=","LlJvd0luZGV4","X2ltZ3M=","cmFweQ==","VkVSQUdF","LlVw","IG5vb3A=","Y2lv","CVNU","IGRlY3JlbWVudA==","IG1hZ25lc2l1bQ==","X3JvdGF0ZQ==","U2l0","IG5pZXV3ZQ==","IHRlcm1lZA==","7ZWp64uI64uk","IHVyZw==","X3RvdWNo","IHN3YXJt","IGNsYXZl","dGhlc3Q=","IExhZg==","SFg=","IEh1bGs=","IHBsYWludGV4dA==","IFNvZmE=","Z2V0U2Vzc2lvbg==","TGVk","IGVjb3N5c3RlbXM=","aGVp","IEtpbGxz","IGh1c2JhbmRz","0YXRgNCw0L0=","KGRvbQ==","X3RpbGVz","TmliTmFtZQ==","IGRvbmF0aW5n","LmFjYw==","IGxpZmVzcGFu","LmJu","X1JHQ1RY","5qU=","YW5zZW4=","IG1vZGVsbGluZw==","TGF5b3V0UGFyYW1z","IG9uQ2hhbmdlVGV4dA==","cnNh","LWxvY2F0aW9u","LlBl","KGJ1cw=
=","KHNvbmc=","IHByb2R1aw==","IFNIT1VMRA==","IENK","IHNvcw==","IEhvbWVDb250cm9sbGVy","LmxvYWRlZA==","KERvY3VtZW50","LnNvY2lhbA==","dGlsZXM=","IGxhbWU=","PWRm","LnBhcnNlTG9uZw==","IHByYWM=","IGRldG94","IFZF","IHB1bnRvcw==","IGRvY3Ry","IGFuY29y","Q0FQRQ==","IGNtYg==","54S2","Kiki","Oi8vLw==","VmFsdWVUeXBl","IG1vcnRnYWdlcw==","O3E=","IFJvY2tldHM=","c3BvcnQ=","VUdD","Y3Rz","44KB","aWV1cg==","IEFwcGVhbA==","KG5i","Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8=","SU1BVElPTg==","IENyZXM=","IE1hbmlw","Q2F1c2U=","YXR5cGVz","bWFudWZhY3R1cmVy","Iy0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0=","IHNwb3I=","ZXNvbg==","IHB1bmNoZWQ=","IGJvb2ttYXJrcw==","IEJ1bGs=","Q29tcGxldGVMaXN0ZW5lcg==","IFRhbGtpbmc=","IEVybmVzdA==","IHJ1YmJpc2g=","a2lsbHM=","IERFRklO","IG5laWdoYm91cmluZw==","YXJsbw==","IFBDQQ==","CW1hdHJpeA==","bG9r","IGF0bGFz","IEd1cg==","IHd5bg==","LW5lZ2F0aXZl","IHR1bA==","IHJlbGlj","IFZvbHRhZ2U=","IFByZWlz","IEpOSUNBTEw=","IFBNSUQ=","YWtldA==","CWF0dHI=","IGV0aXF1","IE1K","IEdtYWls","Y2xy","X2V4ZWN1dGlvbg==","6ZSu","cG9zaXRvcg==","LmFm","TnI=","R2VvcmdpYQ==","VG9wb2xvZ3k=","IHBlcmNow6k=","IG11c2xpbQ==","IGVwaWRlbWk=","IHNhYm90","YWN0dXM=","IOuMgA==","IElPRXJyb3I=","LmVzdA==","cHJlZnM=","IEtyaXNo","LlJlYWRLZXk=","TkFTQQ==","dcOnw6Nv","X0Ri","dW1lcmF0b3I=","V2lkZQ==","KHN0YXRlbWVudA==","LmVuZHBvaW50","Li4uLi4uLi4u","IFsq","c3RyZWFtcw==","bXRpbWU=","UHg=","YXRy","IHRwbA==","Um9tYW4=","IHNjZW5pYw==","Lm56","IFNlY29uZHM=","c3VibWVudQ==","IOyLpO0=","X2J1bmRsZQ==","IGRlxJ8=","IFNpc3RlcnM=","cHJlZmVyZW5jZXM=","IHBvcnRh","QWR2aXNvcg==","bWF4TGVuZ3Ro","IEdSRUFU","X18oCg==","b2xlc3Q=","IExhYmVscw==","IGVuZmVy","ICAgICAgCgo=","IFRoZWZ0","X0ZJTEw=","IFdpc2U=","KWFwcGxpY2F0aW9u","dW5hbWk=","PigpKQo=","QUREUkVTUw==","QlNU","ZXR6dA==","IFFncw==","U2Vuc2U=","RXhjZXB0aW9uSGFuZGxlcg==","IENodQ==","LmdldE93blByb3BlcnR5","IGV4ZXJjaXNlZA==","aW90aWM=","IFJlbGVhc2Vz","IHBpbnRlcmVzdA==","b2xpZQ==","aXNvZnQ=","IH
NlcXVlbmNpbmc=","IHBhZHJl","XSkpOw0K","KHJhZGl1cw==","Lm1lZA==","YWludGllcw==","Lk9iamVjdE1vZGVs","IGVtcGxl","IHNlZ3Vybw==","U3RhcnM=","IHF1YWxpdGF0aXZl","bGVtbg==","4bux","PiIpLg==","IGd4","LWNlcnQ=","IEFTVE0=","IGZ1bGxuYW1l","IHRlbGVtZXRyeQ==","IENhbWJvZGlh","X3Vs","IENsYXJl","Q1VTVE9N","UUM=","IFVucw==","IEhUVFBT","IFBhcmtpbnNvbg==","YW5jeWJveA==","JywnLg==","VHVl","LmdldExhc3Q=","IGFiaQ==","xIVk","QXN0","IEVkaXRpbmc=","LlVuaXR5","am1w","IG1hdHM=","IHNoYXJlZFByZWZlcmVuY2Vz","Q2FwdGFpbg==","LnBhZ2VTaXpl","IHJ0bA==","IGFubWVsZA==","UnVudGltZU9iamVjdA==","IGRlbWFuZGU=","KCI7","c2VpdGU=","LWhlYWRlZA==","IEtyYQ==","IEZPTlQ=","YFw=","Q2xhc3NOb3RGb3VuZEV4Y2VwdGlvbg==","LmF2Zw==","YXRpY2Fs","QWo=","IHBlcm1pdHRpbmc=","UHJvag==","RVJSUQ==","IGNyZWFtcGll","IEJ1eWVy","LW1vZHVsZXM=","IFN1bmRheXM=","fGAK","IGRheXRpbWU=","ICso","IGdsaXRjaA==","IE9wZXJhbmQ=","IHRveGlucw==","aW55YQ==","RE5T","IFNhcw==","Q2FrZQ==","IE5hdGlvbmFscw==","LmFkZFRv","IHNpbmtpbmc=","IGNvbXByZWhlbnNpb24=","IHNjb3I=","YWdlbWVudHM=","IHRhcmQ=","IG1hcmNoaW5n","IE1UVg==","IHNhbmU=","Q3JlYXRlSW5mbw==","4bqv","IGVuZEluZGV4","CWxheW91dA==","IOWQjQ==","U0lURQ==","IFRIRVJF","IFt7Jw==","b3BhdGhpYw==","IHRyYW5zbWl0dGVy","L2JvZHk=","IHB1bmQ=","IENsb3Npbmc=","IHNldGF0dHI=","IGJvdW5kZWQ=","QXRsYXM=","c3VtaW5n","KHRpbWVz","cGFyZXI=","eW5vbQ==","ZmVpdA==","IGZyZW0=","LWxlZw==","IEJyYXM=","PiM=","IOy2nOugpQ==","IElOU1RBTkNF","IENvdWNo","X2hvc3Rz","bGlrZWxpaG9vZA==","Lk1hcmtlcg==","IE1hc2tz","IGNlcmVhbA==","dXRpbGl0aWVz","IGVsZW1lbnRhbA==","IGRpc3RvcnRlZA==","aW5hY3RpdmU=","Y3J5","V0w=","VVBQT1JURUQ=","LlRocm93cw==","L3NjaGVtYQ==","c2VyaWU=","LiInLA==","IEJlbmVkaWN0","LXBpY2tlcg==","aWdncw==","IFBpcmF0ZQ==","5ZGo5pyf","IFRoZW1h","IFNvdXRoYW1wdG9u","IGFycmF5V2l0aA==","IFBhdWxh","IHByZWRpY3Rvcg==","LUFzcw==","LnVzZXJpZA==","IHBlcmk=","IGV4YWdnZXJhdGVk","dXJhdGU=","YXJzZWlsbGU=","IENvbmNlbnQ=","IFBpaw==","IEBfOwoK","IGZvcm1hdGlvbnM=","IGRlbm9taW4=","Ii8+Lgo=","ZW5kZWRvcg==","IHBhbmNyZQ==","IGFtdA==","IG9uUmVzdW1l","b25EZWxldG
U=","IEJDSA==","KSgi","bW92ZW1lbnQ=","IHBvdGFzc2l1bQ==","PCEtLVs=","IG1lbWVz","X1NFVFVQ","X2dhbW1h","IGNvbG9yV2l0aFJlZA==","IGdyYXZlcw==","IHN0YXR1dGVz","IGFxdWFyaXVt","IExhbWFy","IHhBeGlz","V2VicGFja1BsdWdpbg==","X2ZvbGQ=","Lmdlbw==","IEZlZXQ=","LXNwZWFraW5n","6aKd","X2Nvcw==","IEF2ZWM=","YW5zdA==","IEVFUFJPTQ==","IGRlYWxlcnNoaXA=","IFVudGVybmVobWVu","LEludGVnZXI=","IMOqdGVz","LmB8YAo=","dmluZQ==","IEtuaWZl","X3ZlcnRpY2Fs","LkRvd25sb2Fk","IG92ZXJzaXplZA==","bGlk","IHBpbGxhcg==","Y2F1Z2h0","IGZsYWdnZWQ=","KHJvdXRlcg==","KFJFRw==","IGJhcmJlY3Vl","YnJvd3Nl","IEZpdHpnZXJhbGQ=","INC/0YDQvtCy","aXJpZQ==","IGVyc3Rl","ZWxpYg==","X1BSRVNT","IGhlYWxlZA==","IGhhdXQ=","PnhwYXRo","IFdlbg==","Z3J1bnQ=","LktleXdvcmQ=","LWhhc3BvcHVw","bnc=","U1o=","Z2FiZQ==","SW50ZXJhY3Rpb25FbmFibGVk","cHJlY2g=","IHByaW1v","c3RyaXBl","YWx0ZWQ=","X0JPUkRFUg==","ZmluZEJ5","X2Fubm90YXRpb24=","V2ViU29ja2V0","QnVy","IGRpcGxvbWFjeQ==","KHRk","IFNpbXBs","ZGV0ZWN0","cGVyZm9ybWFuY2U=","IGNhcmJvaHlkcmF0ZXM=","L2lvdXRpbA==","LS0tLS0tKw==","X3Ny","bWVldGluZw==","IHwtLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQo=","X1Zhcg==","IHJvdmVy","IGNhc2k=","IE1hdGNoZXM=","cXJ5","X0JPT0s=","IHByZXN1bWVk","IE3DqXQ=","L2l0ZW1z","IENyZWRlbnRpYWxz","XSkuCg==","IEthcmRhc2g=","QWRtaW5pc3Ry","IFNsb3Zhaw==","KCcsJykK","IGNvbnF1ZXN0","UGVyc2lzdA==","IERyYWlu","Ymlq","IGRvdg==","IHPDuGdlcg==","V29uZGVy","QVNFVA==","W21pbg==","Z3VuYQ==","Z3Jvd24=","IH0pCgoK","QVVE","IGJlbGlldmVy","aXNlcnM=","KHNlbnQ=","SmFja3Nvbg==","IHBhaXM=","IGN1ZGFNZW1jcHk=","IGZsYXNoZXM=","YmVyZQ==","IG11bHRpZg==","IENhcmdv","RWxlbWVudHNCeVRhZ05hbWU=","KGVwb2No","IEt1bmRlbg==","UmVjb2duaXRpb24=","IFNldFZhbHVl","IFN1bnNoaW5l","QUNQ","OnN0cg==","IGFtYmlndQ==","IO2VnA==","LWxpbmVhcg==","IFdPVw==","KGN1c3RvbQ==","IGlzRW5hYmxlZA==","QkFU","X2RpYWc=","X0dVSQ==","SGVhdA==","IGFzc2VtYmxpZXM=","IENldHRl","L2NhcmQ=","IERlY2xhcmU=","IHVwaGVsZA==","IENsYXVk","LWZsb3c=","IGhvb2t1cA==","SVJR","RmF0aGVy","RGVsZXRlcw==","K
Sk7Ly8=","IFBUU0Q=","KTsNDQo=","ZWdhbA==","LmFycm93","IE1QVQ==","w7Nq","IG1vdGl2YXRl","IEthdGhlcmluZQ==","LmZyYW1lcw==","IHRoaQ==","PFJlc3VsdA==","LmdyYXk=","IEt1c2huZXI=","IENlbWVudA==","IEJ1cmw=","SW50ZXJ2aWV3","PSciLg==","UE9XRVI=","IENEcw==","IFsmXSg=","IGNoYW5nZXI=","Pj4sCg==","LXdl","IENMSw==","IEFkcmk=","IGNpbA==","PVg=","IHNlbmRv","IENlbHNpdXM=","YmxvY2tlZA==","T3V0T2ZCb3VuZHM=","LiE=","b3Byb2plY3Q=","YW5kZXM=","ZWRpdGluZw==","IHB1bXBlZA==","KCk7fQo=","4Ka/","X0VWRU5UUw==","IEZyaWVkbWFu","ID4v","ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio=","IHRlbXB0YXRpb24=","IElwc3Vt","IENlcw==","IG5vdGljaW5n","X2VsZQ==","QWNjZW50","IE52aWRpYQ==","IGFtdXNlbWVudA==","IGludHJvZHVjdG9yeQ==","CXJldHZhbA==","IGxpbA==","aXJpbQ==","ZW5xdWV1ZQ==","LWhpc3Rvcnk=","IGNvdW5zZWxvcg==","VFJBTlNGRVI=","X1ZlY3Rvcg==","Y2F0ZWdvcnlJZA==","cGVyeQ==","RklMVEVS","KHJlbW90ZQ==","IHNlcGFyYXQ=","IEVtYmVkZGVk","IEJhY29u","dGVycmFmb3Jt","IHJlc3BlY3RhYmxl","aWNoYQ==","YWlj","Kydc","IHN0cmF5","0LXQvdC40Lk=","IEF1ZGl0b3I=","ZW50aWNhdG9y","IGNsb2Fr","IFVOS05PV04=","IEFtZW4=","dm94","YXN0cmVldA==","Li4uXQ==","IGAl","LXByb3BlcnR5","IFF1YWxjb21t","ZWRpdGVk","IGRpc2NyZWV0","LU11c2xpbQ==","LnJlY2lwZQ==","IHZhbmRhbA==","IHXFvHk=","c2VuaGE=","LGlz","IFBvbXBl","IEtuaWNrcw==","KCknLA==","KHRi","IEhJRA==","IHBldw==","IGNhcnJvdHM=","IHBvbGljeW0=","Lmxp","IHR3ZW50aWV0aA==","X3Byb21wdA==","c2NlbmFyaW8=","LkpGcmFtZQ==","IE1RVFQ=","IEluZGl2aWR1YWxz","dG9NYXRjaFNuYXBzaG90","w61zdGljYXM=","IkQ=","IGZvZA==","IHJpY2h0","IFphcg==","IHJlc3VycmVjdGlvbg==","IG1pbGl0YXI=","IE1hbmFnZXJz","X0dSSUQ=","bm9ubnVsbA==","QkVSVA==","T3V0cHV0cw==","ICAgIAoKCg==","IHByZWRlY2Vzc29ycw==","IGlzU2VsZWN0ZWQ=","IGN5YmVyc2VjdXJpdHk=","5YaZ","Lm1j","UXVp","IGFsbGVnaW5n","IHRpYw==","TWFudWZhY3R1cmVy","IEVuaGFuY2Vk","IEJpeg==","IHJlYWRPbmx5","w7Ru","IGx1bWJlcg==","YWVk","IHJhaW5z","cHJvdmlkZQ==","TGF0ZQ==","IHBlZGVzdHJpYW5z","amF2","QWN0aXZhdGlvbg==","J0JyaWVu","IHZhY2FuY3k=","Ly8t","IGJsYWRkZXI=","IGFnaWxl","IHN0ZWFscw==","IHJlZ2lzdH
Jhcg==","IGVsZWN0b3JhdGU=","R292ZXJubWVudA==","J109Ig==","YWxidW1z","ZWxlY3Rpb24=","YWJs","IE9yaWVudA==","IHBpcmF0ZXM=","IGxvb3Bo","CXJlYWRlcg==","IMO6bHRpbW8=","IFBldHJv","INGB0YLRgNCw0L3QuNGG","IHNhbXA=","aW52ZXJzZQ==","LmdyYWRsZQ==","IERvbnQ=","eG9u","IGNyZWFk","ZXJ0aWxpdHk=","cmdjdHg=","IHBvbMOtdGljYQ==","VmFsdWVDaGFuZ2Vk","QXBpUmVzcG9uc2U=","Y29tYm8=","IFVY","IGRhaGE=","J2Fu","LW15","4oCcTXk=","cGVl","bGF0bG9uZw==","XEJhc2U=","Lndpaw==","IFBPVA==","IHB1bmN0dWF0aW9u","cXVz","aW55aW4=","PW1pbg==","IG51Y2xldXM=","IGNvbmNlc3Npb25z","LmF2ZXJhZ2U=","dXNlcmluZm8=","IHRhYmxlc3Bvb24=","IE5laWdoYm9yaG9vZA==","KFRocm93YWJsZQ==","PnY=","b3Z5","WFhYWFhYWFg=","aXN0aQ==","IGJhcnQ=","77u/Cg==","RW5jcnlwdA==","PWVuZA==","IGluY3Vy","IHBlcnRpbmVudA==","X01JTk9S","KSI+Cg==","Y2hpZWY=","IHZk","KGAK","dXJneQ==","YWJ5cmludGg=","IFNoYXBlcw==","IHZhZ3k=","LmRkcw==","bWVtY21w","CUl0","c2VtZXN0ZXI=","IEVtaXQ=","IGluc2Fu","IGJydXNoZWQ=","X0ZBVEFM","ImVycm9ycw==","IGRpc3J1cHRpdmU=","JW4=","IGNvbXBvc2l0aW9ucw==","IGJhY2hlY2E=","IGRpc2FncmVlbWVudA==","UHJvdGVjdA==","TElLRQ==","LkZpbGVOb3RGb3VuZEV4Y2VwdGlvbg==","IHdlaXRlcmU=","IE1vbmFjbw==","Xzw/","IG1vZGVsZWQ=","c3RlZWw=","ZWVudGg=","IFtdKS4=","KHJlZ2V4","ZW5pZQ==","LkZsdXNo","LnBvcHVw","IE92ZXJz","LkRlYnVnZ2Vy","PmA7Cg==","bml0ZQ==","LnF1b3Rl","IGNvZw==","IHdha2Vz","IFdyZXN0bGluZw==","SW50cm8=","IHNlcmRl","IHJldXNhYmxl","IENvbXBvdW5k","SW1wbE9wdGlvbnM=","CUl0ZW0=","IG51bU9m","IENIUg==","IEJvbHRvbg==","UExVUw==","Ym91bmRpbmc=","KCsr","ICIsIjsK","IEd1ZXN0cw==","IGRlcHJpdmVk","IG1lbG9keQ==","WklQ","Pj4oKQ==","IGNvbmNlZGVk","X2RpZQ==","IGpveXN0aWNr","IGFuYXRvbXk=","IFRvb2xTdHJpcA==","IEVub3VnaA==","Iio=","aW50b3No","aGFiaQ==","IFN5cmFjdXNl","IEluY3JlYXNlZA==","TXVz","LnBhdGllbnQ=","IGluY3JlbWVudHM=","IFBJWA==","IGJvb3R5","LnByaXZhdGU=","ZXJ0b2lyZQ==","IGN1dHRlcg==","IGJla2Fu","IGRyYXdlcnM=","X0FMSUFT","QW5pbWF0aW5n","X2Fuc3dlcnM=","LmF0dGFjaw==","d3JpdGVycw==","IGdhYW4=","aWtvbg==","CWNvbnRyb2xsZXI=","IGZhY2FkZQ==","k+WQjQ==","LHN0YXR1cw==","Lm
Zl","IHBvc3Rwb25lZA==","IEZvbnRz","IEJlbmNobWFyaw==","aWRlbnRhbA==","IGNoaWxsaW5n","IEtpZXY=","IGJydXNoZXM=","LXdoZWVs","IEhpcmU=","KHByb2M=","IGNoZW1vdGhlcmFweQ==","INCx0YvRgtGM","IE5vbGFu","KGllcnI=","IEp1ZGU=","LUF1Zw==","dW1ub3M=","Y29udmVyc2F0aW9u","IEJlaGF2aW9yU3ViamVjdA==","YmF1Z2g=","IGd1aXRhcmlzdA==","Lm9mZmVy","IGFjY3VzZQ==","cGFyZA==","cmVmZg==","LlJlYWN0","IHVjaGFy","IG9mZnNldG9m","JHN0YXR1cw==","L2VtYWls","LmNvbm5lY3RlZA==","Lys=","QHFx","YXJhdmVs","IGZ2","LlBlcnNpc3RlbnQ=","ZW5zdGVpbg==","Li4uXQoK","LmdyaWRWaWV3","IEpPQg==","LScuJA==","LmxheW91dENvbnRyb2w=","IGNhcmc=","IEtvdA==","X2VxdWFscw==","IHdpdGhkcmV3","QVRFU1Q=","LWJ1dHRvbnM=","CVVQUk9QRVJUWQ==","IFVJR3JhcGhpY3M=","IFB1YmxpY2F0aW9ucw==","IElOVEVSTg==","IGV0aGFub2w=","w6RuZ2Vy","U0VORA==","CXNsb3Q=","0LvQtdC90LjRjw==","IHBhc28=","X2V4dGVuZGVk","b3J0aGFuZA==","KHNoZWV0","IHByb2NlZHVyYWw=","IGtpZG5hcHBpbmc=","Ly8tLS0tLS0tLS0tLS0tLS0t","W21zZw==","T2NjdXJyZWQ=","QWxpY2U=","IENBU1Q=","IGthdGE=","5rOo5YaM","Y2hlYXA=","aWNpdHk=","IHJlYWRpbmVzcw==","KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio=","IFNZTg==","IE1hZ2dpZQ==","cmljYQ==","IHlp","IFR3ZQ==","aWdub24=","YW5kZW4=","IGpxdWVyeQ==","IHN0YXJ0WQ==","IGF2ZW51ZQ==","QW50aA==","X2NhcHRpb24=","IFJvd3M=","wq/Cr8Kvwq8=","c2VxdWVuY2Vz","0LjRhA==","KCIvIikK","Y3JhdGU=","IFNhZ2E=","SnVk","IGZhY2V0cw==","X3NjYWxlZA==","UnVieQ==","IFBR","IGNydXM=","SXJhbg==","LnNxdWVlemU=","CWZk","IHBlcmNl","IGRhdGFw","Xl5eXg==","X1NDT1BF","IFNhbG1vbg==","IHRhaWxsZQ==","IFZhbG9y","QUdFTUVOVA==","UnA=","IEd1YXJkaWFucw==","IHJlYWRGaWxl","IG5lZ3Jv","IG9icmE=","LlBhcmNlbA==","Q0FDSEU=","cmV0Y2hlZA==","Y3Jt","cXJzdA==","b3VmbA==","7ZqM","Lm5vbQ==","c3NpZA==","IHNhZmVzdA==","LkVycm9ycw==","X3BuZw==","Q29udmVydGVyRmFjdG9yeQ==","PFNlbGY=","IHNlcGFyYXRlcw==","X2pCdXR0b24=","IG1pc3VzZQ==","ZXhjZXB0aW9ucw==","IFt7Ig==","IFBBRA==","562+","a0h6","PWVu","IGjDoG5n","SFo=","IFhhdmllcg==","e2lk","IHN0YWlyY2FzZQ==","dGV4dGZpZWxk","L2R
vY2tlcg==","KHRhYmxlTmFtZQ==","IHRlbGVjb21tdW5pY2F0aW9ucw==","b25zbw==","b2Ns","UGFyZW50cw==","L3BhcnNlcg==","LWRyb3A=","KHN0eWxlcw==","X21vZGlmaWVy","UmVxdWVzdElk","LmJyYW5k","IENvaW5z","IGt1bnQ=","Lkdy","IEhJU1RPUlk=","KGRyb3A=","QnJhZA==","IHNla3Np","X3Nkaw==","IGluc3BlY3RlZA==","cHJlZGljYXRl","LmZp","R09S","IGNvY29h","IElRdWVyeWFibGU=","LS0tPC8=","IGRlcm5pZXI=","IFVzZXJEZWZhdWx0cw==","X1RT","IGVvcw==","IGJsZW5kZXI=","IGxvdWRlcg==","U3BhbmlzaA==","bGluZXI=","XHdpZGdldHM=","IHNjaGVtYXM=","X0NBUFRVUkU=","Lm1pY3Jv","44Kt","IPCfkQ==","IGFuZGVy","YWx0dW5n","ID09Jw==","IGVuZm9yY2luZw==","IEV4aXN0","dXZ3","aXJ0c2NoYWZ0","IEdyZWF0ZXN0","IE1vc3Vs","X3Bv","IHNpbW1lcg==","IHByb2dyZXNzZWQ=","IHJvdGFyeQ==","IG50bw==","Tm9pc2U=","IGNoYXNlZA==","IGluc3RpbmN0cw==","UHVibGljS2V5","IHNuYXBzaG90cw==","IFN1cGVydg==","Lm1hYw==","IEJpYmxp","Li4uKQoK","CW9sZA==","S0VO","IENsaW0=","IFByb2dyZXNzRGlhbG9n","bGljYW50cw==","X3NsaWRl","K2g=","IGVtcG93ZXJlZA==","SW5qZWN0b3I=","IGluZmx1ZW56YQ==","IHBsYW5ldGFyeQ==","V2lsbGlhbXM=","IG1vbmQ=","ZW5hbg==","LnJhbmRvbVVVSUQ=","KFBvc2l0aW9u","IGhvbWJyZXM=","IGluc2VjdXJl","IHZlcmJz","X3JlY3RhbmdsZQ==","SU5TVEFMTA==","IFBhcnNlRXhjZXB0aW9u","X1RB","JGZpZWxk","LkltYWdlSWNvbg==","IEd1amFyYXQ=","LWxpdmVk","X3NvbWU=","IGNsaXBwaW5n","LmdldENvbXBvbmVudA==","LmNsb3Nlc3Q=","LmxpdmU=","IGluY2lk","DQoJCQ0K","IHByb2R1dG9z","X211c2lj","U3FsQ29ubmVjdGlvbg==","IFByZWRpY3Rpb24=","IFhU","LW5vdGVz","IEpld2Vscnk=","cmVtZW4=","KHJlYXNvbg==","U25hcA==","QWZmaW5lVHJhbnNmb3Jt","YW5nZWxvZw==","IGRpY3RhdGU=","IHpvc3Rh","QmFyQ29udHJvbGxlcg==","L3Nob3A=","ZWlk","LXN3","Q291cnNlcw==","Zm9udFdlaWdodA==","IEhvZmZtYW4=","X051bQ==","S1I=","IFdpbGxpZQ==","YXJrYW4=","LXNjYWw=","IGF1ZGl0aW9u","LmRpc2M=","IHR3aXN0cw==","IGRlcGljdHM=","IGJhbnlhaw==","IEtpdHM=","IEhlemJvbGxhaA==","bm9ydGg=","IEdSRQ==","w7Zn","cXVvaQ==","LXRocmVhdGVuaW5n","IHdvcm1z","IFBO","IHNleGRhdGU=","IG1vbnVtZW50cw==","TU1D","Ym90cw==","IFNETEs=","ZGVhdGg=","IHBpdHM=","X2Nob2ljZXM=","KHNvbHV0aW9u","IHByb2NsYWltZWQ=","IFF
pbmc=","IHNzY2FuZg==","c3RyYXRlZ3k=","ZGVhdXg=","IEZpc2NoZXI=","X0lW","IGlud2FyZA==","RGF0ZVBpY2tlcg==","IHNld2Vy","IGV1cm9w","IGhvbWVsZXNzbmVzcw==","LlNwcmluZ0Jvb3RBcHBsaWNhdGlvbg==","IFNwYWNlWA==","IGluZm9ybWluZw==","ICch","IHBsYXN0ZXI=","SW5pdGlhbGl6YXRpb24=","LmJldGE=","IFBlcnNvbnM=","dWdnbGluZw==","IHNoYW1wb28=","IEplaA==","IHNlcnI=","IG1heFNpemU=","IHN0aXRjaGVz","W3BhdGg=","LnJldA==","IFByZXQ=","TmVpbA==","Q29udmVydGVk","IE1hemRh","UE9TSVQ=","VG9vbGtpdA==","IFJFQURNRQ==","Q3VzdG9tQXR0cmlidXRlcw==","YXJjaGl2bw==","LlBhaW50","Z2V0T2JqZWN0","SVE=","LldlYkRyaXZlcg==","IGFudGlib2R5","IExpbWE=","aW5jb3JyZWN0","RnJhY3Rpb24=","IERlYWRsaW5l","c2VuZE1lc3NhZ2U=","Lk9mZnNldA==","ZWRpbw==","INeQ","IHNtb290aGluZw==","LmJv","IENFTlQ=","ZWxhc3RpYw==","LmNoYXJDb2RlQXQ=","UmVmcmVzaExheW91dA==","QUdFRA==","KTtcCg==","IFtdKQoK","IHRhcHM=","RFY=","4oCV","IENveQ==","IG91dHdlaWdo","J2dj","XEV4Y2VwdGlvbnM=","IEdyYW1tYXI=","IEd1YXRlbWFsYQ==","IEd1cnU=","IHRlag==","IGZyaWVuZHNoaXBz","IGNvcGluZw==","KHVwZGF0ZWQ=","X2R4","QW5hbA==","LU1heQ==","IG1hdGNobWFraW5n","IGp1bnRv","UEFDS0FHRQ==","IHJlbnRz","IOiHqg==","Y2FrZXM=","44CCJywK","cmVuZGluZw==","X0ZyYW1ld29yaw==","LSk=","KHVwbG9hZA==","IG9wb3J0dW4=","IGNhdXNh","IHByb2xpZmlj","Um93Q291bnQ=","IG5hY2t0ZQ==","IFNveQ==","U2h1dGRvd24=","6Ig=","X0VYUEk=","IEhhcmJvdXI=","IHRvcmU=","XE1lc3NhZ2U=","L1U=","T01CUkU=","LnNlZ21lbnQ=","IGNvbWVk","cm9tYW4=","IHNlZ8O6bg==","U2lnbWE=","IHNraWluZw==","IFRlcnJhaW4=","IGJlbmNobWFya3M=","IEF0dGVudGlvbg==","IH0qLwoK","IGdlaWw=","IGNhcnRvb25z","IGF0dHJpYnV0aW9u","IHJvdG9y","ZW5oYQ==","IM6z","IHRyYWo=","IGPDtG5n","IHNoYWtlcw==","IENsZW1zb24=","IGJydXRhbGl0eQ==","IDsNCg0K","IGVpZ2h0ZWVu","IEF3YXJlbmVzcw==","KHJlc3Q=","IHZpb2xpbg==","X1JPVVRF","LkZpZWxkTmFtZQ==","IEFkZQ==","aXppYQ==","IEhlbG0=","IHR5aW5n","IFByb2dyZXNzQmFy","YXV0b3I=","IGxvbmRvbg==","Jnc=","Z29v","SVNUUlk=","L0NyZWF0ZQ==","IFVTSU5H","IEdY","IEVGRkVDVA==","RmNu","IEVuY3J5cHRpb24=","Q0VE","ZmluZQ==","LWFycmF5","IHB1c2hWaWV3Q29udHJvbGxlcg==","QCQ=","
VXBsb2FkZWQ=","LXdyaXRl","LmdldFBhZ2U=","X2VzdGFkbw==","QU5UTFI=","IFZpZXdEYXRh","ICR7KA==","IGFsbW9uZA==","IExvZ2ljYWw=","IHNob290ZXJz","IOygnA==","IHB1ZmY=","IHVuY29tbWVudA==","IGN1c3RvbWl6YWJsZQ==","xINy","RGlyZWN0aXZl","CWlkeA==","Q2hhbGxlbmdl","IHN1bW1hcml6ZQ==","IEF2Zw==","LlVzZXJJRA==","LmRpc3BhdGNoRXZlbnQ=","IGNvb2tlcg==","IGNvbm5lY3Rpb25TdHJpbmc=","IHNocmlua2luZw==","amFk","IFRoZW1lcw==","YW5kYXRvcnk=","IGR1YmlvdXM=","IGNlcA==","c3Bpbm5lcg==","IHN1YnJlZGRpdA==","IGlpaQ==","L2NhY2hl","ZGVmZXI=","IHN1YnN0aXR1dGVk","IGd1bm1hbg==","Y2xpbmc=","IOyw","KGN0cmw=","T3JkZXJJZA==","X2VuZw==","IGZpbG1tYWtlcnM=","IGZvcndhcmRpbmc=","IHN0cmFuZGVk","IExlYW4=","IOunjA==","KFVuaXQ=","IGRpZFNldA==","bGFrZQ==","Z3JvdW5kcw==","5Zug","IHVucmVnaXN0ZXI=","IG1pbmhh","IFZlZ2Fu","CWlWYXI=","LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQo=","b3R0bGU=","SVBD","IHByYWdtYQ==","IElJRA==","X01pbg==","JTsiPgo=","X3JhbQ==","ZHJpdmVycw==","IENoaWNr","IGNscg==","X0JVRkY=","INCy0YvQsQ==","TWVyYw==","anV2ZW4=","IHNoaW0=","0YvRhQ==","IHRoZW9yZXRpY2FsbHk=","L2ZvcnVt","IHNwaWRlcnM=","IGdvb3Nl","IFBob3Rvbg==","IHByb2ZpY2llbmN5","IENsZXJr","X2ZpZw==","Q29uY2Vybg==","KGNvc3Q=","IHJlZGQ=","LmVudmlyb25tZW50","Q3JvcA==","IOKJpQ==","eWVjdG9z","LkJhdGNoTm9ybQ==","LWNvbXA=","JGltYWdl","IE5pa29u","IGRtZw==","Wzo6LQ==","UExM","dW5jaW9z","Zm9jdXNlZA==","IHR1bw==","IGh2b3JkYW4=","IGF0dGFpbmVk","IHByb3RlY3Rvcg==","IEthbnQ=","IHNob3Jlcw==","IEV0aGFu","X3NjaG9vbA==","IG5lYXRseQ==","LlNoYXBlcw==","IE5lbQ==","aGNw","LicvJy4k","IE3DqXhpY28=","c3RydWN0dXJpbmc=","IGxha2g=","IGFkcmVzc2U=","JywnIw==","IEhhc2tlbGw=","X0VOR0lORQ==","IHJlcGVudA==","IGN1Y2s=","LkZJRUxE","IFNrZQ==","QEBAQA==","SGl0cw==","IGltcGxhbnRz","IENvbnN0aXR1dGlvbmFs","IFBIUFVuaXQ=","IHRvaWxldHM=","LmFsYnVt","5LiL6L29","CXNldFN0YXRl","KCItLS0tLS0tLS0tLS0tLS0t","LkFtb3VudA==","ZWN0dXJl","IFRob3VzYW5kcw==","TmVpdGhlcg==","IHByZXNldHM=","IEFzc3VtZQ==","KGZhY3Rvcnk=","IGxpY2s=","IGdvYWxrZWVwZXI=","PFN0YXRl","L
XNlY3VyaXR5","X2ll","ZXNrdG9w","IEx2","IFN5bXBob255","LnNhbXBsZXM=","IGh5cGVydGVuc2lvbg==","xYJ1","Lmp1c3Q=","TWVuc2FqZQ==","IT0t","PFRLZXk=","IHNweWluZw==","LGRhdGU=","b3JnYW5pemVk","ICAgICAgICAgIA0K","KGN1ZGE=","X01ldGFkYXRh","dWJpc2hp","LUJlbno=","X0Fzcw==","IEVsc2VJZg==","IGxlc2lvbnM=","IFByZXN0b24=","VGVjaG5pY2Fs","IHBsYXRpbnVt","L3Bp","SW5kZXhlcw==","IHBhcmFwaA==","IG92ZXJ0aHJvdw==","aXBhdGVk","b250b2xvZ3k=","IGRlbW9ncmFwaGljcw==","IGNhbmU=","IHByb2ZpdGFiaWxpdHk=","IGVzdGFibGlzaG1lbnRz","XSY=","OmFic29sdXRl","ZW50cmFkYQ==","VHA=","IHNoYXJlaG9sZGVy","Lidf","5aaC5p6c","bnBq","dnJpcg==","IEVYRUM=","IFBvbGljaWVz","IGZlbGxvd3NoaXA=","IENHUmVjdEdldA==","X3JlY2lwZQ==","X1JFQw==","dW51","IHJvYmJlZA==","IHR1cm1vaWw=","KTo6","LnN0YXJ0RGF0ZQ==","IGV2YWN1YXRlZA==","LWVxdQ==","IGZvdXJ0ZWVu","QFNwcmluZ0Jvb3RBcHBsaWNhdGlvbg==","IOaVsOaNrg==","bmFudHM=","dGhyZW4=","U29ueQ==","REZT","LWNpZ2FyZXQ=","IGFnZ3JhdmF0ZWQ=","IG5lZGVybGFuZA==","IEZ1ag==","dWNlcw==","L3VzZQ==","dW1tZXI=","KFNURA==","6rCE","Kj4m","LnBlcmNlbnQ=","aWFudHM=","IEN0","VkFT","X1RIRU1F","IHNuaXBlcg==","X0VM","LXdvcmtlcnM=","U25vdw==","IEF1cmE=","aWVnbw==","IEdsb2I=","TmFtZWRRdWVyeQ==","X0JH","IExpdmVEYXRh","IFNlbmRNZXNzYWdl","IHJlc3BvbmRzVG9TZWxlY3Rvcg==","ZW5jZXJz","aW5zdHJ1Y3Rpb25z","KEl0","5ZG95ZGo5pyf","IEdvbWV6","Y2hhcmdlcw==","LkdlbmVyYXRlZFZhbHVl","IE1hY3Jvbg==","KFBPUlQ=","IFByb2Nlc3Nlcw==","Lm9uUmVzdW1l","IGZpZQ==","QnVpbGRlcnM=","KWdldA==","X3dhbGxldA==","IGNhbmM=","IE1vYmlsaXR5","IGFsYXJtcw==","cm9zaXM=","YW1hw7Fv","IHBpcw==","IOODuw==","U2hh","IGNvbmZlc3NlZA==","KElORk8=","KCcsJw==","X1NlcnZlcg==","IGJsYXN0ZWQ=","IEZhcm1lcnM=","cnV6","Y2tlZGl0b3I=","X0lNUExFTUVOVA==","IG1vdHRv","IENBUkU=","IHlkaw==","Qm9uZQ==","IGFkZW3DoXM=","KyIvIis=","UHJvcFR5cGVz","X1Na","LnBhaW50","LnBpeGVs","IE1lc3NhZ2VUeXBl","IHR3ZWFrcw==","YC4KCg==","VmVyaWZpY2F0aW9u","bmVjaw==","YmVycmE=","IG1pbmRmdWw=","U3Vydg==","IDotCg==","IGFueXdheXM=","IEFkbWlzc2lvbg==","YWNjZXNzaWJsZQ==","RmxhdEJ1dHRvbg==","ICInIik7Cg==","IGhhaGE=","VG
9Qb2ludA==","IGJ1cmdlcnM=","Z2V0U3RhdGU=","XEhlbHBlcg==","IEZVTkNU","IEVMRU1FTlQ=","IENFUlQ=","IEFDQ09VTlQ=","Y2hhcmdpbmc=","X2NhbmRpZGF0ZQ==","X3JlY2VudA==","IEluc3RydWN0b3I=","IGRydW5rZW4=","WVNRTA==","b3JhdGl2ZQ==","IjoiIg==","IHRhZ05hbWU=","X05FRw==","IHFw","IFVuZGVmaW5lZA==","IGdyZWFzZQ==","CSAgCQ==","IGVhZ2VybHk=","VGV4UGFyYW1ldGVyaQ==","ZGlzdHJpYnV0ZWQ=","QWRtaW5pc3RyYXRvcg==","RGlzdHJpYnV0aW9u","IERlY29tcA==","IFRyYW5zZm9ybWVy","LmJ0blNhdmU=","IEdvcw==","KEVudW0=","Y2Fpcm8=","LWNp","L3JlcG9ydA==","IFBvc3Rlcg==","X2RlcGVuZGVuY3k=","IGV4cGxvaXRz","c2V0Rmxhc2g=","IHh0","IGpld2VsbGVyeQ==","IGRhaQ==","X1JBTQ==","IGJlcnJpZXM=","IGdyYW5ueQ==","RmF0YWw=","w6lhbA==","LW1vc3Q=","LlZpc3VhbEJhc2lj","IFBlbmQ=","YmVp","amFr","OyovCg==","Qm95","PlNlbGVjdA==","aW5kcmljYWw=","VGVjaG5vbG9neQ==","IEFsbGlzb24=","ZGF0YXR5cGU=","J2Nsb2Nr","IGtvc3Q=","IGJham8=","LkNvdW50cnk=","WmVuZA==","LndyYXBwZXI=","4L0=","IEZpbGlwaW5v","b2NyZQ==","U1NI","IFNBTVBMRQ==","X2luaXRpYWxpemVk","KTs/Pgo=","IHBvcm5vc3Q=","ZXNhbg==","IEN1dHRpbmc=","IG1peGVz","X2FnYWlu","IGZvcm11bGFyaW8=","W1Y=","IHRlbGVmb25v","L3Vz","IGxvYWREYXRh","LnJlZmVyZW5jZXM=","IG1hcFZpZXc=","KyJf","IFNRTGl0ZURhdGFiYXNl","aXRvbg==","Q29sdW1uVHlwZQ==","IEV2ZXJ0b24=","LlJlc3VsdHM=","L25vdA==","IGdldEZpbGU=","aGVyaXRhbmNl","IGdldEhlaWdodA==","JHVzZXJuYW1l","d2l0aGRyYXc=","Xyk7DQo=","LnV0","IFFBcHBsaWNhdGlvbg==","dXJuYWw=","LWRvd25sb2Fk","YnVyZ2Vy","cHJlY2k=","IFRoYW5rZnVsbHk=","LkVWRU5U","IGdyZWF0bmVzcw==","IGxvb3NlbHk=","IG1hc2g=","IGdlaGVu","X2FudA==","IGltcGVuZGluZw==","LmlzUHJlc2VudA==","IHN0YWlucw==","SU1T","LmJhY2tlbmRz","IGlycmlnYXRpb24=","IFRhdA==","L3Rlc3Rz","IEtpbmdzdG9u","LnRyYW5zbGF0ZXNBdXRvcmVzaXppbmdNYXNrSW50b0NvbnN0cmFpbnRz","IHZvbWl0aW5n","LXJlcXVpcmVk","IGJsYXpl","IFN0YWZmb3Jk","UklE","L2Z3bGluaw==","IGthbGU=","c29sZA==","KHByb2dyZXNz","KGNoYXJ0","IGN5c3Q=","IGRpbGlnZW5jZQ==","L21w","IGNsZXJneQ==","IEJyb3dzZXJSb3V0ZXI=","IEFQSw==","IENPTlRBQ1Q=","QmFySXRlbQ==","LURpc3Bvc2l0aW9u","IE1vdG9yb2xh","X3NhbA==","IFdvb2Rlbg=
=","IFRIRVk=","IGNvbW1lbnRhdG9ycw==","IGNvbW1lcmNpYWxz","PW1vZGVs","LiIpLAo=","IFBsdWdpbnM=","ZGFpbg==","aGVhZGVk","IENvb3JkaW5hdGVz","SmFuZQ==","IFByZWZlcnJlZA==","IHBvZGVtb3M=","LmlzQmxhbms=","IFN0YXA=","IHdzcA==","IENPTEw=","X2JpZA==","IHByb2Jlcw==","dWFuaWE=","KHN5bQ==","IGN1ZXJwbw==","IG1hbmlwdWxhdGluZw==","IGFtYXppbmdseQ==","LkRBWQ==","dW1wdGVjaA==","YWNvYmlhbg==","VGVybWluYXRl","IHN0YXRpb25lZA==","U2V0QnJhbmNo","U2NyZWVuc2hvdA==","ZXN0aGVzaWE=","IHdhbGtlcg==","I2Zyb20=","Y29vcmRpbmF0ZQ==","X2ludGVyZXN0","IGhlbHBsZXNz","CXB1Yg==","bmdh","X0V4","IG53","IHRleHR1YWw=","IHBsdWdz","IG1pbmlvbg==","bWFyZXM=","PD4K","QUNB","Q29tcGFueU5hbWU=","KGVj","IExhbmRzY2FwZQ==","X1BST1ZJREVS","Y3c=","lIQ=","QWNjb3VudElk","JDo=","IFBlcnNvbmFsbHk=","cHJvcGVydHlOYW1l","IEt1Yg==","J2k=","IEdpdWw=","IHByaW9yaXRpemU=","Rk9STUFOQ0U=","IFBhcmFkZQ==","KVwK","c3RkYm9vbA==","IGFsZXJ0RGlhbG9n","IExlaA==","LmNhdGFsb2c=","IHdlYmluYXI=","IGltcG9ydGVy","cHJvamVjdElk","VFlQTw==","X18NCg==","R1c=","c3VtbWVy","IHNpbmlzdGVy","LmZhaWxlZA==","IGJlc29pbg==","aXNtYW4=","REVTVA==","IG5o4bqtcA==","IG1vxbxuYQ==","X2luc3Ry","IHBhdmVk","IHByZWZpeGVz","IHJhbXBhbnQ=","IHlBeGlz","IOazqA==","X21pZGRsZQ==","IHNjaG9sYXJseQ==","IHByb3N0aXR1dGVz","IG1vcmFsZQ==","LnBlcm1pc3Npb25z","LmdldExpc3Q=","IHJlamVjdGluZw==","IGxvb3Bpbmc=","IFNwZWNpZmljYXRpb25z","IGltbWVuc2VseQ==","IE1lZGlhbg==","KGNoYWlu","IGNsaWNo","L2ZsdXR0ZXI=","YWNm","LnVybG9wZW4=","dXR0ZXJzdG9jaw==","IHNwZWN0cmE=","IGFkbWly","L21heA==","LkVtaXQ=","KHdlaWdodHM=","acSZ","SW5zdGFsbGluZw==","SnU=","IEZlbGw=","IEZSRQ==","LmRlbg==","IEJpZ0ludA==","Ij5A","ICopOwoK","IEJpb2xvZ2ljYWw=","IHBhdGVudGVk","LnBhZ2luYXRpb24=","LnJvbGw=","IER1bA==","IGRlc2Fycm9sbG8=","UmVnYXJkbGVzcw==","mOydtA==","IHJvYmU=","0J3QtQ==","IEJveWQ=","LyoqKioqKioqKioqKioqKioqKioqKioqKg==","cmVjZWlwdA==","IEFzc2lnbmVk","YXR0ZW5kYW5jZQ==","LWNob2ljZQ==","ZXRzeQ==","X2Vsc2U=","LG5leHQ=","X2V4aXN0aW5n","ICcnKSwK","IGxpYmVydGlu","dHJhaXRz","YXR0ZQ==","Q29tcGFyYWJsZQ==","IENvdg==","IEFkb2xlcw==","L
HRoZQ==","IExvYWRlZA==","fHI=","PWluZGV4","IEdhc3Q=","IGluamVjdG9y","CXN0b3A=","LWdvb2dsZQ==","IGZldGFs","IGFsbG8=","eWxlZnQ=","Z2V0UGFyYW1ldGVy","4oCd4oCU","X3NlY3Rvcg==","LlV0aWxpdHk=","b3Njb3Bl","LmVhc2U=","IE1hZ25ldGlj","QXJyYXlPZg==","IGZlYXJmdWw=","IEluZmVy","IEZ1aw==","Sm9obnNvbg==","JGFycmF5","IHNhaXM=","X2NvbnRy","RGVzY3Jp","IERldGFpbGVk","X2xlYXZl","X1JPVA==","IG7DpGNo","IGthbWk=","RENBTEw=","OmVx","IG1vbms=","X29ianM=","KFNlcnZpY2U=","ZmluYW5jZQ==","IHBvZGVt","X3Jlc3RvcmU=","IGRlY29yYXRvcnM=","IGFkdmlzaW5n","INC/0LDRgA==","LnBlcm0=","IEhhaQ==","IGZr","dW50ZWVycw==","IFJUV0Y=","X2l4","QUNT","IGJyZWFrb3V0","ZGlyZWNjaW9u","IFN1bnNldA==","X2Z4","b2xrYXRh","LXJhZGlv","SGV0","LnV0aWxpdGllcw==","X2Jhc2lz","KGtpbmQ=","IENvbmM=","VGh1bWI=","IE1pY2hl","ZGVsaXZy","IGd1dGU=","IEZpbGVQYXRo","IFRyaWJl","XCIp","X2N1ZGE=","RGlmZmVyZW5jZQ==","IE1vbnN0ZXJz","IHNldFR5cGU=","LkNvbnRlbnRUeXBl","IGR1bQ==","RW52ZWxvcGU=","YWd0","IHVubG9hZA==","X2NoZWNrZXI=","IHJlc3Rv","X3Blb3BsZQ==","UHJpY2Vz","UHJvZmlsZXM=","KClc","RlVO","ICIjIg==","IFBhdHRlcm5z","IFNQRA==","X1JPV1M=","T3JpZw==","YmxhZGU=","IGzDqQ==","JWk=","Kysr","TGlmZWN5Y2xl","LS0tLS0tLS0tLS0tLS0tCg==","VGFy","VGhhbk9y","JnE=","IGNyaXRpY2lzbXM=","LXBo","RWxlbWVudEV4Y2VwdGlvbg==","X2d1ZXN0","IOu2","X0Fz","IENhcnJ5","X0JJRw==","YWtldXA=","X3JldHJ5","IG7DqWNlc3M=","IE1JU1M=","aXN1","IFNwaXJpdHVhbA==","XyRf","IHJlZmxlY3Rpb25z","PHQ=","IGZ1bsOnw6Nv","IG1vbmFyY2g=","IFBhdGVs","X3ZvbHRhZ2U=","IHJhaW55","Y291cnQ=","IHVsdHJhc291bmQ=","aU9T","X0FMV0FZUw==","V28=","X0JMRU5E","b2tzZW4=","IHRyYXZlbGVy","IGRhdGFUYWJsZQ==","c2V0Q3VycmVudA==","V29ya2Zsb3c=","LnllbGxvdw==","XSkt","QUJTUEFUSA==","X2l0ZXJhdGlvbg==","0LTRgA==","IHViaWM=","IG1lYXRz","L2Vt","IERpc29yZGVy","IGVudmlhcg==","U0VP","IGhlYXZlbnM=","X3N0dWI=","IGFkcmVzcw==","IFRyaWU=","IExpbmRzYXk=","bGVp","IHBsYXRh","LnNldHRpbmc=","IGVsZWs=","ICgkew==","QXV0b21hdGlj","IGRvd25zdGFpcnM=","UElY","aWNpb25hbA==","YWJhbA==","LXN0b3JhZ2U=","aWNoaWVy","IEFscGhhYmV0","LGxhYmVs","QAo=","IGludGV
zdGluYWw=","IHZhcmE=","Lm1h","IHByb2du","IG5lcGhldw==","VGltaW5n","Y2xhc3NuYW1l","IGxvY29t","IFNhbWFudGhh","IEFjY29yZGluZ2x5","IFhDVGVzdENhc2U=","IFBsYWlucw==","IExlbmlu","bm9w","IFR5c29u","IHJlbmFs","b2luZQ==","KFRlc3RDYXNl","IExvbWI=","QmFuZw==","IHZvbHVt","X2dlbmRlcg==","IGx1dA==","IO+8","Q29uZmlndXJlcg==","IHN0cm9rZVdpZHRo","Lkh0dHBTZXJ2bGV0","fHg=","LkpTY3JvbGxQYW5l","IGNvbnNvcnQ=","LmJ1bXB0ZWNo","dHJpZGdlcw==","IGJlbmVmaWNpYXJ5","PXJlcXVpcmU=","cmVuYw==","IE9V","ZW50YXJpbw==","IHVyZ2Vz","4oCUbm90","Q2FtcGFpZ24=","ZHJl","IFJpdmVyc2lkZQ==","CXRi","IG91dHB1dEZpbGU=","IGFic3Q=","IHN0cnVjdHM=","IHJ2YWw=","XCI+Ig==","IGFjcXVpc2l0aW9ucw==","QkxBQ0s=","IHRydW5j","IGFubm90YXRlZA==","c2V0VXA=","VE9LRU4=","IENvY2E=","RGlzYXBwZWFy","OnZhbHVl","IGFpZGVk","dHRs","bHV4","IGFjdWVyZG8=","IEZpbmdlcg==","Lkdlb21ldHJ5","XScpOwo=","Lmdm","VFhU","IFNjb3RpYQ==","YXZyYQ==","IHZpcA==","IHdob3BwaW5n","LWdpcmw=","IGN1cnNlZA==","XVst","IGNpcmN1bGF0ZWQ=","dW5jdHVyZQ==","b3JtYW4=","IG1BZGFwdGVy","IOKAlAoK","RmlsZU1hbmFnZXI=","KGlQYXJhbQ==","SW1hZ2VCdXR0b24=","REFR","QXJtb3I=","IHNwYXQ=","LmpzZGVsaXZy","IG1pc29n","LmVjb3Jl","J119Cg==","aW1wb3J0cw==","IGRpbm9zYXVy","LUZyZWU=","IGFubm9u","IHRyaWJ1bmFs","WWE=","Lmd1aWQ=","bW9zdGx5","PT09PQo=","IGltYWdlbQ==","U3VpdA==","a2Fz","IENoYW5uZWxz","QnVkZ2V0","IERpdmlkZQ==","amVt","IEdyaQ==","IGluZGljYXRpdmU=","XEZhY3Rvcnk=","LnJlcG9zaXRvcmllcw==","IEFNUA==","LnNucA==","IGHDpw==","Ims=","IMK1","ZGVjb2RlZA==","X2FyYw==","LUNsYXVzZQ==","IEFkag==","IG5ld0FycmF5","KEdFVA==","IGxhdGlu","IHd6","OnVpbnQ=","5Yir","Ii4u","Q29ubmVjdGluZw==","ZW5ub24=","5bm2","IFNlcw==","IGJlbG9uZ2luZ3M=","Kycm","CXNldHRpbmdz","SU5W","IHDDqQ==","IGFkdWx0aG9vZA==","YW1ibGU=","X21hc2tz","LXJlc29sdXRpb24=","cmF0cw==","IO2BtA==","IHZvZw==","IFNobw==","IENvdmVuYW50","IHJlbWluZGluZw==","b3JuYWRv","aWFk","5byC","Q3JlYXRpdmU=","IFNUWUxF","IGFub21hbHk=","XEFwcGxpY2F0aW9u","IG1hbmlmZXN0YXRpb24=","IE5hbm8=","TWFwVmlldw==","aWRlYWw=","YWNoaW5lcnk=","IFZhdWdo","cHJpbnRlcg==","VmVyZGFuYQ==","L
2NvbXBvbmVudA==","IGFkZENoaWxk","IGxlYXJuZXI=","IGRlY3J5cHRlZA==","IHRpZ2h0ZXI=","5p2f","IGplag==","IC4KCgoK","IExvYmJ5","bGVw","w6Rubg==","bGVpZ2g=","L3JvdXRlcw==","IGNhbm9weQ==","IEZpc2NhbA==","Ojsi","IGJ1cmRlbnM=","L2Z1bGw=","IENTUg==","LlNoYXJlZFByZWZlcmVuY2Vz","L3RyZWU=","IGRyb2l0","SW1wbGVtZW50","R2V0Q3VycmVudA==","KHB1c2g=","JHg=","0Y/Qtw==","QUNJVFk=","PT09PT09PT09PQo=","amM=","X2hyZWY=","LmdldFJvb3Q=","IEtE","KGxz","W2NudA==","IGRhbGw=","KGJw","IEVX","S2V5RXZlbnQ=","bG9iZQ==","IGh0bWxlbnRpdGllcw==","IGZhbHRh","IHZhbHZlcw==","IHNpemluZw==","UG9ybg==","IHNob3dFcnJvcg==","IEZyaWQ=","IMOH","LnJhbmRu","IHRhbnRy","IHNheA==","dXJvdmlzaW9u","dGhlb24=","X1JDQw==","eEZE","SW5pdFN0cnVjdA==","IGNhbm5lZA==","IHF1YW50aWRhZGU=","LldBUk5JTkc=","IEJyaXR0","LXJlZ2lzdGVy","YWN0aXZlbHk=","IE5hdGFsaWU=","44G/","IENPTk5FQ1Q=","emVr","IG1pbGxvbmVz","XWludA==","ICcsJyw=","IHByaW4=","IjpbLQ==","IC8vLg==","IGludGltaWRhdGluZw==","cmF6aW9uZQ==","LmlibQ==","IEpha2FydGE=","0LzQtdGA","IGxvYWRDaGlsZHJlbg==","X1VQTE9BRA==","IFdlZWtz","IGdldFRleHQ=","IPCfkg==","IF1dCg==","IENvc3Rz","xJlw","cGF5bWVudHM=","Lk1vdmll","bGg=","tIg=","X2NlcnRpZmljYXRl","PXE=","bGlicmFyaWVz","IEFlcg==","YXVzcw==","CWZhaWw=","T1VORFM=","c2VuZEtleXM=","IHNjYW1z","d2FydHM=","SGlzdA==","IEVzc2V4","IGZ1cnk=","IHRpdHJl","IENvcGVuaGFnZW4=","IHByZWRlZmluZWQ=","c2Nw","c2VycmF0","LmVuc3VyZQ==","aWxlZQ==","TWVyaXQ=","X1VOTE9DSw==","IENvcnJlY3Rpb24=","Tm9ybWFsaXphdGlvbg==","IOS/ruaUuQ==","IHN0b29s","IOWIoOmZpA==","U2hvcnRjdXQ=","Y2hvc2Vu","IGJ1bGx5","IGZ1bmNpw7Nu","44O844Or","IOeUn+WRveWRqOacnw==","LmFsaWFz","PlRvdGFs","IFNURU0=","cGVuZw==","Y2FsZXI=","cGVyZmVjdA==","IGJvbmRpbmc=","UGhvbmVz","IHB1bHA=","67aA","SUVXUw==","IERlZXI=","X0xDRA==","IENvbmNvcmQ=","V2l6YXJk","IG9mcmVj","IEVtZXJhbGQ=","dGVuZXNz","bmF2aWdhdG9y","VGhlb3J5","IGd1YXJkYXI=","IGZ1bGZpbA==","IFVuYXV0aG9yaXplZA==","IEJvdXQ=","CWhvc3Q=","IFJpYg==","KGZ0","RG9jcw==","LmdldEJvZHk=","5b+D","IFJpdmVyYQ==","IHdhdmluZw==","IHBlcmZpbA==","Qm91bmRpbmdDbGllbnRSZWN0","L
mZh","cGFnZWQ=","IEFmZmlsaWF0ZQ==","IHByb2xldA==","fS0+ew==","KHNjb3Jlcw==","IHZpdGFl","e05hbWU=","c2NoZWR1bGVy","X1NBTg==","IE5lYw==","IEJlZWY=","X3Rj","TElO","IEV2ZW50VHlwZQ==","IEJ1ZmZlcmVkV3JpdGVy","IHNvZnRlcg==","IFZvdGluZw==","IEdlc3R1cmVEZXRlY3Rvcg==","IHVuc2Vlbg==","IFNDTw==","IGVsbw==","Y29tYmluZQ==","X21ha2VDb25zdHJhaW50cw==","IHVuZGVyZ29uZQ==","IE9mZmljaWFscw==","LG9wdA==","IGxheWVyZWQ=","ScOTTg==","IGJhbmtlcnM=","IHNlZ3JlZ2F0aW9u","IHJ1c3NpYW4=","IHZlbnRhbmE=","Z2V0S2V5","U2FudGE=","LlRvb2xTdHJpcFNlcGFyYXRvcg==","IEFlcm9z","LnB1dEludA==","IGluZm9ybXM=","X2JpbGw=","66aE","LnNldE1heA==","IH0+Cg==","IElQUw==","IEFsaWM=","In0KCg==","IHVzaGVy","IE5ndXllbg==","IGFic29sdXQ=","IGd1YXJkZWQ=","IFJlYmVs","IFp3","IEFubnVuY2k=","IHByw6E=","YWJjZGVmZ2hpamts","IFZlcmlmaWVk","W2l4","IHRpZXJz","w6J0","LiIpDQo=","aWp1","bGl2aW5n","R1BT","LlRlc3RUb29scw==","U2l6ZVBvbGljeQ==","IG1hc3NhZ2Vz","YXNzZXJ0SW5zdGFuY2VPZg==","IHBvc3PDrXZlbA==","IGJ1c2M=","IEp1ZGFpc20=","IGluZGlzcGVuc2FibGU=","IE1vc3RseQ==","SVRB","IGdldENvbnRlbnQ=","QnJvd3NlclJvdXRlcg==","LWNvdW50ZXI=","IG9idGVu","IC8+KTsK","0LjQuw==","aGVhZGxpbmU=","KGhvbWU=","YWxpY2U=","bGRyZQ==","X01vZHVsZQ==","Q29tcGFuaWVz","TlBD","IHRvcnNv","LmNvbnM=","CWFkZHJlc3M=","X3B1cmNoYXNl","IEJhcmQ=","Z3N0","LWFuaW1hdGlvbg==","X3BhaWQ=","LnNwZWNpYWw=","IGRlbGlt","IHRha2VvdmVy","KGhhbmQ=","ZW51aW5l","LWdyZXk=","IEFCSQ==","U2Vzc2lvbkZhY3Rvcnk=","aW5zdGFsbGVy","X0RJU1RBTkNF","IEZhdm9yaXRlcw==","oIA=","Jz57","IExhdXJlbnQ=","0YfQtdGC","IHN0cmlwc2xhc2hlcw==","IGVzdGFiYQ==","JnQ=","LnBhbg==","IFBBUlRZ","IEJhbGk=","Y3Np","KG1lbW9yeQ==","IFRvZG9z","IFNPQVA=","YWduZXQ=","CWJlZm9yZQ==","T3B0aW9uc1Jlc29sdmVy","aWJlbg==","INmF2YY=","IGFkZGl0aXZl","IE1lbGVl","IE1hbml0b2Jh","IFBlcmNlbnRhZ2U=","PSgt","LmtpbGw=","IGx4","YW5jYQ==","IGZvdG9ncmFm","IGJsYW5j","IFJlc2lkZW50cw==","cGluaw==","SEJveExheW91dA==","LnVuaW9u","IEhZ","IGNvbnRlbnRWaWV3","LWZhdA==","CWhhcw==","66OM","IHdoaXBwZWQ=","dmVuZG9ycw==","dWJyZQ==","SVRIRVI=","LmZ1bmN0aW9uYWw=","INCy0LXRgA=="
,"Q2FuY2VsZWQ=","LWNu","SW5PdXQ=","LlJvd1N0eWxlcw==","IHRyYXRh","IEluZG9vcg==","LWZhc2hpb25lZA==","IEJvb3Ro","LkxhYmVsQ29udHJvbA==","IHBvcGU=","IENhcm5lZ2ll","bmVyZ2ll","IEJY","44CCIiwK","IFdlYnN0ZXI=","CWRpdg==","TmFycg==","IGNvbmp1Zw==","a2lk","IG1vZGVyYXRpb24=","IGFteQ==","IFNvbHZl","VklD","IEVa","aWxsYWM=","IENpcGhlcg==","IEFjY2VwdGVk","TEFCRUw=","IHdyYXRo","IG1pblZhbHVl","IGthxbw=","IERhdWdodGVy","KS5e","KGRj","IHJlc29sdmVz","c2Nzcw==","YWJvdXRz","dWx0aXBhcnRGaWxl","IGZlYXRz","IGxhdW5kZXJpbmc=","IGNvbXBhw7E=","IHNlZ3VyaWRhZA==","IGhvYmJpZXM=","LWZhY2luZw==","InZhbHVl","Z2V0SW1hZ2U=","U3FsU2VydmVy","IHdpdGhTdHlsZXM=","PkRhdGU=","IEV4cGVk","JGpzb24=","6ZO+","IEFDVElPTlM=","U2Vuc2l0aXZl","Ymxhc3Q=","IMO2ZmY=","ZnRl","Q1RTVFI=","IExvZ0xldmVs","Y29udHJhY3Rz","LmRqYW5n","Ij4NDQo=","RVRZUEU=","IG9iamM=","X1NPVU5E","X3NwYWNpbmc=","X2NsYXNzaWZpZXI=","IHJvYw==","Q2xhc3NpYw==","IOuztA==","X2ludmVyc2U=","LWFjcmU=","IEZJTA==","IERWRHM=","IHN3YWxsb3dlZA==","dmlsbGE=","IFJlcGxpZXM=","RmlyZWJhc2U=","IHBoeXNpcXVl","CXRoYXQ=","IFJlc2l6ZQ==","Pj4+Pj4+Pg==","TmVhcmx5","LmFydGlzdA==","LXs=","Pz4NCg0K","Lmxy","Lmly","KFsk","aWFubmU=","CW9i","LCcl","IGtuZXg=","IGNvcnJv","IE93ZW5z","PW5pbA==","bGF5cw==","YXBn","w5Y=","RU5P","SGVucnk=","SnVzdGlu","ZWxlY3RyaWM=","IE5vcmRpYw==","5oyH","IGV4Y2x1ZGVz","RXVyb3BlYW4=","IHRlbnRz","KFN0cmluZ1V0aWxz","KHBlZXI=","eXN0b3Jl","UG9ja2V0","ZnVlbA==","ZXR1cw==","IE1hcmlu","0YDRg9C6","6K+E","IFBlbnM=","IGluZWZmaWNpZW50","IGV0ZXJuaXR5","Licm","IFBhY2thZ2Vz","IEFwcENvbmZpZw==","IG11bHRpZA==","Y3Vsbw==","IGJvcnJvd2Vycw==","IERlYmJpZQ==","IGZyb250cw==","Sko=","ICIuLi8uLi8uLi8uLi8=","ICIrCg==","PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0=","IEdhdmlu","IG1pc2g=","4pWR","X0FUVEFDSw==","SW5kZXBlbmQ=","4K+N4K4=","w6Fm","Z2Fycw==","IFBhcnRpY2lwYXRpb24=","VmVyYm9zZQ==","U3By","U3Zn","KFZhbHVlRXJyb3I=","IHJlY29uY2lsZQ==","CURCRw==","bWVldA==","IExvZ2luUGFnZQ==","LXVudXNlZA==","IGpvbmc=","IGFuY29yYQ=="
,"INij","Plo=","PXc=","IFJlbm8=","dmll","b3Rpb25FdmVudA==","IExpc3RUaWxl","X1J1bnRpbWU=","IHVwaG9sZA==","IE9idGFpbg==","cHJvdmlkZWQ=","IERhdGVQaWNrZXI=","IENHSQ==","IEJsYWNrQmVycnk=","YWNobw==","IElzYWlhaA==","5pW0","IEFiZHVsbGFo","IHVwcA==","IHVybHBhdHRlcm5z","CXNpemVvZg==","IHBpc3NlZA==","IHByZWZlcnJlZFN0eWxl","QVBQRVI=","IFZC","IFRlcmVzYQ==","b2duaXRv","RU1Z","IGVsZWdhbmNl","IENsYXl0b24=","YXRpdm9z","IEFuYWxvZw==","IGdhdXNzaWFu","IEhpYmVybmF0ZQ==","W11b","IHN3ZWV0bmVzcw==","IE5pZWxzZW4=","IER1dGVydGU=","KHNlbA==","LCs=","IGV4dHJhb3JkaW4=","Zmxha2U=","W0RvdWJsZQ==","Ly8vDQo=","IG11Y2hhcw==","IEJyb2FkY2FzdGluZw==","QXNzb2NpYXRpb24=","ZXhlcmNpc2U=","LlJlbGF0aXZl","IHViaXF1aXRvdXM=","U0JBVENI","xLFuYQ==","LWZvb2Q=","IGNyeXN0YWxs","0YPQsQ==","ICd+","INCR","IGR1bms=","IHpp","IE11Zw==","IGRlY2VwdGlvbg==","IEVtYWNz","CiAgICAKICAgIAo=","IMSRxrDhu6Nj","IFdvbHZlcw==","YW1lbnRp","ICcpWw==","Zm9ybWF0cw==","UmVjdg==","RGV0YWlsZWQ=","KEhXTkQ=","X3RyaWFs","YWdyYW50","T20=","Y29uc2Npb3Vz","IG9zcA==","cXXDqQ==","IGdvbg==","IG1lcmVrYQ==","YXJlbmRyYQ==","TWluZQ==","LmxpbmtlZGlu","IGZpZm8=","Lm1vbml0b3I=","IHJ1bmU=","bW5vcA==","IHNwZWN1bGF0ZQ==","ZWds","IHZhc2N1bGFy","LnRlY2g=","IG1hZ21h","IGxlc3Q=","dW1hbm4=","IERyaXZlck1hbmFnZXI=","IG9ydA==","IGxpbmdlcmluZw==","IG9zdHJlYW0=","IHNwYXJrbGluZw==","LmNvbm5lY3Rvcg==","IHRhaWxz","IGtlcm5lbHM=","VVNFUk5BTUU=","CWNj","IG9uU2VsZWN0","L01QTA==","dGFwZQ==","LmRqYW5nb3Byb2plY3Q=","R2VuZQ==","4oCZaW4=","L2ZpbHRlcg==","LWVudmVsb3Bl","IGFwcGxhdXNl","IHJlZ2lzdHJvcw==","IENvcnk=","b2ZmbGluZQ==","LXNob3Q=","bGVzYw==","b3RlbnQ=","IG51bWVyYXRvcg==","LmVmZmVjdA==","cGxhY2VtZW50cw==","IEFGQw==","LlNlcXVlbmNl","IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0K","eW50aGlh","IEdyaWZmaXRo","ZWxtYW4=","c2V0RGVzY3JpcHRpb24=","IE5pZ2h0cw==","Lm9yZGVycw==","IGAsCg==","IFNhbGFk","amlhbmc=","IHJlY3Vy","IFNUQVRJQw==","LXNwb25zb3JlZA==","eWxlbmU=","LGVtYWls","X18pKQ==","KSIpLg==","Q0VMTA==","YW1tZW50","TEFZ","L
HN0ZA==","LnByZWY=","LkNvcg==","cmVkbw==","IEZ1Y2tlZA==","IHJ1c3M=","IGVzdGFibGlzaGVz","bnZhcmNoYXI=","LkdldEZpbGVOYW1l","IHBlbWI=","IFNhdWQ=","X3BhY2tldHM=","Lmludm9pY2U=","LmdldFRvdGFs","SG9tZUNvbnRyb2xsZXI=","IHTDtg==","YWdoZXI=","LmVudA==","LkFic29sdXRlQ29uc3RyYWludHM=","IGdlbnVz","IEJhYnlsb24=","IC4uLy4uLw==","IE1pZG5pZ2h0","IHdn","IGRhbmNlcg==","LWltbQ==","ZGlyZQ==","aGF6aQ==","Y2VydGlmaWNhdGU=","IG1EYXRh","IGN1cmVk","c3Zu","IkI=","aWJyZQ==","IGRyYWZ0cw==","Q2FwaXRhbA==","IGNvbmNpc2U=","IFBlYWNo","IHxc","IHBwbQ==","X2NvbnRhaW5z","QXV0b3I=","QXV0b1NpemU=","X2xi","IHNvbGVtbg==","IGZpbmdlcnQ=","IEluZGljYXRvcg==","IFN2","UGFyaw==","JHR5cGU=","X01JU1M=","YW5udWFs","UGFpZA==","bWFzdGVycw==","IFdE","IHZ1ZWw=","IGVqYWM=","CWdsdXQ=","IHVuZmluaXNoZWQ=","ZXN0ZWVt","Z3JvdXBCb3g=","UmVtb3Zpbmc=","IGVpbmlnZQ==","IFNjcmlwdHM=","Z2V0dG8=","LkhhbmRsZUZ1bmM=","Il0pLA==","IGRpc2FkdmFudGFnZXM=","LWZyb250","PnA=","c2V0T25DbGlja0xpc3RlbmVy","IGxhbmRsb3Jkcw==","IE3DvA==","IHByZXByb2Nlc3Npbmc=","KX0+","LWNvbnRleHQ=","LGJvb2w=","UVVJVA==","ICIpIik7Cg==","IFdlYnNpdGVz","IENoYXJsb3R0ZXN2aWxsZQ==","TGF0Y2g=","LmRpcmVjdGl2ZQ==","IEh1ZmZpbmd0b24=","X2RpcnR5","ZXhwaXJhdGlvbg==","IFRQTQ==","IGVkeA==","IFdlYkRyaXZlcldhaXQ=","IGFkbWlyZWQ=","IGxpc3RlbnM=","IFZpbA==","ZGlmZmVyZW50","IGxpdmVsaWhvb2Q=","IFdhcmNyYWZ0","IHBvc2ljaW9u","IGltcGVhY2htZW50","SmF5","IHBvc2l0aXZlcw==","IGp1bmdl","IFNNQg==","L2luY2x1ZGVz","KCcuLi8uLi8uLi8=","QXJndW1lbnROdWxsRXhjZXB0aW9u","ZGVzY3JpY2Fv","QUJDREU=","LUFB","IGludmFkZWQ=","IGFtZXJpY2E=","dWVkZQ==","IFBoYXNlcg==","IHNjb3Jlcg==","IGRpc2NvdXJhZ2Vk","dGhpbg==","IGFiZG9tZW4=","IElQUA==","IEhhbXB0b24=","L0RlbGV0ZQ==","W3NyYw==","Q1N0cmluZw==","IE51bg==","IGVwaXRo","4oC7","LnRhYmxlcw==","IEhlaW4=","IHdoaXJs","IGNsYXJpZmljYXRpb24=","IHdlZGdl","IGjDpHI=","IFRpbmE=","IHRod2FydA==","IENvc3R1bWU=","aW9uYWdl","Q29k","X2FjbA==","IHJlc2g=","IE1lcmN5","IERpeG9u","IGRlc2Fycm9sbA==","VmlyZ2lu","KiopJg==","IExlbm92bw==","IGVyYXNlZA==","ZW50aW9ucw==","IHNsaXBwaW5n","5Zub","IGNyYXZ
pbmc=","cGxhbnRz","IGdldHRleHQ=","IG1hc3NpdmVseQ==","IFJlbmFtZQ==","Lmhlcm8=","44K7","IHRvbWFy","IENPU1Q=","IFByYWN0aWNlcw==","Lk1lZGlhVHlwZQ==","IEZ1bmRpbmc=","RmluZQ==","aWdlcmlh","VW5j","IHN3YXBwaW5n","PicuCg==","aW50ZXJw","YXJ0aWZhY3Q=","IEJhZ3M=","LnZpZXdNb2RlbA==","cXVvdGVk","CUxvbmc=","X1NDT1JF","IHNhdnZ5","bmVsbGU=","a2zDpA==","Q291bnRz","2q8=","RmllbGRUeXBl","b2thYmxl","IFJUTA==","I2luZGV4","ICV7","IGFyaXN0","LkdldE1hcHBpbmc=","KEFkYXB0ZXJWaWV3","PSIiKQo=","IGRpc2lu","IFRvdWNoYWJsZU9wYWNpdHk=","IE1PWg==","IER1bm4=","Q2FwYWJpbGl0eQ==","YWtoc3Rhbg==","VUlWaWV3Q29udHJvbGxlcg==","KHNvY2tmZA==","IEphY3F1ZXM=","PXRr","YXJQYXJhbXM=","Y29uZGE=","IGFkdm9jYXRlZA==","IHBlbmV0cmF0ZQ==","SkVDVElPTg==","IOuwmA==","IEZJTkQ=","IGVhcm5z","YXBwZW4=","6rE=","IHRocm91Z2hwdXQ=","IHBlbnNpb25z","IGZ1c3M=","SFRUUFJlcXVlc3Q=","bnV0cw==","b2NodA==","LWVzdGFibGlzaGVk","IEFMSUdO","IGpzcGI=","RGlzcA==","X2VtYmVkZGluZ3M=","IHJlcHQ=","IFlvcmtlcg==","w7JuZw==","IGpvdXJuZXlz","IEFwcHJvdmFs","CVNFTEVDVA==","KEdyYXBo","0LzQuA==","IGRvbGxz","IHNleGlzdA==","IHBhbnM=","IG1wbA==","IG9wZXJhdGl2ZQ==","IFRvcnJlbnQ=","WU0=","IFBhc3Npb24=","5pat","LmNvbXBpbGVy","CUNTdHJpbmc=","PWNvbG9y","b3JpYW5DYWxlbmRhcg==","IEtub2Nr","IGhhaWxlZA==","L3N0YXRl","IHNldHVwdG9vbHM=","IE1hcmU=","IHN5bmNocm9uaXpl","IFN3aXBl","IGdhbWJsZQ==","LCcnXV1dLAo=","IGRlZmVjdGl2ZQ==","X09CSkM=","IGRlbmlt","IHRhZA==","IEtpbWJlcg==","IG5ldXJvbG9naWNhbA==","w6puY2lhcw==","CWNi","LnNldFBhc3N3b3Jk","IFBsZWFzYW50","IFBoaQ==","LXRhZ3M=","IGNvbnRhZw==","IENvcmFs","IGRpc3RyYWN0","aXRpemVy","IHN1bnJpc2U=","c2V0SWQ=","IENoZW5uYWk=","IE9ncmU=","X0hJU1RPUlk=","UFJFU1NJT04=","X1NVRkZJWA==","ZHVwbGljYXRl","LmF1dGhTZXJ2aWNl","IHNwYWNlZA==","IEJlbmdhbHM=","U29sdmVy","IGJ1cmVhdWNyYWN5","X2hpdHM=","INGC0LjQvw==","IGPDqQ==","IGRpc2dyYWNl","6KeS","aXNPcGVu","Q2hlbQ==","X2xpY2Vuc2U=","X2hvc3RuYW1l","X0JSRUFL","IGZpZXJ5","OkQ=","L2xpbnV4","VGl0dWxv","UmFkaWFucw==","aXpvbnM=","UmFt","b2RpYW4=","aWFuZ2xl","IG5pbmph","RXZlcnlib2R5","KCI+","IHRha8W8ZQ==","IG
dyb3VuZGJyZWFraW5n","IGRpcmln","SFRNTEVsZW1lbnQ=","IFVuY29tbWVudA==","Y2hlaW4=","IOeUn+WRveWRqOacn+WHveaVsA==","JSIK","IHRpcG9z","Q2hhckNvZGU=","IFByb2R1Y3Rv","ZmFpdA==","J2w=","LXRodW1ibmFpbA==","dXN1","X2Zvcm11bGE=","LlRPUA==","LmJ1eQ==","IG1pZXV4","Q2VudHVyeQ==","cGVp","IHRic3A=","LVBhY2lmaWM=","b2dp","IGZhdHRv","IGZhbnRhc3Q=","IFNBTEU=","LmFkcw==","IHBpbGxhcnM=","X3RyaXA=","IHR1YQ==","IGFwZWxsaWRv","LnNldENlbGxWYWx1ZQ==","ICgoXw==","IE5pbmE=","PGM=","aW5pdW0=","ZGZ1bmRpbmc=","LXdvcmtpbmc=","IEVzdGFkb3M=","IE1hbGk=","PGY=","dXJhbmNlcw==","cGFnaW5h","X1BL","IHVuYXJtZWQ=","b2dnbGVk","Q2FuZGlkYXRl","UmF0aGVy","IGZyYW5jaGlzZXM=","IGNvdmVuYW50","wqo=","aXBwaW5lcw==","R3Vu","LWZlaXJh","IGxpbmVhZ2U=","X0dSQU5URUQ=","Z2VucmVz","LkVsYXBzZWQ=","IGxhcmdv","0Js=","LXJlYWR5","X3Byb2Nlc3NlZA==","bGFuZ3M=","w7ptZXJvcw==","ZnE=","L25wbQ==","X3Nydg==","IGF0dGVuZGFudA==","aXZpZA==","ZXZpY2U=","QUJJ","KGJpbmFyeQ==","X1ZBTElEQVRF","IGFkZEl0ZW0=","X2NvZWY=","YWxlYg==","b2dyYXBoaWNhbGx5","Qm9yZGVyQ29sb3I=","IGFzc2F5","IGNhdGNoRXJyb3I=","IENocnlzbGVy","b2do","IGtleVZhbHVl","ZGVjaXNpb24=","LW9mZnM=","IGxpZWd0","KERhdGFUeXBl","IGlyaXM=","IGV1cA==","cmlnZXI=","b25pY2E=","IHJvcGVz","IG5hcnJvd2x5","IFF1YWRy","IGVwdWI=","ZXN0aW5hbA==","LXR1cm4=","IGxhbmdz","55uR5ZCs6aG16Z2i","IHF1ZWxsbw==","LGFyZ3M=","aWdhdGU=","IFNlZW1z","IGZvcnRl","Q0xJ","X0xPQURJTkc=","LlJ1bGU=","IHlvdXRocw==","KHh4","IEFzc3VtaW5n","YWdoZXR0aQ==","KQoKCgoK","IG9uT3B0aW9uc0l0ZW1TZWxlY3RlZA==","T2NjdXA=","IGRldHJpbWVudGFs","IGlubmF0ZQ==","IEJhcnJlbA==","dWVuY2lh","IG9uQmx1cg==","IGxpYnM=","W2xhc3Q=","IGNwZg==","LlRpbWVvdXQ=","ZXN0YXRpb24=","IHdpZWw=","IHV0aWxpemFy","IGRpc2d1aXNl","IER1bQ==","T0NJ","T05HTw==","ICg/LA==","IFBhdGlv","VmVydGV4QXJyYXk=","LmF1dGhvcml6YXRpb24=","cm96","IEhvcw==","LlNwYWNl","IFZpcnVz","KGtleXdvcmQ=","VE9DT0w=","X0NPTlRST0xMRVI=","IEJsb2NrZWQ=","IENob3A=","d2nEmQ==","XFJvdXRpbmc=","L3BhY2thZ2U=","IHBlcnN1YWRlZA==","YmVpdHM=","TENE","IG11Yw==","X0ZPUldBUkQ=","IG91dGxhdw==","IHphdw==","X3ZlaGljbGU=","
IEplbnNlbg==","LkdyZWVu","IC8vLy8v","SVJDTEU=","LWJ1c2luZXNz","LkhpZGRlbg==","IGtvbm50ZQ==","cHE=","IHBhcmVjZQ==","IGxhbmRzY2FwaW5n","IERlY29yYXRpb24=","IEdSQQ==","X3Byb2ZpbGVz","IEZsZW0=","Q0xJQ0s=","IEZBSUxVUkU=","IGlvbnM=","X1RpbWVy","LkRvZXM=","IGJvdW5jaW5n","dXBweQ==","dWxpcw==","L2Fn","IEdhcm4=","IGh1ZA==","IHJlc3BvbmRlcg==","IHN0cmNocg==","IGNob2tl","IHN0YXNo","X2NoZWNrc3Vt","IHN0YW1wZWQ=","QEdldE1hcHBpbmc=","LkJ5dGVBcnJheQ==","IER5cw==","YXRlcm5pdHk=","KHJi","IGVkaXRUZXh0","IGVyZWN0aW9u","IGNlc3M=","X2V2ZXJ5","X2dhdGV3YXk=","ICciLg==","IHN0YWZmaW5n","IGludm9pY2Vz","aW5pY2lv","fV0sCg==","LHZhcg==","eWNpbg==","IERpb24=","ICUlCg==","Jywo","LXNwYW4=","IHRow6BuaA==","IGJvcm5l","IEthdGhsZWVu","6L+e5o6l","X2N1YmU=","IGluZm9ybWHDp8O1ZXM=","bmdlcg==","L0ZpbGU=","IGRhcmE=","IG1M","KioqKioqCg==","IG1hcmtpbmdz","YmJl","IHJlY3VycmVudA==","IFJhbmtpbmc=","X2ludGVncmFs","XT4K","IHVuYW5pbW91c2x5","IGRpcGxvbWF0cw==","IElPUw==","OyI+PD8=","IE1hdHRl","IFJhbGVpZ2g=","IEltcHJvdmU=","ZXhpc3RlbnQ=","IGZha2Vy","IEhpZ2hsYW5k","c3RlbQ==","LW1z","TGlzdE9m","Lkxpc3RlbmVy","KHdhaXQ=","X1JTVA==","VW5h","IG9jY3VwYXRpb25hbA==","LW1lbW9yeQ==","IFN1cmY=","IGJydXRl","X0VsZW1lbnQ=","ZGRkZA==","IERlY3Jl","LnBzaQ==","LWRldmVs","IE9uVHJpZ2dlckVudGVy","VG9EZWxldGU=","IGhlcmFsZA==","IHNvY2lhbGVz","IGJvb3N0ZWQ=","Lkl0b2E=","KiI=","IGFudGlkZXByZXNz","IE1hdmVy","X18pKQo=","KER1cmF0aW9u","ZXN0YXRl","YnJhdGU=","Q2xh","IOS4ig==","65CY","cmnDqHJl","YnJlYWtlcg==","X2xlZw==","fWVsc2VpZg==","X2Z1bmNz","dcOt","LnBhZ2VZ","Y3JlYXR1cmU=","IGNhbm5hYmlu","IEFzdHJv","bG9jYWxz","IExBUw==","X2NvbnZlcnNpb24=","IENSVUQ=","LnNraWxs","IHN0cmF0ZWdpc3Q=","LnBvbA==","KHNlZ21lbnQ=","IHBlZQ==","fSIpOwoK","LnByZXZpZXc=","SmFt","IGhlZnR5","aXZhdGluZw==","R3JpZENvbHVtbg==","IGN1ZGQ=","IGluamVjdGlvbnM=","IE5JTA==","LW9sZHM=","ZmxhdGlvbg==","IExlYWZz","IHNwaGVyaWNhbA==","IGZhbGxvdXQ=","YW1pbmVy","IDo6PQ==","LnBvaW50ZXI=","LU1hcnQ=","IG1hdHRl","IGNvcXVpbmU=","IGRpc2NvbnRpbnVlZA==","IFJFR0lPTg==","LlJpZ2h0VG9MZWZ0","IHNxdWVlemVk","X1
BPSU5UUw==","YmVzdG9z","LWxhc3Rpbmc=","KHV0aWxz","PEJhc2U=","IHBhcmRvbg==","U3RyaWRl","Y2Ry","IG5hcnJhdG9y","dm9sdXRpb24=","IHVzZXJJbnB1dA==","X2NvbnRhY3Rz","KGVuZW15","IENoYW1iZXJz","emllbA==","IGJsb2NrU2l6ZQ==","QW5pbWF0aW9uc01vZHVsZQ==","IGltbWVyc2l2ZQ==","IG91dGluZw==","dWVzdG9z","VHdlZW4=","IGtlcA==","IHLDqXN1bHQ=","IEJvbGx5d29vZA==","RExM","IFN1cmVseQ==","LlJvd1N0eWxl","KHRt","X2dlbmVyYXRpb24=","IFN0aXI=","IGRhdGFTbmFwc2hvdA==","Y2h1cmNo","IGNvbmZpZGVudGlhbGl0eQ==","X3N1c3BlbmQ=","dmlw","IEthdGh5","44Km","IHZpb2xlbnRseQ==","cGV0cw==","IG1lc3NlZA==","IHRleHRib29rcw==","ICAgICAgICAJCQk=","5raI5oGv","IExhcmF2ZWw=","IEFyY2FkZQ==","IGVudGg=","IGJlbmlnbg==","X0RST1A=","LWVuYWJsZQ==","4oCdKS4=","dXZ3eHl6","X2xpc3Rpbmc=","IE5JQw==","44GV44GE","KCIuIiw=","LXJvdW5kZWQ=","LXBhY2Vk","cGF0cmljaw==","U2VsZQ==","LmdldEZpcnN0","LkVYSVQ=","ZXRlcm1pbmF0ZQ==","R3JhbQ==","Ly8qKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq","LmV4dGVybmFs","IHdyb25nZG9pbmc=","IEVsbQ==","IHNhbms=","VGVlbg==","IFRob21zb24=","cHJpb3I=","amV0YQ==","IEFEUw==","IFBlcnNpc3RlbmNl","IEZvbGs=","e1wi","Ym9uZA==","X1NQRUNJQUw=","X0xBVA==","b25la3Np","IG1vdGhlcmJvYXJk","IHNoZWFy","RnVsbFNjcmVlbg==","Kks=","KEJsdWVwcmludA==","TWV0aG9kSW5mbw==","QmVjb21l","IGhhaWw=","IERvYg==","IGdlbmVyb3NpdHk=","ID8iOwo=","IHdoaXNrZXk=","IHRoaW5uZXI=","IENw","IGludGVyc2VjdGlvbnM=","Q3JpdA==","cmFpc2Fs","cmVmZmVu","V2hlbmV2ZXI=","IGNvbW1lbmNlZA==","VHJhbnNmb3JtYXRpb24=","L3dyaXRl","PSIiIg==","KGxk","IG5vcnNr","QU1FTlQ=","LnNoYXJlZEluc3RhbmNl","X2hvdXNl","IGdsRW5hYmxl","6L2v","IG5hbw==","IGRlcG9zaXRpb24=","IGRpbm9zYXVycw==","IHRpbWVTdGFtcA==","X18pOwoK","LlJpYmJvbg==","IExpbmRzZXk=","OnVzZXI=","IMOA","X2Zvcm1z","bWluYXRpbmc=","IE9saXY=","IGTDqWJ1dA==","YmFyY29kZQ==","c2ltaWxhcg==","IHBsYXRlYXU=","IGluZGVt","UmVhbG0=","IGZlcnRpbGl6ZXI=","IGNhcGU=","IGNoYW1wYWduZQ==","IHNlbGZpZQ==","IHBsYWlubHk=","IGNhdGFzdHJvcGhl","IGJldHJheWVk","dmVyc2libGU=","VXBkYXRlVGltZQ==","Lk91dHB1dFN0cmV
hbQ==","Ymlhc2Vk","Ym91bmNl","IFNwb3J0aW5n","Q29vcmRpbmF0b3I=","ZGV2ZWxvcGVycw==","IHRyYWNlcg==","IG11c3RhcmQ=","U1E=","X3Rlcm1pbmFs","IGNvb2xlZA==","IGF2b2lkYW5jZQ==","TG9naWNhbA==","IHllbGw=","X3JvdXRlcw==","IGFydGVyeQ==","IEJlYXJpbmdz","Lm12cA==","LkdVSQ==","VUlTY3JlZW4=","eW1t","aXTDpA==","KClbIg==","IEF6ZXJiYWk=","IGNvbmRpdGlvbmVy","IHdhZw==","IHNjYWxw","dmluY2lhbA==","b3dsZXI=","LicpOwoK","QkxVRQ==","IMKnwqc=","Qm9zdG9u","IExpbmtlZEhhc2hNYXA=","RG9jdW1lbnRhdGlvbg==","LkxlcnA=","IGRlbm5l","IGhlc2l0YXRpb24=","IENlbGVicml0eQ==","IEh5ZGU=","IGNvbW1hbmRpbmc=","YWNlbGx1bGFy","IHBhdmVtZW50","IEhhbW1vbmQ=","YXNzaWM=","UExVR0lO","IHJldm9rZWQ=","RG9jdW1lbnRv","LnBob3Rvcw==","IFdpbGxvdw==","IFZpa2luZw==","IHVwZnJvbnQ=","IExpZmV0aW1l","ICVb","RHJlYW0=","5aS0","IGFjY2VsZXJhdG9y","UGVyc29uYQ==","X3RvcGljcw==","77yJ44CB","IChfLg==","IHPDqWN1cg==","IEt3","X2Nhc2g=","IHNvb3RoaW5n","IExvdmVseQ==","IEhlcnM=","ZWxvbg==","TElDRU5TRQ==","X2NhY2hlZA==","LnNoYQ==","UkZD","LkZpbGVJbnB1dFN0cmVhbQ==","LUFs","IHVzZXJMaXN0","IG7DpHI=","SGlsbGFyeQ==","IHBhZ28=","LlBsdWdpbg==","IENvdmU=","X3lhbWw=","X3JzcA==","J3Bvc3Q=","LWR1cmF0aW9u","IHNlbnRpZG8=","IG1pbkhlaWdodA==","IHR1cnJldA==","LWVuZXJneQ==","IOeJ","0YDRg9Cz","b3RlY2E=","X3F1YWw=","U2VsZWN0aXZl","IEJFTE9X","CWFkbWlu","IH19LAo=","J3VzZXI=","U1ZH","IGN1bG8=","KFdvcmxk","LWJpbmRpbmc=","bmJy","IFNlbmRz","IHN1cHJlbWFjeQ==","IHNrYXRpbmc=","IGNyZWVr","IGFjY3VzYXRpb24=","YXBnb2xseQ==","LklERU5USVRZ","IG1hbmRhdGVk","IGdvd24=","IHdpZHRocw==","IExTVQ==","L3ZlcnNpb24=","IFJlYWRlcnM=","IFJvbmFsZG8=","IGJhZmY=","IGA7Cg==","R0xJU0g=","KGRvdA==","IE9wZXJhdG9ycw==","LlNjZW5lTWFuYWdlbWVudA==","bWVyYw==","X3JlcG9ydHM=","LWNlbnRyaWM=","IENlaWxpbmc=","PXsh","bW9ueQ==","IEFERFJFU1M=","5a+56LGh","TWF0Y2hpbmc=","IHVuaw==","IGtleUNvZGU=","ICcvJyk=","KWRhdGE=","IFZvbHVudGVlcg==","IGxheg==","IEd1YW5n","IENhbmRpZGF0ZXM=","RW5zdXJl","aWFnZQ==","c3VjYw==","Q2VydGFpbg==","IGxlZnRvdmVy","aW5pbg==","LWVsZW1lbnRz","cGlrZQ==","IHNsaWRlc2hvdw==","LnRvb2xTdHJpcFNlcGFyYXR
vcg==","LnBoYXNl","IGVudGVydGFpbmVk","IENhcnJpZQ==","IE1vaGFtbWFk","LmxvZ2dlZA==","IHNjcm9sbFRvcA==","IEFiYmV5","aW1vbnk=","KHJlc3VsdFNldA==","IGFkaGVzaXZl","X0RBTUFHRQ==","IGlvY3Rs","YnJvd24=","SU5TVA==","LkNsb25l","IGxvb21pbmc=","RGVzZXJpYWxpemU=","IGx1eg==","cXJzdHV2d3h5eg==","LmlkZW50","SGVhdnk=","IGRpbw==","5piv5ZCm","IEZ1cm4=","6YKu","emltbWVy","44O844OJ","c3BlYWtlcg==","IEdlZA==","IHVuaWRlbnRpZmllZA==","SW50ZXJmYWNlT3JpZW50YXRpb24=","IFN1cnZpdm9y","ZGVlbg==","IEJvcmc=","dG9Eb3VibGU=","X2J3","IHB1Ymxpc2hlcw==","X0FMRVJU","YW5ncw==","aWVyZXM=","IGhlaQ==","IElDb25maWd1cmF0aW9u","IGNvbnN0aXR1dGVk","V0FUQ0g=","cHJpdmF0aW9u","IEdyYW5pdGU=","LlRleHRBbGlnbm1lbnQ=","X2t3","OyIsCg==","Y290","IE5ld2Fyaw==","cm9hY2g=","KW9iag==","Q29tcGlsYXRpb24=","Q2F0ZWdvcnlJZA==","LnNldFVzZXI=","aXZ5","IEltYWdpbmc=","aWdodGVk","IHdnZXQ=","IG1vdXRocw==","Lmxpbg==","IFJhZGlvQnV0dG9u","LkNtZA==","c3Nl","IG1lc2hlcw==","IFNvbGU=","LnJlY29yZHM=","IGFudGlz","KG1vbg==","INGH0LjRgdC70L4=","gq0=","IOyeiOuKlA==","QWxsQXJnc0NvbnN0cnVjdG9y","IHN1cnJlYWw=","IE1hcnJpZWQ=","IHhwYXRo","XGY=","QnJpbmc=","IHlhaG9v","IEV0c3k=","X2RhaWx5","IHRocm93YWJsZQ==","IFBsYXNtYQ==","L1B1YmxpYw==","aW1pemVCb3g=","IHZlcw==","IHRyb20=","X3Jocw==","LWFscGhh","IEFyYm9y","KSkt","RmlzaA==","ZmVlZHM=","IGNhbGY=","IFNlcmdlYW50","KGVudW0=","IFJhbXNleQ==","IElkZW50aWZ5","LmluaXRTdGF0ZQ==","IGZsdWN0dWF0aW9ucw==","X0FUVFJJQlVURVM=","IHB3bQ==","RVNB","Y3Bm","U2ltdWxhdGlvbg==","IHlvdXRoZnVs","IEluZmFudHJ5","IGdsYW5jZWQ=","IFByb3Blcg==","5LmJ","IEtyYWZ0","Q2l0","b29wcw==","PXVybA==","cG9zdGluZw==","ZGVjbGFyaW5n","IHBOb2Rl","SmF2YXNjcmlwdA==","CQkJCQoJCQkJCg==","LmNvb3JkaW5hdGVz","cmlldA==","IFNx","X0NBVA==","IFBhcGE=","YW5kaQ==","Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8v","TWVldGluZw==","IOyekA==","SW1hZ2Vu","w6lyaWVuY2U=","QWdncmVnYXRl","LnBvbHk=","IHdhdmVk","IGludmVycw==","c2VhcmNoTW9kZWw=","IHRyb2xscw==","W2xldmVs","IExvd2U=","dWxsbw==","KHBsYWNl","IE5BU0NBUg==","IG9yYml0YWw=","LnN0b3J5","
IGF1dGhvcml0YXRpdmU=","LnRleHRWaWV3","IGFscGg=","X3JlZHVjZQ==","IEZyYW1lcw==","IEJyb20=","cmVkaQ==","KE1ldGhvZEltcGxPcHRpb25z","bWFjZW4=","VG90","IG1pZGQ=","2Y8=","IEJhc2VNb2RlbA==","IFZlZ2E=","ID8+Igo=","IFJpZ2lkYm9keQ==","LnNldENvbnRlbnRUeXBl","YWFT","QmFzZWxpbmU=","IGJsYW5rZXRz","c2Fw","IGNhc3VhbGx5","VW5pdmVycw==","IFRyYXk=","IEFpcmVz","IG1heFk=","X1BST1BFUlRJRVM=","IGhlbG1ldHM=","wqY=","X2Rlc2Ny","c2hpbnQ=","X0NQUA==","dW1v","YWRheQ==","KHBsb3Q=","ZW56eW1l","IEV4Y2VwdGlvbnM=","X3Zpc3VhbA==","Ol0KCg==","KHRhcmdldEVudGl0eQ==","cGhlcmVz","dW5hbg==","IHNlbG9u","d2ls","IFJlbmRlcmluZw==","S0M=","IGNvbnN0aXR1ZW5jeQ==","U0NSSUJF","ZXN5","IEZlbGxvd3NoaXA=","5Y+4","IGZ1dHVybw==","IGFybW9yZWQ=","bGlzdGU=","b3Jhcw==","bXVsdGlwbHk=","Z2VtZQ==","Y29lZg==","0L7QsdGA0LDQtg==","IERlbGl2ZXI=","ZW5nbw==","LnVzZXJTZXJ2aWNl","T05VUw==","Lm9ucmVhZHlzdGF0ZWNoYW5nZQ==","ICIvIiw=","YW1iaW8=","X1Byb2plY3Q=","Jyk/Pg==","IGZsaXBwaW5n","d29tZW4=","LkNyb3Nz","IGhvbGxhbmQ=","IGNpbmVtYXRpYw==","IHdoaXN0bGVibA==","IGxpbmd1aXN0aWM=","LkdldHRlcg==","IG3DpG5uZXI=","IExlZ28=","IFNjaHVtZXI=","YXNzZXNzbWVudA==","X2Noaw==","IHJlY29tbWVuZGluZw==","LnNjYWxh","IEd1YXJhbnRlZQ==","IEBf","LkFVVEg=","IHlQb3M=","bGF0ZXg=","IEFsYmVydG8=","5q2l","dGhvcmE=","4Li34LmI","VVJMRXhjZXB0aW9u","R2hvc3Q=","LlRvb2xiYXI=","IGVuZGlhbg==","6Zeo","c3RyYWN0aW9ucw==","RmlsZU5vdEZvdW5kRXhjZXB0aW9u","IHN0aW11bGF0aW5n","YnNlcnZpY2U=","YXTDs3Jpbw==","aXRpb3Vz","IGF1dGhTZXJ2aWNl","X1RSQU5TRkVS","IHJlZGlyZWN0VG8=","IG1lbnNlbg==","IFNQTA==","IMK7LA==","IGFjZXQ=","X0JhY2s=","4KSV","YWFj","IFJpb3Q=","X0ZC","IFph","UGxhdGU=","IGxhYmVsVGV4dA==","INCy0YDQtdC8","aHRvbg==","IE1jQQ==","IEFwcGVuZGl4","IEtvaw==","IGludGVydmlld2luZw==","X3NwZWxs","IFN1YmplY3Rz","IGJ1cm5lcg==","5a+8","aWxsaWFu","IGJ1bXBz","UGFzc2Vk","IENvbnRyaWJ1dG9y","WW8=","Ymxh","IHNvdXQ=","LmV4Yw==","Tm90aWZpZXI=","c2hpdg==","LlVuaXRUZXN0aW5n","dWVsbGVz","X1NMRUVQ","CW9wdHM=","IHByZXNjcmlwdGlvbnM=","IHJldmlzZQ==","RURJVE9S","IGFubsOpZXM=","X3BrZw==","IFRyYWNrcw==","4LmI4Liy
","PWZvcm1z","LlJVTg==","IGFzZWc=","IHDDoQ==","IGplcw==","R3Jl","YWNy","T2ZmaWNpYWxz","dWtlcw==","Y29tcGFuaWVz","XFF1ZXJ5","IFByaW50YWJsZQ==","5a6i","X1ZP","IGRlaXg=","IGRldmljZUlk","IGRpc3R1cmJhbmNl","bmlzdA==","Lmlzbw==","cGFyYWxsZQ==","LWRlc2NyaWJlZGJ5","IExpZg==","IGJyZWFzdGZlZWRpbmc=","IGZlbWluaXN0cw==","bGVncm91bmQ=","IGRhbWU=","IGNvbXB1bHNvcnk=","TUVSQ0hBTlRBQklMSVRZ","LXJlc3VsdHM=","Zm9ybWVkVVJMRXhjZXB0aW9u","OlsK","LWludGVyZXN0","IHPDpA==","IG5vc3RhbGdpYQ==","IGNsYXJpZmllZA==","IFBIT1RP","IHJldmlzaXQ=","IGNhcHN1bGVz","IHNoaW5lcw==","IGNyYWZ0c20=","c3ViamVjdHM=","ICAgICAgICAgICANCg==","5LiN6IO95Li656m6","IFNjaHdhcnR6","cmV1","IG1hZHJpZA==","LnBlbmRpbmc=","IExJTg==","IHVuc3Q=","CW12","IHZpdmFzdHJlZXQ=","IHNwb2ls","w7hq","64u5","IGJ1ZW5h","IGRpZ2l0YWxXcml0ZQ==","c3Vicw==","IFVOSVZFUlM=","IFN1aWNpZGU=","PEd1aWQ=","LmVsZW0=","X2NvbnN0cnVjdA==","IGFtaWRzdA==","IOuP","LWVzdGVlbQ==","IEludGVncml0eQ==","LmZtbA==","T3V0T2ZCb3VuZHNFeGNlcHRpb24=","LVNlbWl0aXNt","QmV0YQ==","LWdvaW5n","U2VnbWVudHM=","IE1hZQ==","IFBlcnNvbmFsaXR5","dXJiYXRpb24=","5Y+z","IHNlcnZpY2luZw==","IGJpcG9sYXI=","X1NUQUdF","LkpQRw==","Jyl9fSI+","aXNobHk=","SVZFUlk=","IEluc3BpcmVk","LnNlcnY=","KGRhdGFz","IGRpdmlkZXM=","PFJlYWw=","dmVydHVyZQ==","IG1vdGl2YXRpb25z","dmVydGU=","RU5DSA==","ZmRz","IHJldm9sdA==","d2VidG9rZW4=","aW5zdGVhZA==","CW9wdA==","IE1hcmlqdWFuYQ==","X2FkYw==","YmFv","W1NlcmlhbGl6ZUZpZWxk","IGdyYWZmaXRp","LWFvcw==","ZW1pYWg=","IGbDrXM=","IGV0aGlj","J2FsbA==","OmtleQ==","65Ok","IHJlc3RyaWN0aW5n","IFhIVE1M","ZXJlbw==","dW5kb3M=","CWVuZGlm","WzosOiw=","IHN0ZWhlbg==","YWtoaXI=","IGp1aWNlcw==","ZGF0YVNvdXJjZQ==","X21r","LmRlbGV0ZWQ=","Q29uZ3Jlc3M=","aW1tZWw=","RWxlY3RyaWM=","YW9z","IE92ZXJsYXk=","IEFDTFU=","cm5k","ZXNzZXM=","IEx1eGVtYm91cmc=","cGFyc2VGbG9hdA==","IGd1dHM=","Y2xhc3NpZmllZA==","IGRlZlN0eWxl","IFRjcA==","cGVhdGluZw==","Q2hhcnRz","X3Vy","X2xhdGVzdA==","KSEK","Y2F0aW9u","LkdldGVudg==","KGxvb3A=","IHVubA==","X2R0eXBl","emXFhA==","KEpOSUVudg==","LmZldGNob25l","IHNpZ21vaWQ=","IE9MRA
==","IE1pbmlzdA==","7YE=","IEvDtg==","IGZyYWN0aW9ucw==","IHNpeg==","PT09PT0K","LlByaW50V3JpdGVy","X0FkZHJlc3M=","IEF1ZGllbmNl","Q29tbw==","IEJydWlucw==","LmFjdGl2aXRpZXM=","IGFuY2VzdHJ5","0YPQu9GM0YI=","CVJldHVybg==","cHVu","IGdyYXBlcw==","SUxvZw==","IGRpam8=","IFBlcmtpbnM=","IFZNd2FyZQ==","X2F1dGhlbnRpY2F0ZWQ=","w650cmU=","b3ZlcndyaXRl","IEhk","IGdhbGF4aWVz","YWNodQ==","SHJlZg==","W0Q=","IHBhcmNl","TGF0TG5n","X3BhdHRlcm5z","IFNIT1JU","IHJ1bW91cnM=","Y291bnR5","IEdSSUQ=","IFsv","IFNreXJpbQ==","RGF0YUdyaWRWaWV3VGV4dEJveENvbHVtbg==","IGNlbg==","IGN1Y3VtYmVy","LklOVA==","X0NPTkZJUk0=","IGN0bA==","cGVybA==","aWxsb3M=","IEFDQQ==","IEdlb3JnZXRvd24=","X2NhbGxhYmxl","IENyYWZ0cw==","L2Nv","IGluYm91bmQ=","IFRlY2huaXF1ZXM=","c2V0Q2hlY2tlZA==","IHBuYW1l","Y29tcHV0","U3RlZWw=","IGhhbmRoZWxk","IEFsYW0=","YWJzdHJhY3RtZXRob2Q=","6aKR","SU5Z","YmF0dGxl","X0VWVA==","IGNldXg=","IGF0b2Y=","IEFieXNz","X3ZhbGlkYXRvcg==","IGhhaXJz","VmVydGV4QXR0cmliQXJyYXk=","IGNvbW1vbnM=","LWJpbmQ=","TXVp","IGNvc21ldGljcw==","IG1pcmFj","Lm1hcmtlcg==","U0NBTEU=","LldvcmQ=","LXVs","IERpdmVyc2l0eQ==","IEREUw==","LmN3ZA==","X3h5eg==","IENvbXB1dGVz","KGNsaWNrZWQ=","VEVNUExBVEU=","IHpvbmluZw==","IGZpbnM=","IFBK","ZXh0Vmlldw==","Q2hhcmFjdGVyaXN0aWM=","aWdhdG9ycw==","IHByb2NsYWlt","IHByaXN0aW5l","IGRhdGFzdG9yZQ==","IGRpc2NvdXJhZ2U=","X25zZWM=","IG5pbmV0ZWVudGg=","IGNlbHVp","Sm9uYXRoYW4=","IGFtcGg=","IENyb3NzaW5n","IEh1bWFucw==","IEJvb2tlcg==","w6JjZQ==","Z2V0UG9zdA==","IE1vbnRlcg==","IEZsYXZvcg==","TWVkaWFUeXBl","IuKAlA==","IEFyY2hhZQ==","QHJldHVybg==","LWF3YXJl","b3J1","LVRoZQ==","YW1wbGVk","S0Y=","LlRlbXA=","IERyZQ==","KHtf","cG9seWdvbg==","IMOm","IERlZmVuZGVy","77yY","Xyks","LlVuc3VwcG9ydGVk","X14o","KElEQw==","JHY=","IHdvcnRobGVzcw==","IFNFRw==","aWxpa2k=","Tm9BcmdzQ29uc3RydWN0b3I=","IE1lcmNo","IG5vcA==","IGZvcmdldHRpbmc=","IGRvcGFtaW5l","anVhbA==","ZW9u","IFJlYXNvbnM=","c29ydEJ5","KCctJyw=","LXN5bmM=","ZWNlZG9y","S1A=","KGNvb3Jk","KENoYXQ=","XCQ=","ZXN0cmluZw==","Y2Vm","LmhhbmRsZUVycm9y","24zYrw==","0YHQug=
=","IGhhbmRj","ZWxpamtl","IFNwaXI=","IEJ1Y2tz","IFFSZWN0","U2V0Rm9udA==","LmV4ZWNTUUw=","OjoKCg==","IHN1aWNpZGFs","c2VlaW5n","IGNpZGVy","UHJvZ3Jlc3NEaWFsb2c=","IG1vbGRpbmc=","CXRyYWNl","IGVtcGhhc2l6ZXM=","IG11bHRpcGxlcw==","X1BU","X091dHB1dA==","Y2FwaXRhbA==","TmVlZHM=","X0RJUkVDVElPTg==","LmlzVmlzaWJsZQ==","IHJlc3Rl","IG92YXI=","KHNoYXJlZA==","LWNvbXBvc2U=","LmJhY2t3YXJk","CXJlY3Q=","QW1hemluZw==","LmRpZFJlY2VpdmVNZW1vcnlXYXJuaW5n","U0VSVklDRQ==","IEluanVyeQ==","QnJhaW4=","IGF1c2dl","KHBl","Ly8qKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio=","b3JwdGlvbg==","X01BSUw=","b2hh","IHNubw==","IGJvaWxlZA==","aWxkZW5hZmls","IFdlbGZhcmU=","IFF1YXJ0eg==","IGNhcHRjaGE=","IFdFU1Q=","IE1hemU=","IGdyYXBoZW5l","IHBlcms=","IG1pc3RyZXNz","LkZvcm1TdGFydFBvc2l0aW9u","IGV4cGVyaW1lbnRhdGlvbg==","KikoKA==","IGJyb2FkY2FzdHM=","IHJlbW92ZUFsbA==","CUdVSQ==","5YOP","YWJjZGVmZ2hpamtsbW5vcA==","IHVuaW5z","QVNQ","K3c=","bXVy","IGRpbmU=","IGFyb3U=","IGVzY2FwZXM=","IFRvYmFjY28=","Lm5hbWVk","IFBhdHJlb24=","X0ZBQ0U=","X3NwaW5uZXI=","bW92aW5n","X3ZvdGVz","T2hpbw==","LmVuY29kaW5n","RGVncmVlcw==","IlRv","IHByZXN0aWdl","b3NwaGVyZQ==","IExhbmNhc3Rlcg==","77yX","IG9uQ2FuY2Vs","IEhJUw==","0J7RiNC40LHQutCw","IG9yY2hlc3Ry","IHJlZnJlc2hlZA==","RGF0aW5n","KG11","IEplZA==","IEVkaXRvcmlhbA==","U2V0QnJhbmNoQWRkcmVzcw==","Q3BwVHlwZURlZmluaXRpb24=","IEJyb254","IGdhdGhlcmluZ3M=","ICcnDQo=","cG9zdERhdGE=","IEZyYW0=","Q2xpcGJvYXJk","IFhQYXRo","cmF5cw==","IGJha2VyeQ==","IHJvd0NvdW50","IGxvd3M=","YW5kV2hlcmU=","X3ZlcnNpb25z","IEd1bm4=","IHdlZXI=","IGNvbnRleHR1YWw=","IEtleUNvZGU=","IFNhc2thdGNoZXdhbg==","IFBoaWxseQ==","IE1vdXRo","IGRvUG9zdA==","IHBlcmNlbnRpbGU=","IGJ1ZmZlclNpemU=","KGZyZXE=","JHNtYXJ0eQ==","aWVydGU=","aXNzYW50","X2Zwcw==","IGludGltYWN5","X2Jvb2tpbmc=","IGRlY29tcG9zaXRpb24=","dW5pY2lwaW8=","IE5TSW5kZXhQYXRo","IEtS","IHR1cmJpbmU=","LXByb20=","X0NBUlQ=","KGNvb3Jkcw==","ZWNvbQ==","IGNvd2FyZA==","IHdheXBvaW50","LUNvbGE=","IHByb2ZvdW5kbHk=","IEVSUA==","Ym91bmR
hcnk=","IHBvb3Jlcg==","L2V4YW1wbGU=","IHJlbmNvbnRy","IG5pY2Vy","54E=","LWNoYWlu","IEVudGl0eVN0YXRl","IGdyYWRpbmc=","QUxJR04=","IFBpY2tz","LmFr","LXZlY3Rvcg==","IEVudHJpZXM=","IFNlcmdpbw==","ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq","T0RC","IOW9","IGNvcm9uYXJ5","IHNoYXZlZA==","IGFxdWU=","ZW1wbG95ZXI=","IHBhcmNo","IG1lYXN1cmFibGU=","IGJvaXM=","am9pbmluZw==","IHZvbGNhbm8=","Ok0=","LnRocmVzaG9sZA==","IERveWxl","dmVyYm9zaXR5","IOKWug==","IHNwb3VzZXM=","IHJlc3VtZXM=","TmF0","ek0=","X0VuYWJsZQ==","IFVTRUQ=","IENhcmV5","CWZw","UGF0cmljaw==","IE9zdw==","UG9zc2libGU=","LmxlYWRpbmc=","YWhydW5n","4pmqCgo=","CQkJCQkJCQkJIA==","44CC44CM","LmFkZEVkZ2U=","IGVjeA==","J0xCTA==","IFRDTA==","IGJpcnRocw==","IHRoZWF0cmljYWw=","IHBpag==","Z3JlYXRlcg==","IEZTdHJpbmc=","QkVE","7ZmY","LkNhc3Q=","Q1g=","L01haW4=","cGVhdGVy","IHBlcnN1YXNpdmU=","Y29udG8=","eGxzeA==","X0FCUw==","IEJ1bg==","bWFuYWdlZFR5cGU=","0LPQvg==","IFNjYWxh","cmFkb3I=","IHJlY29nbml6YWJsZQ==","dHJ1","IHRq","XE1hcHBpbmc=","X0JPQVJE","IHRvSnNvbg==","IGJvd2Vs","KWQ=","J30p","KGhXbmQ=","aHJz","Y2FudA==","X18oKQoK","IGludGVycm9nYXRpb24=","bGljYXRpdmU=","CQkJCgo=","IFR3aW5z","IEFP","QmlyZA==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","cGVyaGFwcw==","b2ZpbGU=","IHBlbmM=","IHRyZWVOb2Rl","IHRvcGljYWw=","LXByaXZhdGU=","54m5","IERpc2N1c3M=","IGRlc24=","UnVh","LlZFUlRJQ0FM","44CN44Go","SUZPUk0=","IGNvdXJ0eWFyZA==","INGB0LXRgA==","ICMjIwo=","IGVtcG93ZXJpbmc=","IEZhY2lsaXRpZXM=","XCIsXA==","vZQ=","Ok9iamVjdA==","IFZvdGVz","aXNlbA==","IGV1Y2g=","b3JzdA==","KENsb25l","LmNvb2tpZXM=","JHRtcA==","KGluZGljZXM=","ZXJnZW5jeQ==","IHBsYWd1ZWQ=","IERpYQ==","eWNsaWM=","fSkp","6rK9","IGR1ZWw=","IGhldGVyb3NleHVhbA==","LmFkZENvbXBvbmVudA==","U0VDUkVU","bGVybw==","Y29uc3RyYWludHM=","IGdldENvbm5lY3Rpb24=","IExlYmVucw==","IFBvbg==","IENocm9uaWNsZXM=","ICAgICAgICAgICAgICAgICAgICAgICAgDQo=","IE1vdXJpbmhv","IG9jY3VwYW5jeQ==","X3NsYXZl","T1JJWkVE","C
Vk=","LmhpZ2hsaWdodA==","X3NlbnNpdGl2ZQ==","IHNwZWN0cm8=","LmVuY3J5cHQ=","IHNwb2lsZXJz","LlNpemVNb2Rl","IHByb2Zlc3Npb25hbGlzbQ==","Pklu","RXhwaXJlcw==","QXU=","IEhWQUM=","cmVsYXRpb25z","IEFUSw==","X0dFTkVSQUw=","IFNpZ2h0","IGtpdGNoZW5z","OlJlZ2lzdGVy","IGVkbQ==","IHRvbGVyYXRlZA==","IFNFU1NJT04=","aWVyeg==","IElOU1Q=","LnBhdGhz","IHBlcnBldHJhdG9ycw==","ZWJw","cGVjdGluZw==","ZWR1Y2F0ZWQ=","IFBpb25lZXI=","X1JFVg==","IGJ1c3R5","c3RhdHVzZXM=","UmVzcG9uZA==","c2h1ZmZsZQ==","IFRpbmRlcg==","RXhhY3RseQ==","aWxsaXNlY29uZA==","INC30L3QsNGH0LXQvdC40LU=","KEFjY291bnQ=","LiY=","aXpy","YXNzdW1pbmc=","CU9wdGlvbmFs","U2VuaGE=","IGVucm9s","dHVy","IGFycm9nYW50","IEpPYmplY3Q=","b2xpdGhpYw==","bWFwcGVk","IHRpcHBlZA==","LlVQREFURQ==","w6htZXM=","R05VQw==","V1g=","IG1vbmtz","LmJvcmRlcldpZHRo","IFNodXRkb3du","IEhhcm1vbnk=","Y2xhc3NpZmljYXRpb24=","IGRlcXVldWVSZXVzYWJsZUNlbGw=","IF07DQo=","Lkdlbg==","IGxhdm9ybw==","IExlb25hcmRv","ICYp","IGRlcG9pcw==","IFZvbHQ=","RXRo","IExlb25l","IE5lZGVybGFuZA==","IEVYVFJB","UmVzb2x2ZWQ=","IHBlbmluc3VsYQ==","X1ZN","R2Vy","2KfYrw==","LnByb21wdA==","LmFsaWdu","aW5nZ2E=","ZmlsbXM=","SEFORExF","IGNhcnRz","KFNvbWU=","PEF1ZGlv","IGVubGFyZ2VtZW50","IGdyb2Nlcmllcw==","LWhvbGRlcg==","IGlycml0YXRpb24=","Q29tbXVuaWNhdGlvbg==","IHByaW1hcmllcw==","aHR1Yg==","X2luaWNpbw==","IGNvb3JkaW5hdGluZw==","KHF1","IGZhaXM=","IHZpc3Rv","Z3VpZGVk","IHZsYW4=","IGVzcHJlc3Nv","w6h0ZQ==","c2VoZW4=","X3Blbmc=","IHJvb2Zpbmc=","IEFsaXZl","QXhpc1NpemU=","IHN0dW4=","IHJlc3RlZA==","dWxsZXRz","IE1hbGF5c2lhbg==","LFVuaXR5RW5naW5l","IGVudnk=","J107DQoNCg==","IE9zdA==","X2p1bXA=","IGNvbnRyYXNlw7Fh","Ing=","CVBhZ2U=","KVsi","IFNJUA==","IEdlb2dyYXBoaWM=","IGNhdWN1cw==","X1RFUg==","4oCdOw==","UG9zdEV4ZWN1dGU=","aW1zaG93","IENPTVBBTlk=","IE5lYWw=","IEhlYXJpbmc=","KGFjdG9y","Qmlk","LlBS","LlByb2R1Y3Rz","IEVtbQ==","IOab","IHB1bHNlcw==","X0VW","L2V4cA==","X21vdGlvbg==","IGdiYw==","IG5hdmlnYXRpb25Db250cm9sbGVy","IENvdXJ0cw==","IEljb25EYXRh","d3U=","X3Jm","IFJhZ2U=","LWZsYXQ=","IEhpbXNlbGY=","X2NodW5rcw=="
,"IG92ZXJzaA==","IGNpZg==","KElz","cGVha2Vy","IENQVXM=","aXJlY3Rvcg==","LHRpdGxl","LnNldERlc2NyaXB0aW9u","IGVhcnRocXVha2Vz","IHdu","Z2x5cGg=","dWx1bWk=","IHNwZWVkeQ==","IGVzcGFjaW8=","IGVtdWxhdGU=","IFwiJA==","X0lORg==","Y2FsbG9j","LXF1ZXJ5","KHZhbHM=","IHNlYWI=","IGhhdm9j","IEludGVyc3RhdGU=","IHRyaWFuZ3VsYXI=","YmluZGluZ3M=","CQkJCQkgICAgIA==","IAkg","YmNyeXB0","IGNyZWRpdG9ycw==","IHNlbWlm","bGxl","aWVuemE=","IEtlbGxlcg==","IG1vbnN0cg==","IE1hcmNvcw==","KHJlaW50ZXJwcmV0","IGhpdmU=","U2Ny","X2hyZXN1bHQ=","IOyhsA==","IFNxbERhdGFSZWFkZXI=","YW5ub3VuY2U=","X3ByZWZlcmVuY2Vz","IHRydXN0cw==","RXJvdA==","LXdvcmtlcg==","IHR3ZWVu","IFN0cmVldHM=","gq3soJw=","IEZyYW56","IOKApi4=","VUlUZXh0RmllbGQ=","LmdldEl0ZW1z","IHRvbHVh","4oCcT3Vy","IHPhu5E=","IHZpcnR1ZXM=","IHBvdWx0cnk=","PXJvdw==","Y29kZWQ=","Tm9TdWNo","IGtvZA==","bHNp","IGtldG8=","IGdyb3VwTmFtZQ==","YXNu","IHVuY29tcA==","IHRleHRpbGU=","dG9vbFN0cmlw","LlBvcGVu","IHByb3N0aXR1dGU=","IHByb21vdGVy","Ijt9Cg==","IGNvbGxpZGVy","QnJva2Vy","ZGF0YXNldHM=","CU5TU3RyaW5n","YW5nbGVy","UklFUw==","YXRvbXM=","IHJlbmRleg==","YXBv","IOuE","Lmdj","IFNPTUU=","IGZnZXRz","R0xF","IHphbA==","IE9wcG9zaXRpb24=","aGFuZGxlU3VibWl0","X21hdGg=","IHNwcmU=","IHNob3J0ZW5lZA==","IGNhdmVz","U01T","LWNvbnNjaW91cw==","IFNhdmVz","LkJhY2tncm91bmRJbWFnZUxheW91dA==","IGVsZWN0cm9tYWduZXRpYw==","KGl0ZXJhdG9y","IHVuYmU=","amVjdG9yaWVz","IG1lZGlhbnRl","IMOubnQ=","Iiwt","IEFTTQ==","6K6w5b2V","IGNvbmZpbmVtZW50","4oCmCgoK","RXhjZXB0aW9ucw==","LW1ham9y","IFZhbmlsbGE=","IExPQ0FUSU9O","IGVsdXNpdmU=","VUFSSU8=","IElOTElORQ==","IHByb2R1Y3ROYW1l","X3F1ZXJpZXM=","Li4uIjsK","IFhpYW8=","V2luZG93VGl0bGU=","bGV0dGVz","IHBlcnBldHVhbA==","U2V2ZXJpdHk=","IEFjaGlldmVtZW50","w6JuY2lh","IHJlbWluZGVycw==","c29ydGFibGU=","IGFmZm9yZGVk","IGluZmx1ZW5jaW5n","IFR1bm5lbA==","LmxlYXJuaW5n","IFF1w6k=","cGhldGFtaW5l","LkJBRA==","Lm1ldGFtb2RlbA==","LWRldmljZQ==","IEtvbnRha3Q=","4pSB4pSB","LXN1bW1hcnk=","KCc8Pw==","KTw9","IHdpc2VseQ==","X290","Om1vZGVs","IFVX","IE9wZW5TU0w=","IEpwYVJlcG9zaXRvcnk="
,"Q29uZXhpb24=","VE9U","LmNyZWF0ZWRBdA==","KHRyYWluaW5n","IGJpc2hvcHM=","IHZlbnR1cmVz","LkVucXVldWU=","IFRoZXJtYWw=","IEJyZXdlcnk=","b3Rlbg==","IEZhdGFs","X3N1cHBseQ==","IGNvbmRpdGlvbmVk","IHN1cGVyaW9yaXR5","IElicmFoaW0=","IGNvcnBv","dW91c2x5","IFByYWN0aWNhbA==","Ly9b","IEFmcmljYW5z","IEJhaHJhaW4=","IHN0ZXJpbA==","IENsYXNzTm90Rm91bmRFeGNlcHRpb24=","LlJlZ2lvbg==","IHRyYW5zaXRpb25hbA==","IGludGVycHJldGluZw==","LlNvdW5k","IGZyb250YWw=","IGhhcnZlc3Rpbmc=","fn5+fn5+fn5+fn5+fn5+fn5+fn5+fn5+fn5+fn5+fn4=","YXRhaXJl","Lkh0dHBTdGF0dXM=","S00=","IEVyb3Rpc2NoZQ==","IGVyb3Rpc2tl","RmlnaHQ=","UGFja2FnZU5hbWU=","IENBQ0hF","d2luZ0NvbnN0YW50cw==","IFppbW1lcm1hbg==","L2Nhcg==","IFF1cmFu","TWV0YWw=","IHVzZXJNYW5hZ2Vy","IG1hc3Rlcnk=","KFVVSUQ=","IHZpZXdXaWxsQXBwZWFy","IHN1bW1lZA==","KC0o","ICAgICAgIAoK","VGFrZW4=","IGNsb2Nrd2lzZQ==","IENhZsOp","KGxldHRlcg==","IENyb3NzUmVm","IEFzdG9u","IEFzc2VtYmx5VmVyc2lvbg==","6Z2e","bnRz","ICQoJ1s=","X1JBVElP","aWNpZW50ZQ==","IHJpY2h0aWc=","IHBlZGln","KGl4","0YHRi9C7","QXNzaWduYWJsZUZyb20=","Ym91bmRlZA==","IGFsa2Fs","X3ByaWNlcw==","IGfFgg==","YW5jaGlzZQ==","X3JlY2VpdmVy","SUdBVElPTg==","X3B1bGw=","IFN0YXRpc3RpY2Fs","X3Rvb2xiYXI=","YW1pZGU=","IEFzeW5jVGFzaw==","cmV0YQ==","IOyi","IFJFQUxMWQ==","IGJ1cnN0cw==","IElucXVpcnk=","IGJpZ290","c2FuaXRpemU=","IEhvbWVy","UXXDqQ==","IFJvdXRpbmc=","LmNvbGxlY3Rpb25WaWV3","IEJpbGxpb24=","U1RSVUNUT1I=","LmVqYg==","IGVuY2g=","LnNldFRpbWVvdXQ=","UnVi","LXJvYWQ=","Lm91dHB1dHM=","Y29udGVzdA==","IHNwaGVyZXM=","IHJlc3VycmVjdA==","Ii4i","IElyaXM=","IOya","IFhL","IFJhcml0eQ==","IElTZXJ2aWNl","YXRoYQ==","IOWH","IHByZXZhaWw=","CXBw","Lkxv","Z2V0V2lkdGg=","IHd3","IHdpY2h0aWc=","QEdldHRlcg==","IEpheXM=","IHNwZWN1bGF0aXZl","KGF0dA==","IHRlZGlvdXM=","IHNjcmF0Y2hlcw==","IHBlbMOtY3Vs","IGJvcm91Z2g=","IG3Dsw==","UmVwcmVzZW50","YXRvcml1bQ==","KENhbWVyYQ==","IGNvbHVtbk5hbWU=","IHJlaXRlcmF0ZWQ=","IENhc3Rpbmc=","LmdldEhlYWRlcg==","IOKAnFs=","IEp1aWNl","Y2h1","LkhUTUw=","IEFudHdvcnQ=","R0x1aW50","CUl0ZXJhdG9y","IEFOQUw=","IHVucG9wdWxhcg=="
,"KExvY2FsZQ==","IG1pdGlnYXRpb24=","IGFkcmVz","4bq3","fSx7Cg==","IFNjaHdhcg==","X1BBSVI=","PigpLAo=","b3V2","IEFsZg==","eEVG","55yB","IGVzY3Jp","TE9VUg==","U0VMRg==","IFRtYXg=","VHJl","bG90cw==","ICguLi4p","XSsk","IGFtZXJpYw==","L3JlZmVyZW5jZQ==","IE9keXNzZXk=","IE1pbmVz","IGFnb3Jh","IHByb3BoZWN5","IE9wcG9ydHVuaXRpZXM=","cHJvZmVzc2lvbmFs","KHByb3h5","cGhhbnVtZXJpYw==","IEVkaXRlZA==","b2xvZ25h","LmlzT3Blbg==","KHZlcnRpY2Vz","IFJpY2t5","X292ZXJsYXA=","Pjs=","LkRPTQ==","e31f","IENPTVBVVA==","cmVkaXJlY3RUbw==","IHNoYWtlbg==","IHJhdGlvbg==","IG5lbGw=","X2Jj","IE5lcg==","YW5kUmV0dXJu","IGVyZWN0ZWQ=","Q2hpZWY=","IGRpbmVybw==","IGphc21pbmU=","LS0tLS0tLS0tLS0tLQo=","ZmFybQ==","IEhhdGU=","VEFTSw==","QU5ORVI=","J11dXQo=","IE5pZ2Vs","aGliaXQ=","IFFUZXh0","Lkxlbg==","IHRlxbw=","c2xpZGVz","ZmVsdA==","IFJFVg==","X2hvbGQ=","IENvdXBsZQ==","ZXNjYXBlZA==","LWV4cG9ydA==","Pkk=","ZXdpc2g=","KEFwaQ==","ICghWw==","Tm91cw==","T1RPUg==","IHNlYWxpbmc=","V2ll","IGthbm5zdA==","K3htbA==","IG14QXJyYXk=","IGFkbWlyYXRpb24=","Lm5i","IGpld2Vs","LlRlYW0=","IHByb3NlY3V0ZQ==","LnhtbGJlYW5z","Y2h3","KGJhY2tncm91bmQ=","IEF2aXY=","CWZpbGw=","IGRpc3Bhcml0eQ==","4Lo=","X0FQUEVORA==","IFB2UA==","44OQ","IFZpdmU=","IGdyYW5kc29u","LmFkZEVsZW1lbnQ=","QXRvbWlj","IHByaW1hcnlLZXk=","IGNvbnRpbmVudHM=","IEZ1Y2tpbmc=","JScK","QG1haWw=","IGN1bHR1cmFsbHk=","YW5nYW5lc2U=","7KCE","Zm9sbG93ZXJz","IHVybg==","IHJhY2tz","IFNBRkU=","Ly8NCg0K","KCIvew==","X0lOSVRJQUw=","X1Jlc3BvbnNl","RXZlbnREYXRh","Jz4k","c3RhcnRz","4Kk=","IHRoYWltYXNzYWdl","IHNwZWNpYWxpemF0aW9u","IOyEpOyglQ==","ZWRv","IGNvbXBlbnNhdGVk","X2NoYXJzZXQ=","fS57","L2VudGl0aWVz","X2Zr","LS0tLS0tCgo=","YXNjYXI=","IGNlbGxGb3JSb3dBdEluZGV4UGF0aA==","IFByb3Bvc2Fs","IE90dG8=","IF9fX19f","ICIqIg==","IHRvb2xraXQ=","IGV4cGVjdGFuY3k=","RG93bkxpc3Q=","LWRh","IHByb3ZvY2F0aXZl","IG1laW8=","ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PQ==","KCgpPT57Cg==","JGxpbms=","aW5jYXJl","IGljeQ==","IEhpc3Q=","QWNjZXB0ZWQ=","IG
Nsb25lcw==","IFFB","IGNvbmZvcnQ=","IHByb3ByaW8=","IFZvZw==","KG1hcms=","X1NlYXJjaA==","IGVuZHdoaWxl","ICQj","44GX44GL","X0xU","SW5zdGFuY2VJZA==","YmFyZA==","cm5l","cmVnb3I=","IG5vcmdl","XDo=","0YDRg9C3","LmJ0bkFkZA==","IHBpbGxvd3M=","IFBhcmFtZXRlckRpcmVjdGlvbg==","SGFuZGxlcw==","IGRlYWxpbmdz","IGNvbnZleA==","IENoYXJpdHk=","Lk51bWVyaWNVcERvd24=","IFNrZWxldG9u","IFp1Y2tlcmJlcmc=","ZXNlbg==","IEZBQQ==","X3N0ZQ==","IGh1bWlk","am0=","Y2hn","LmdldExvY2Fs","IHRhbmRlbQ==","aXN0bGVz","X210","LmFjY291bnRz","IEluc3BlY3Rpb24=","IEZyYXVk","IGvDvA==","IHN5bmNocm9ub3Vz","IFJpY2FyZG8=","IEh1ZQ==","IENvbm5lY3Rpb25z","SU1FTlQ=","b2NoYXN0aWM=","XGRhdGE=","IEVudGVycHJpc2Vz","LXNpbXBsZQ==","IGltYWdlRGF0YQ==","IFVtYg==","LXNjcmlwdA==","L2dlbmVyYWw=","QVBU","IFR1dA==","aW1pemF0aW9u","IGlkYWRl","IEtlbQ==","ZWxzaWY=","LkFMSUdO","IFRvcmllcw==","IEJhc2ls","b2dvbmFs","aGFjaw==","TnVsbE9yRW1wdHk=","IiksCgo=","44OD44OI","ICclJw==","X1JG","ZWdvdA==","LmFzcGVjdA==","KFByb2plY3Q=","TEVOR1RI","cGxlbWVudGFyeQ==","X3ByZWRz","IEhvbGRz","Y2Fycmllcg==","CWxheWVy","QXR0YWNoZWQ=","LXByZXNpZGVudA==","aW5kaA==","J10uJyI=","LkFDQ0VTUw==","IENFTlRFUg==","UXVhbGlmaWVk","IG9zdHI=","LlN5bWJvbA==","dGFodW4=","IExBTkc=","X2J1c2luZXNz","CVN0YXJ0","ZXJyZQ==","IGFzaGVz","IEFkdmVydGlzZW1lbnQ=","Lkhvdw==","IC8vLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t","IG9ibGl2","IGJsZWVk","IHN2bw==","Lm5vZGVOYW1l","IGl0ZW1OYW1l","IEJBTks=","w61jdWxvcw==","IEVtbXk=","IERvbWluaWNhbg==","JylbJw==","IHJlYWxsb2M=","dWxzZXM=","6L6T5Ye6","IE9mZmVyaW5n","64ql","LXByb2dyYW0=","INGB0L7QvtCx0Yk=","TU9W","IG5vZGVJZA==","0LXQvw==","Zmx1aWQ=","IHRlYXNl","w7hyZQ==","IGNvbXJhZGVz","IHVucmVsaWFibGU=","IHBvc3RJZA==","Z2V0SUQ=","b2dyYXBocw==","VGFuaw==","IFFWRVJJRlk=","IGZsb2F0ZWQ=","X1RISVM=","Y2ltaWVudG8=","IE5pY2Fy","c2hy","Qm91bmRpbmdCb3g=","IGlub3JkZXI=","IEdsb3Nz","V2l0aFRpdGxl","dW5jaW8=","IHBlcnNpc3Rz","IGRpcmVjdHM=","YWNjacOzbg==","U2FtcGxlcg==","IGJsYWNrbGlzdA==","IGFEZWNvZGVy","IGludm9rZXM=","X3NraW4=","Pklm","dHJ1bmNhd
GU=","LlNpbg==","c29vbg==","IGRpc2Zy","CVZlYw==","IyNf","LnNjaG9vbA==","IGJsaW5kcw==","IGFjYWI=","IHBhdGhldGlj","IHZvbGNhbmlj","IHJkZg==","IGN1bHRpdmF0ZWQ=","IFVJTmF2aWdhdGlvbkNvbnRyb2xsZXI=","IGlwdA==","IGdsYW5k","IGV2aWRlbnRseQ==","UGh5cw==","IHN3YW1w","IGltYWdlTmFtZQ==","LkxheWVy","dWZl","LFsn","IENyaW1zb24=","6YCg","PGZvb3Rlcg==","IGJpa2luZw==","INC00LDQvdC90YvQtQ==","bW92ZXM=","Y3Jj","aWxsYXRpb24=","IGxhdXJl","0YDQsNCx0L7Rgg==","0YPQug==","IENhaW4=","IHB5cw==","IGNvbGxpZGU=","IHxffA==","KHNwYW4=","IGdpbmc=","IG9iZWRpZW5jZQ==","b3V0ZXJz","U29vbg==","IFdoaXRuZXk=","IEltcG9ydHM=","OlVJVGFibGVWaWV3","KiY=","IGJr","V2l0aEVycm9y","LWV4dA==","X1JET05MWQ==","X3RyYWNraW5n","bm9vcGVuZXI=","w7xucw==","IEd0a1dpZGdldA==","c2ti","U0FWRQ==","T2Jz","KCcuJylb","IGF1dGhvcmVk","LS8=","TG91aXM=","LmdldE91dHB1dFN0cmVhbQ==","IGdlbmVyYWxpemVk","7Yw=","IGFydGlzYW4=","KGNwcw==","IERtaXQ=","0LvQuNGG","LkltYWdlTGF5b3V0","IHN1Y2hlbg==","XX0s","LmNvbGxpZGVy","VGFiUGFnZQ==","XT1b","aHlkcm8=","X3N0cmlw","IGxpY2tpbmc=","IGJvb3N0cw==","IHNrZXB0aWNpc20=","IGpvZ28=","IGNvbXBldGVk","IOuCtA==","Tm9kZVR5cGU=","WEY=","IHBvc3NpYmlsaXQ=","LWNvcHk=","IHRyaXR1cg==","IEF0dGFja3M=","IG7Dqw==","SURBRA==","b2dyYXBoaWVz","VGltZVN0YW1w","b3R5cGluZw==","LUFwcg==","INC/0L7Qu9GM0LfQvtCy0LDRgtC10LvRjw==","ICI7Ig==","IEhhbGU=","L2FwaXM=","IDpdCg==","X2hkbA==","IERpYWw=","CUNvbmZpZw==","X0ZSQUdNRU5U","X0VkaXQ=","LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq","IGNhbmRpZGFjeQ==","IENvbXByZXNzaW9u","X2xvc3Nlcw==","Kj4oJg==","SW50ZWdyYWw=","IHBhcm9keQ==","IGluaXRpYWxpc2U=","ZmlsbHM=","IGFsdHJp","X0VMRU1FTlRT","YWRhc3RyYXI=","Y29ycmVv","IHdhdHQ=","X0RSVg==","IEZvcmdvdA==","IGdldENvbnRleHQ=","IHNob3J0YWdlcw==","IE9DVA==","d2VldGFsZXJ0","IE9wZW5z","Kmw=","IEtpdHR5","4oCZw6l0","IFBpY2Fzc28=","LnRvQnl0ZUFycmF5","0L7Qu9GD0Yc=","IERFTg==","5aeT5ZCN","V2ludGVy","YW50YW4=","X19b","UHJpbQ==","IHJvb2Z0b3A=","IEJpbGxib2FyZA==","dGVzdENhc2U=","cHJvZHV0bw==","LXRodW1i","IHJlc2V0cw==","Z2Vibg==","PkVycm
9y","LmRlcGFydG1lbnQ=","IGVhcnJpbmdz","IENhcm91c2Vs","KGV4YW1wbGU=","CWVt","XENvbnRhaW5lcg==","IEVsdmlz","IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0=","RW5nbGFuZA==","Y3JlZGl0ZWQ=","X2NvbnN0cnVjdG9y","IGxvcg==","IERhd3Nvbg==","QnVybg==","IEJyaWdhZGU=","IE11dGV4","IFRyYW5zaXRpb25hbA==","IE1vdXNlRXZlbnQ=","Z3Jvdw==","Lm1pbnV0ZQ==","IEdNTw==","PVtdLA==","IHN1c2hp","IGFlc3RoZXRpY3M=","T0NVUw==","IFNFTEY=","IEFzc2VydGlvbkVycm9y","IE1DVQ==","IGhpbnRUZXh0","IHNlYXc=","bmdsZQ==","IGV4cGVsbGVk","UFJPUEVSVFk=","KS48Lw==","LW9wZXJhdGlvbg==","IEltbXVu","IGxpY2Vucw==","aWJpYQ==","IGJpZXRlbg==","IGdyaXBz","Q0hBTk5FTA==","X0VSUk9SUw==","X3JlY3Vyc2l2ZQ==","VWx0aW1hdGVseQ==","IE1hamVzdHk=","IGRlYWN0aXZhdGU=","IEVYQU1QTEU=","dWNpb25lcw==","IGN1cnJlbnRWYWx1ZQ==","IGV2YWx1YXRlcw==","L0dyYXBoaWNz","InRleHQ=","X3BhbGV0dGU=","IFRNUA==","IEJlZHM=","LkNvcw==","4Lix4LiZ","PXRvcmNo","IFBBQ0tBR0U=","aWxsYXJk","LmNw","leyduA==","LWFwcHJvdmVk","IE5vcnRod2VzdGVybg==","PHRleHRhcmVh","IENvbXBhdGlibGU=","X1JEV1I=","LlF1YW50aXR5","QElk","X29yaWVudGF0aW9u","Z2V0VXJs","IHRyYW5zbGF0aW5n","IFdlYXZlcg==","IGpzb25BcnJheQ==","IGVtYmxlbQ==","LklzTnVsbA==","IENoYXJ0cw==","W119","Z2Fl","X25lc3RlZA==","dGVtcHM=","cGF0aG5hbWU=","Q1c=","LXdyaXR0ZW4=","IFBBUks=","KGNvbmQ=","X2FsYXJt","IGdlcmU=","IEdpeg==","IE5nYg==","IC5f","YXBwaW5lc3M=","IERlcGxveW1lbnQ=","aVBhZA==","Il1d","IHN0cnN0cg==","IHRvbnVtYmVy","KGRs","CXdvcmQ=","W3Rv","X0ZJWEVE","RXhwaXJhdGlvbg==","OnJldHVybg==","T250","PlBsZWFzZQ==","Z2V0VGl0bGU=","LnNwbGl0ZXh0","Y29tYmluZWQ=","T2Q=","IG5vdmVsdHk=","IlM=","IHN2bQ==","Q292ZXJhZ2U=","IEh1dA==","IHJlc2lzdGVk","IGVsbG8=","IG3DtmNodGU=","S2F5","Lmxpa2U=","Y2Npb25l","IHJlc2VtYmw=","RGVhdGhz","IGVwaXQ=","KHJnYg==","LkNsYXNzZXM=","INC00L7RgdGC","Y2FwdHVyZXM=","XStc","YW1pZW50","IFBhc28=","LlNlbmRNZXNzYWdl","IFJlbmF1bHQ=","IE5hcmVuZHJh","dG91dA==","IGhhZGRl","IFR3ZWVu","w6VkZQ==","IG91dGZpZWxk","Lz48Lw=="
,"QFw=","IER1cmFudA==","IGFicmU=","X3N0b3J5","IHBlcmZ1bWU=","Q3BwVHlwZURlZmluaXRpb25TaXplcw==","INC/0LDRgNCw0LzQtdGC","Y2hlbWVz","IFNhZGRhbQ==","cHJlbm9t","dXNwZW5kZWQ=","IEJlbmVmaXQ=","IHNjZXB0","X01vdmU=","IE5hag==","LU9u","cnVk","SW1hZ2VQYXRo","wq4s","IGFuYWx5c2Vk","IE9H","ZWxsZWljaHQ=","YmlyZHM=","ZWt0ZQ==","IEFsaXNvbg==","IGF0aGVpc3Q=","eyU=","YWJo","LXBob3Rv","aW5zdHJ1bWVudA==","IGhpbnRlZA==","IE9mZmxpbmU=","KSIpOwoK","X1BSRUY=","IHN0eWxpc3Q=","IEt1YmVybmV0ZXM=","IGZlcnY=","CgoKCgoKCgoKCgoKCgo=","KCI9Ig==","LmdldE0=","IG5vdGV3b3J0aHk=","IHNjb3V0aW5n","X3RyYW5zbGF0ZQ==","IGJlZ2lubmluZ3M=","IEx1bw==","IHFs","X2FsaWduZWQ=","IGVydw==","dWFycw==","X1BhdGg=","LicuJA==","IGhvYw==","IGRlcnA=","bG9p","IE1jS2lu","6K+05piO","Lz0=","TGlua0lk","c3RkZGVm","cmVkdWNlcnM=","aXNhbnM=","Lmhpc3Q=","Jy8+Cg==","IFRveGlj","IGRpc2FwcGVhcmluZw==","IGNpcw==","KGRv","IG1haW5TY3JlZW4=","X0JBTks=","IGRlbW9uc3RyYXRvcnM=","IFBhbGV0dGU=","dWVseQ==","UmFyZQ==","IHJlc2lkaW5n","IGFtYmllbnRl","IG1pc20=","LXF1ZXN0aW9u","IG9wcHJlc3NlZA==","IGxldHJh","PGR5bmFtaWM=","IEZvdG9z","LXBvbGljeQ==","aXN0ZW0=","LmV4Y2hhbmdl","c3RyZQ==","JC8s","7ZWY6riw","JAoK","IFJlbmU=","IHRvdXRlZA==","LUNvcmU=","IENyYW4=","IFRyYWRlcg==","IGRldw==","IGZsYXA=","CWZpbGVuYW1l","IGlubWF0ZQ==","KE1vY2s=","IFNvYg==","aXNibg==","IG5vZQ==","IEZvcmJpZGRlbg==","IGVsZXM=","IGRpbmc=","X3Nh","KSovCg==","YXJpZQ==","IFN1cHBvcnRz","IG1vZHVsYXRpb24=","IGVuc2w=","IFNoYWRvd3M=","cHJpbmNpcGFs","YW5nZW50","LUphbg==","IFBhbnRz","LHRy","IGZpdHRl","IGdhcm1lbnRz","TWFyZ2lucw==","TFRS","IE1peQ==","dmVudHVz","IE3DtmdsaWNo","W2F0dHI=","L3Jlc3BvbmQ=","IHR0aw==","IG9sZHXEnw==","IENvbnNl","UHJlbWl1bQ==","IGZyYW5jYWlzZQ==","X2hvcml6b250YWw=","X2li","IEZhcmU=","IGhhcnZlc3RlZA==","ZW5kaXI=","KGhpdA==","PiovCg==","IElSZXBvc2l0b3J5","eWxpZQ==","IGRldGVjdHM=","Om5v","4pi0","IGRpc2XDsQ==","IHVuc2VyZW4=","IG1vY2tpbmc=","c291dGg=","cmF0ZXM=","IGh5cG9j","IFNob3J0bHk=","IEJsYWNrcw==","0YLQuNGA0L7Qsg==","IEFTQVA=","cmViYmU=","aWVj","LkFkZERheXM=","IGVwaXM=","LWluZm
xhbW1hdG9yeQ==","LW5ldA==","IHBhbGw=","65Q=","IGlzc3VhbmNl","IGNvbnRlbnRpb3Vz","LkFyZWFz","0LjQu9GM","IGNvbnRpZ3VvdXM=","W2FjdGlvbg==","IGV4cHJlcw==","ISIpCgo=","VUxP","IHdyZQ==","IHN1YmRpdg==","IHR1cm5hcm91bmQ=","IGFjY2Vs","IFVuaXY=","IFVuaXZlcnNpZGFk","c2V0dA==","ZGVzY3I=","LkdlbmVyYXRpb24=","IHBhdHJpb3Q=","IGZhcw==","KioqKgo=","UVA=","IOWN","b3BwZWw=","IGp1ZWdvcw==","LmRyYXdTdHJpbmc=","LWNvbmZpcm0=","CSAgICAgICAgICAgICA=","PFByb3Bz","IGZhbWlsbGU=","IEhlbG1ldA==","ZXJ0aWFyeQ==","YXRoaQ==","IGN1bHRpdmF0ZQ==","IGR1cGxpY2F0aW9u","IHNweU9u","Ki8pCg==","IEh1bmdlcg==","T3J0aA==","IHBpbnBvaW50","IEhhZw==","IHRpbWV0YWJsZQ==","bWFyZ2luVG9w","IHJlY2lwcm8=","ZmVsbA==","IFBlcnNpc3RlbnQ=","44Gp","cGx1cmFs","cXVldWVk","IGdyYWNpYXM=","w6F0aWNv","IGhhcmRzaGlw","IEFwYXJ0bWVudHM=","IEp1bms=","IFJldmU=","X01zaw==","IHN1cHJh","IEFUUA==","IHNldFNob3c=","5a2X56ym5Liy","IE5vdHRpbmdoYW0=","U3RldmVu","IE11bmQ=","cmFuZ2Vz","IHVwbG9hZHM=","IGJmcw==","cHo=","dWx0aW1hdGU=","IEVmZmljaWVuY3k=","QU1J","5b6E","X1JFUEVBVA==","IGFjYWRlbWlh","LnRvb2xTdHJpcEJ1dHRvbg==","VG9FbmQ=","cnZpbmU=","IFRoeQ==","IEVsZWN0b3JhbA==","IFJFUVVJUkVE","IHBsdW5nZQ==","IFJldm9sdXRpb25hcnk=","IFRlbnQ=","IGdyZW5hZGU=","IjpbeyI=","IG1vdXI=","UG93","IGV2YW5nZWxpY2Fs","VEVDVEVE","IG92ZXJ0dXJu","CUlucHV0","cmVjb21tZW5k","JUM=","IHNsYWc=","IEJoYXI=","X2VuY3J5cHQ=","IFdhcmZhcmU=","KGFnZQ==","QVRFR09SSUVT","bWlsZQ==","IGhlYXZlbmx5","YW1tZXI=","KCkpWw==","YWRlcmE=","aGc=","IExBVw==","IHBhY2thZ2VOYW1l","X3R5cGVEZWZpbml0aW9u","KGJl","REJOdWxs","X3Rhcg==","IGhldXJpc3RpYw==","IFdhbnRlZA==","IFN0dWI=","IGtpdHQ=","UkVD","IHBhc2Fy","Lm5ld0J1aWxkZXI=","CWdyYXBo","aW9zYQ==","LmNvbHVtbkhlYWRlcg==","IHNldE9wZW4=","IFRoaXJ0eQ==","ICIlLg==","QWxiZXJ0","IHNhbWE=","IHJvY2tpbmc=","Q29tcGxl","TVY=","fCgpCg==","X3JlYWRz","KHZhcmFyZ2lu","b3Vsb3VzZQ==","IFNJTUQ=","IGNhcmJvaHlkcmF0ZQ==","d2hvbGU=","LE5vbmU=","i+ivlQ==","IENoYW5k","Y3phcw==","X3F1ZXJ5c2V0","IGV4aXN0ZW50aWFs","IGVkaWJsZQ==","IGFnaWxpdHk=","IFdpbGxpcw==","IGh5bQ==","IEJyaWxs","0LjRhQ==
","IE5vdEZvdW5kRXhjZXB0aW9u","ICgoKQ==","QVBTSE9U","IHN1YnN0YW50aXZl","X3R5cGVEZWZpbml0aW9uU2l6ZQ==","IHZhY2FuY2llcw==","RU5HSU5F","IGFuZGVycw==","IHN5bWI=","IGV0cmVl","KS5f","IHRyYW5zcG9ydGluZw==","aW1wcw==","L2NvcA==","YWN0YWJsZQ==","X2ZsdXg=","IG5ld0luc3RhbmNl","YXRvaXJl","IGNvbHVtbkluZGV4","IEdpbw==","IHN1YnRpdGxlcw==","LldpbkZvcm1z","0LvRj9C10Lw=","IGFsZXJ0ZWQ=","IHN0cmlwcGluZw==","d2VuZHVuZw==","IE1ldGhvZEludm9jYXRpb24=","RXJyb3JIYW5kbGVy","U2Nyb2xsYmFy","UG9ydGZvbGlv","Y29uc3Vt","IENPTU1PTg==","TGY=","X2Jhc2Vk","b2NhbHk=","IGVmZmV0","dnZt","cmlwc2k=","IGZsb3VyaXNo","Y2h0ZXI=","PT09PT09PT09Cg==","IHJlcXVlcg==","LnF1ZXN0aW9ucw==","KCI/","IHBvc1g=","IFBDUg==","IE9yZ2FuaXphdGlvbnM=","cHLDvA==","RXhhbQ==","IEluY29ycG9yYXRlZA==","X3BocmFzZQ==","IHByYXllZA==","IGhvbWVvd25lcg==","IFRhag==","eng=","IElkZWFsbHk=","X01BQ0hJTkU=","IFJlbW92aW5n","Q29lZmZpY2llbnQ=","IGVkdWNhdGluZw==","ID8+Jg==","IHBvdXJz","aXJhbQ==","X3BlYWs=","IG5lc3Rpbmc=","YWJ5dGU=","bmF0dXJl","IGFmcw==","IFJvbw==","Y2FyZ28=","b2JqZXQ=","IGZyZWVpbmc=","cXVha2U=","RGVuc2l0eQ==","IGRlc2NyaWNhbw==","LyoqKioqKioq","IGRhc2hlZA==","IGdyb8Of","b29reQ==","IFBFT1BMRQ==","X1Bvc3Q=","IGNlcnZpY2Fs","IEFkanVzdGFibGU=","ZW5zdWFs","IFJldmlzZWQ=","KHJlZmVyZW5jZQ==","CUJhc2U=","ZXNzaW0=","TWFpbnQ=","IGdldFNpemU=","IFNhbmR3aWNo","cmFkaWVudA==","c2luaw==","Oi8vJw==","X3R0","RlBT","IEFybWVuaWFu","cHJldlN0YXRl","X0xJTkVT","IHRpZ2h0ZW4=","PFs=","XTw8Ig==","IFRyYWZm","IGxpcXVpZHM=","IGFyY3M=","X0NvbW1hbmQ=","QHByb3RvY29s","LWlzaA==","IHJ1YmJlZA==","QkJD","L2ZpcmViYXNl","QXBwQmFy","PFg=","IFNJTkdMRQ==","LlN0YXR1c0ludGVybmFsU2VydmVyRXJyb3I=","IHZlcnRl","L3F1ZXJ5","IGdldENvbmZpZw==","IERpcmVjdFg=","cGh5c2ljcw==","eWNvcA==","IGJyZWFrZXI=","LXZvbHVtZQ==","ZGF0YVRhYmxl","4oCZZQ==","cmlvdHQ=","IEV0ZXJuYWw=","Z2V0SGVpZ2h0","IG9uSXRlbUNsaWNr","IHF1YXRlcm5pb24=","IGtpbmt5","ZGVzZXJpYWxpemU=","KFNwcmluZw==","IHBlYWNlZnVsbHk=","X0RldmljZQ==","KE1hdHJpeA==","acOocmVtZW50","KHR5cA==","LnZhYWRpbg==","LmdldE1ldGhvZA==","IOKAnQoK","IHRocmVhZG
Vk","IEZhbW91cw==","IEdhbWI=","IOyngA==","INCk","IGZha3Q=","IGVjaHQ=","X3Vi","LkpwYVJlcG9zaXRvcnk=","IHVuZ2U=","LWVuZGluZw==","IENBTUVSQQ==","Y3JlZGVudGlhbA==","IFBhc3Nwb3J0","CVJUREJH","IGV4dHJhZA==","LW9yaWdpbg==","IHNhY3JpZmljZWQ=","IFNjaHVsdHo=","IFR1cnRsZQ==","LmNlbnRlclg=","IHNob3djYXNpbmc=","IGJ6dw==","eXJv","aXNOdWxs","LmlzRGlyZWN0b3J5","bWFpbnQ=","X2Jp","IFNwcmluZ2Vy","fSgpCgo=","aXNzdWVy","LWFybQ==","ZXNr","bGluaGE=","IGtvcnQ=","YWphcw==","YWxpbms=","KEJ1dHRvbg==","IFJlc3RvcmF0aW9u","IGluY3I=","IFpob3U=","CSAgICAgICAgCQ==","IERpc2NsYWltZXI=","IGt2aW5ub3I=","IERhcmU=","IDwtPg==","6K+m","CQkJCQkJCQkJCQo=","LkNsYW1w","CXNjb3Bl","IE11bQ==","PDw8PDw8PA==","L3t7","X2FydGlzdA==","IFJlYWN0aW9u","IE5pY2tlbA==","X1JlbW92ZQ==","KCgoKA==","64yA","IGR5bmFzdHk=","IFRocm93cw==","IENvdWw=","X3JuZw==","IERvaw==","Lmxpc3RWaWV3","IFR1Y3Nvbg==","KHRvaw==","IFBoaWxpcHBl","VG9TaG93","IGRpZXRh","IFVsdHI=","LlRpY2s=","IEdldFR5cGU=","aWV0ZQ==","IExlYWg=","SGFyZHdhcmU=","IENvbXByZWhlbnNpdmU=","Q09NTU9O","IGluZHVzdHJp","aXJpY2Fs","LWJlZHJvb20=","IGd5cm8=","INC60L7RgA==","IC0vCg==","Y291cg==","IEJydXNoZXM=","TXVsdGlwbGllcg==","IHVzZXJkYXRh","IFJlY29nbg==","IG9ibGlnYXRlZA==","IExldmlu","YW5jZXN0b3I=","IG1lbmluZw==","IFVk","LGpzb24=","KGFzc2lnbg==","IG5kYXJyYXk=","X2Nvcm5lcg==","QEFsbEFyZ3NDb25zdHJ1Y3Rvcg==","6aqM6K+B56CB","YWRvcnM=","IHJlc3BvbmRlbnQ=","R09SSVRI","IHRlbmdv","IHNldE1lc3NhZ2U=","IElQTw==","YXJyYXlz","IEFHQUlO","J1s=","ICItLy8=","w6Rt","44CCXA==","Lm9uY2U=","Y3VycmVudFRpbWU=","R292","IGdldG9wdA==","bWx4","IFRvbmU=","J11dOwo=","IHByZWRhdG9y","V3k=","L2VudGl0eQ==","IG1hbnRyYQ==","KT49","b2dyYWQ=","IG1lbGFu","IHNvcnRCeQ==","IERFRklORQ==","UHJvdGVjdGVk","Y2RlY2w=","Jz4iLiQ=","PGN2","Y3JpcmU=","LVRydW1w","IHVjZmlyc3Q=","Y2Fzc2VydA==","IGFja25vd2xlZGdlbWVudA==","IElOVg==","IFVOVQ==","LnNxdWFyZXVw","IFNheA==","cmV0dGU=","KCkKCgoK","IERhdGFCYXNl","IFBhdHJpb3Q=","X1Jvdw==","IEV4aGliaXRpb24=","IGRldGFpbmVlcw==","IFN0cmluZ0lP","X0RFTg==","TW9kaWZpZXJz","YXNhcg==","aXJ0aW5n","IHRyYW5
xdWls","KGVuYw==","IOOCsw==","bmNvZGVy","X3VudXNlZA==","IEJpYW4=","VmVyYg==","X2V4Y2VycHQ=","L2V4cG9ydA==","IFNleHQ=","RHM=","QU1QTA==","T2ZTdHJpbmc=","X3RyYWNrcw==","d2o=","b3Rvbmlu","IElURQ==","SVZFTg==","LW9yaWdpbmFs","IEZJTkFM","X18pCgoK","IGVuc2U=","IFV0dA==","Oioq","IFN1cnJleQ==","IEthaXNlcg==","YWRtaW5pc3RyYXRvcg==","LWxhcmdlc3Q=","IGxldHp0ZW4=","IGNoYWluZWQ=","J0g=","IGRvY3VtZW50aW5n","IExlY3R1cmU=","Ukg=","b2xsYXBzZWQ=","c2tpcnRz","ZWxkZXI=","IFNpeHRo","IGFsbGVnaWFuY2U=","SVNPU3RyaW5n","VXNhZ2VJZA==","LmhhcmR3YXJl","IHBhcmk=","IHfDpGhyZW5k","IHJkcg==","IGhqZW0=","TE9PUg==","IExQQVJBTQ==","INC80L7QttC10YI=","IGhvbWFnZQ==","b3V0c2lkZQ==","IENoYXJTZXQ=","PEdhbWU=","77yZ","X01VVEVY","KSkvKA==","X3Jlb3JkZXJlZA==","dGV4dElucHV0","QU5DRUQ=","IFRlZQ==","IGNvcm5lcmJhY2s=","UXVlcnlTdHJpbmc=","IGxvbmdpdHVkaW5hbA==","IEhvbGlkYXlz","QUJDREVGRw==","LktleVByZXNz","LnVs","eWRybw==","IFRhdGU=","CXJvdXRlcg==","c3BvdHM=","IHBhdWw=","LXByZXY=","IGtub3dpbmdseQ==","IEt1cmRz","IEV1cm9w","LmNlcnQ=","QklH","KGNvZWZm","IENsYXVz","L2V4YW1wbGVz","IEZhcm1z","IC8vKA==","U1BBTg==","IGNpcmN1cw==","IE1JUw==","IFRyYWl0cw==","LWNsZWFy","IHJlZ2ltZW4=","IGJhY2tncm91bmRJbWFnZQ==","dXNhaGE=","X01ldGFkYXRhVXNhZ2VJZA==","IHJoZQ==","Q2xpbg==","IERvbWluaWM=","Lm5leHREb3VibGU=","KGRldGFpbA==","VGhyZWFkUG9vbA==","IENhcnBlbnRlcg==","c29ydGluZw==","IGdvdmVybm9ycw==","IHNpbmdlcnM=","dW5saW5r","IHJpbmdpbmc=","IHNjaGVtYXRpYw==","IGVycm1zZw==","IGJlYg==","LiIr","IEluY3JlYXNlcw==","IkFsbA==","IGFjb250ZQ==","emlh","LlRleHRDaGFuZ2Vk","IFRvRG8=","LDopOwo=","bmFnZQ==","Y2hs","b3dlbA==","IGdlcmFkZQ==","X2ZmdA==","IGVzdGFtb3M=","U1RBUg==","IGRpc2d1c3Q=","Z3Jhbg==","cG9ydHVuaXR5","IGF1dG9iaQ==","e317Cg==","IENvdXBvbnM=","X0dBSU4=","IFRDSEFS","L3Bhc3M=","55Sx","IGZvb3R3ZWFy","KGJvdW5kcw==","YXB1cw==","Y2l0ZQ==","Qk9PVA==","IENvZGVj","bG9ndWU=","LXByb3BlcnRpZXM=","YXV0b21hdGlvbg==","IFNob2U=","c3BlY3Q=","KG1t","IEtldA==","W3BhcmFt","IGJhc2ls","IEFuZ3VsYXJGaXJl","IGFkdmVudHVyb3Vz","X1VDbGFzcw==","IGluZHVsZ2U=","CWN1ZG
E=","IGluc3VsdGluZw==","LkV4cHJlc3Npb25z","IG9uQ3JlYXRlT3B0aW9uc01lbnU=","VUVM","IGJpdGluZw==","KCFf","IEVuY3ljbG9wZWRpYQ==","IGJlcnQ=","IFZlcmE=","IEJpYmxpY2Fs","aW5zaWNz","X1NJTVBMRQ==","IHNhbGlkYQ==","cmVxdWVzdGVk","IENvbXBvc2l0aW9u","LkF0b2k=","KEtleUV2ZW50","ZXJlYQ==","IGRlcG9ydGVk","IFF1cg==","IG5pcHBsZXM=","aXNBcnJheQ==","INGD0LrQsNC3","IGJyaW5r","bWV0cm9z","RW51bWVyYXRpb24=","IEJ1aWxkcw==","ZXJ0b3M=","IHNhaW50cw==","LmRlcGxveQ==","ZXRoZXJldW0=","IGtpbmRlcmdhcnRlbg==","dmFuaXplZA==","IGNvbWJpbg==","IHBvdXZvaXI=","S2lu","YXLEsQ==","IC4uLi4u","77y+","Lkdv","IHF1aXJreQ==","xLFuZGFu","IGFjdGlvblR5cGVz","IFFVRVJZ","VGF5bG9y","IFJL","dGF0","LnBhY2tldA==","IElNUE9SVEFOVA==","IGN1c2hpb25z","YnVsaw==","ZHVjdGl2ZQ==","YmVuZWY=","b2NyaXN5","IGZ1ZXJvbg==","IGN1cnNlcw==","IGZpbGluZ3M=","ZWxpZXI=","KD86","X2RyaXZl","IGNvbnRhY3Rv","IFBhcmt3YXk=","dmlkZXM=","Z25l","YXZhZ2U=","XFwu","ZnVsbE5hbWU=","ZGxs","IHNob2Nrcw==","ICMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIw==","X3B4","QFdlYg==","LlBlcnNpc3RlbmNl","IHN1bms=","LnRvb2x0aXA=","YXV0aWNhbA==","TmV3c2xldHRlcg==","IHdhaXRlcg==","IGlucXVpcmU=","0LDQtdGC0YHRjw==","KCdfXw==","dG9n","SUVOVEFUSU9O","IGNvbXBhbnlJZA==","IEJhc2ljcw==","CUpMYWJlbA==","IG1hY09T","IE1hdHM=","X3RlbA==","LXByZWZpeA==","IG11dGF0ZQ==","fScp","Y2hlbmc=","IE1pbGl0","IiY=","ZmluZGluZw==","IERhdGFMb2FkZXI=","LkdQSU8=","IExldnk=","IHNuZWFrZXJz","IGNyw6lk","YXduZXI=","eGlh","L3NpbXBsZQ==","Q0hS","IGZsb3RhdGlvbg==","LnNlbnNvcg==","QnJhemls","IFNlYXNvbnM=","IFNwZWFr","LWJhbGw=","IE11dGF0aW9u","dWtrYW4=","IE9tYWhh","4oCZb24=","IEN1b21v","IEp1ZGljaWFs","IGNoZWNrcG9pbnRz","IEZyZW0=","CUlk","ZWdyaXR5","X2Fm","QE5vQXJnc0NvbnN0cnVjdG9y","IHRhYmVsYQ==","WyM=","bm90YQ==","IEZhY3RvcnM=","KGdyb3Vwcw==","aXN3YQ==","SVZP","IHNjcmk=","YWNldA==","IE1laA==","KGNsYXp6","IFs8","cGVyaWFs","IHN1cnBhc3NlZA==","IGpva2Vk","IHJ1ZA==","IGltYmFsYW5jZQ==","IEZyYWdl","c3Nw","IGluZGljdGVk","Lm1hcmtldA==","O20=","IHJlcGFpcmluZw==","LW5vdGU=","RGVidWdnZXI=","KFdlYg==","IHNp
bmdz","IExveQ==","IERFU0lHTg==","LkNvbXA=","LWNvbnRyb2xsZXI=","IGF2b2NhZG8=","IEJvd2ll","Y29udGFkb3I=","dWxpbmdz","dWNob3M=","c3BlY2lmaWVy","IFZvbHZv","IGRlbW9z","IFByb2R1dG8=","Lk5vdEZvdW5k","IG5pw7Fvcw==","IEJvbHM=","X291dGVy","U2hlcg==","QVVUTw==","IGpvdg==","IEZyZWRkaWU=","b3JpYXM=","IGFmZWN0","IGZhY2lsaXRhdGluZw==","IGRvbWluYXRpbmc=","UGFyY2VsYWJsZQ==","JywnLQ==","bW9vbg==","IG1ldGFzdA==","IHNjYXJm","IFRoZXJt","Q2FsbEJhY2s=","0YHRgtCw0LI=","LkltcG9ydA==","IGJldHJheWFs","aWN1bG9z","IHdlacOf","5YyF","X14=","d2lmaQ==","IFNFTlNPUg==","X0JVU1k=","JGI=","X0ZJTkQ=","IHBsYXN0aWNz","IENPTlZFUlQ=","CWNhbGw=","IFByYWd1ZQ==","IGdhcm5lcmVk","X2xlYXJuaW5n","c2hvb3Q=","J10pKQ0K","IEdpbmdlcg==","PXBk","LHRlc3Q=","UHJvZml0","IGVzdGltYXRvcg==","IGJyZWU=","IC8vPC8=","X2hhdmU=","IEtvZA==","X0lNTQ==","aXp6YXM=","bWlnaHR5","154=","IE9uQ2xpY2tMaXN0ZW5lcg==","44OH","IFNjaWVudGlzdA==","RmlsdGVyZWQ=","YXZs","aGF5","X2dlbmVyYXRlZA==","XScK","IEF1dGhvcml0aWVz","OnBhcmFt","IHN0YXR0","LW1hdGVyaWFs","IGxpZGVy","IENyb3A=","IEJ1bmlmdQ==","IG5leHRQcm9wcw==","b3J6","X29yZA==","PHg=","X0lPQ1RM","IE11c2NsZQ==","CWV4ZWM=","RU5BTUU=","X2xldHRlcnM=","IyMjIyM=","IENz","J109PSI=","ICInKQ==","Q2xlYW51cA==","LnN0cnVjdHVyZQ==","zro=","6YCa6L+H","J107Pz4i","IExhdGl0dWRl","YmJpbmc=","IGJhbmFuYXM=","cmVjdGlvbnM=","IFJhbmRhbGw=","TllTRQ==","IGFwcmVuZA==","LlJlc3BvbnNlRW50aXR5","IHRlc3REYXRh","XGU=","IFdL","LkFkZENvbXBvbmVudA==","X3J1bnM=","w6dvaXM=","LW1pbmk=","Zm9sZGVycw==","IGxvc2Vycw==","IFRvd2Vycw==","LUVuY29kaW5n","OnI=","Y2hvb3Nlcg==","IGZsYXR0ZW5lZA==","0YHRgtCw0L3QvtCy","CVB5","5Lic","IGRhbW5lZA==","RGVwdA==","d2Vk","IHBpc2M=","Z2llcw==","X2dhbWVz","Lm1hc3M=","KEVxdWFs","IG5hdGl2ZXM=","LnRodW1ibmFpbA==","bHRy","IGVxbA==","X2luY29tZQ==","CWhlYWRlcnM=","LWhhaXJlZA==","IG1lZGlvY3Jl","IFdpdGhkcmF3","IGJpdHRl","2b4=","PWlu","b2NrZWQ=","RnVsbHk=","IFRFTVBMQVRF","w7pkZQ==","T2Rk","aWxsZXo=","VGVsZXBob25l","IAoJCQo=","KCInIg==","X3NjaGVk","ZXJuZQ==","wr4=","LnBpY2s=","IE1TSQ==","CWZm","RGlzY292ZXJ5","IENPRA==
","IExhY2s=","IHNlbnNhdGlvbmFs","bW90aA==","IExlZ2lzbGF0aXZl","0Y0=","IHZpYWJpbGl0eQ==","IGdldEVtYWls","IHVuYW5pbW91cw==","IHBlbGxldA==","ICIoKQ==","Y29hdA==","YWdvb24=","IEFMV0FZUw==","XHVD","X3N0ZG91dA==","QW5keQ==","IG5ld0xpc3Q=","IE1haGFyYXNodHJh","LF9f","PXVzZXJuYW1l","IHNjcmlwdGluZw==","IFRtaW4=","PEFjdGlvbg==","PXt9LA==","c3ltYm9scw==","IGZlbmNpbmc=","IHbDrWRlb3M=","IE1hdXJpY2U=","Y29ybGli","IGtlbQ==","In0pLAo=","IENsYXNzaWNhbA==","Y29sbGVnZQ==","IEhvbWVwYWdl","IH19Cgo=","X01zcA==","IENvbXBsYWludA==","IHNhbmR5","QXNpYW4=","X3NlcmlhbGl6ZXI=","IExhaA==","IGJ1ZHM=","b2xvZ25l","IHJlc3BvbnNlRGF0YQ==","b3BoaWxl","a2F0ZWdvcmk=","RW5kZWQ=","bGVjdGlj","IGNsYXdz","Li4uJyk7Cg==","IHBsYW5uZXJz","IFphaw==","IEdsb3Zlcw==","Iil9","IGZhc2hpb25lZA==","YnJvbg==","IG5ld2NvbWVycw==","dmFuYQ==","IHBpZXJ3cw==","UmVjZWlwdA==","LWVudg==","IHJ1dGE=","IEZhcm1lcg==","b2RvcmU=","bXVp","IHJvbWFudA==","IGluZmxpY3Q=","IHNlbWluYXJz","PWN2","KHN0b2Nr","IGV4dHJhY3Rvcg==","IFRpZmZhbnk=","X3V2","LmNvbnRhY3Rz","JyksKCc=","IHNvbHZlcw==","LkNvbm5lY3Rpb25TdHJpbmc=","L2RlYnVn","IEF2ZXJ5","44Oj","IG1heFg=","U3Bhcms=","PHRoaXM=","IGhpa2Vz","S2V5VmFsdWVQYWly","IFF1aWV0","c3RhYg==","IEtvbW1lbnQ=","bHljZXI=","IE1TTQ==","IExhbnRlcm4=","IGNvbmp1bnRv","aHNp","TVVMVA==","V2l0aER1cmF0aW9u","YXR0YWNoZWQ=","IEFzdGVy","CXBvaW50cw==","IFNpYmVy","IE1ldGhvZGlzdA==","L3NpdGVz","IGZvcnR1bmVz","UGFydGljaXBhbnQ=","IGN1c3RvbWVySWQ=","KWluaXQ=","X3NlcnZlcnM=","IHdlYXZl","IFRSQUlO","IGhhcmFzc2Vk","7J6R","YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXo=","X2Zhcg==","QWxjaGVteQ==","LmxpbmVXaWR0aA==","IHRoZXJhcGlzdHM=","IExvYg==","ZXF1aXBtZW50","IHJlY2h0","Lm1pcG1hcA==","Lm5pY2tuYW1l","IHVudG91Y2hlZA==","QUdPTg==","IFNhdWw=","IHdvcmtzaGVldHM=","IFZldGVyYW4=","b3VkZW4=","YWNsYXNz","X2FzbQ==","IHRlbXBs","IEV4cGVuc2U=","ZWlnaHQ=","I1NCQVRDSA==","em9uZXM=","LnBhcnRz","YXRyaWNl","bGF3cw==","dG9CZURlZmluZWQ=","RWZmZWN0aXZl","IFBpZWNlcw==","YXJ0aQ==","IGluaGliaXRvcnM=","CXBhcmFtZXRlcnM=","IHRlbGVncmFt","Ym91cmc=","X25vdGlmaWNhdGlvbnM=","IHBvc2
l0aW9uYWw=","LWRlYWxz","IC8qLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ==","IHNoYWRlcnM=","XT0k","IGRlY28=","ZXR5cGVz","Y2xhcmU=","IEdTTQ==","LnV0aWxpdHk=","VG9TdHI=","YWZlbg==","IFht","X3BhcnRpY2xlcw==","IGZsdWZmeQ==","TWFya2V0aW5n","IHN0YW5kaW5ncw==","PwoKCgoKCg==","VU1BTg==","X1BBWU1FTlQ=","CVRpbWU=","cmF3bg==","b3Jybw==","IGVlcnN0ZQ==","IHBhZ2VOdW0=","IENPUA==","IHBsYWdpYXI=","VXBsb2FkZXI=","JHNlbGY=","bGF0ZXI=","ZXJpYWxpemVk","IGFsaWduU2VsZg==","IOKZpQ==","LmFycmF5Y29weQ==","IG5vc290cm9z","CWdwaW8=","IHBsb3R0ZWQ=","aXRlcmF0aW9ucw==","IFJlbGF4","Y2lwaGVy","R2lmdA==","IEJldHQ=","IFhS","IHN0cmlwZWQ=","KGVudmlyb25tZW50","ZWdlcnM=","X1JFU0VSVkVE","IGvDtm5udGU=","IGluZmVycmVk","UGRm","c29ycnk=","cGFyYXRl","LkNvbmNhdA==","IGxpcGlk","LkJP","IG9ybQ==","IENvbnNvcnQ=","IG92ZXJzZWVpbmc=","IGFtYmVy","IHBsZXRob3Jh","CUFjdGlvbg==","cXVlcnF1ZQ==","IGh1aXM=","ID1b","IHByb2dyZXNzZXM=","anVkdWw=","IGNvbnZlcnRpYmxl","LmVtYmVkZGluZw==","IHs/Pgo=","IHJlZHV4","W2xhYmVs","OiIpOw0K","Lm9ubGluZQ==","cXVhcnRlcmVk","IHNjaG9vbGluZw==","ICJcIiI=","W2xpc3Q=","QWxhbg==","J30KCg==","eXBzdW0=","IHN0cml2aW5n","IFJlc3BvbnNpYmxl","IO2MjOydvA==","LkludFB0cg==","cmlrZXM=","ZW52aWxsZQ==","LnNldExheW91dE1hbmFnZXI=","IFBhc3Nlbmdlcg==","IGRpc29i","IGZlcm1lbnQ=","LlBpeGVs","Pign","IGNvbnRlbmRlcnM=","LWJldGE=","IGFmZmlybWF0aXZl","0L3QvtGB0YLQuA==","aWHDp8Ojbw==","UmVjb21tZW5k","aW1pdGVycw==","X3lsaW0=","IHN1YnNpZHk=","IGVyYg==","RmlsZVNpemU=","KHNy","IHBvb3Jlc3Q=","IHZvaQ==","U2lk","IHNsaXBz","X21pbnV0ZXM=","IHVn","xqFu","IG5hdMO8cmxpY2g=","44Oe","YmVhcg==","fV8kew==","IGZpc3Nl","IGRpc2NyaW1pbmF0b3J5","CQkgIAo=","IENvaWw=","X2lmYWNl","LnZlcg==","IG1pbmVk","IGFzc2Fzc2lu","IHVuc2V0dA==","LnJlcXVlc3Rz","LlVT","aW1hZ2VVcmw=","IHN0cmF0ZWdpY2FsbHk=","LWJhbmQ=","IHRyb3VzZXJz","WEQ=","ey8=","bGVjdGlvbnM=","YCgp","IlA=","IHNrZXRjaGVz","Y2xpZW50SWQ=","IFNyYw==","b3BlbmluZw==","UHV0aW4=","IFBvZXRyeQ==","IFBST00=","SUxMSVNFQ09ORFM=","IGJvb21pbmc=","U2ltaWxhcmx5","Omxhc3Q=","
Lndvcmtlcg==","LmdldElE","LlNQ","c2VydmVycw==","b2N1bGFy","IHNwaW5hY2g=","SVNL","w7A=","J10pWw==","IGNoaWVmcw==","IGdyb8OfZW4=","cmlldmluZw==","LmFzaw==","LXN1cg==","VlY=","Lz4iOwo=","KHJlbW92ZQ==","IEtM","IEhhbGV5","QFJlc3BvbnNlQm9keQ==","LSY=","U3dhZ2dlcg==","IHpuYWo=","Lm9uRXJyb3I=","cmVnbw==","ZWxpeA==","IEFWQUlMQUJMRQ==","IHNlcGVydGk=","aWFw","X21pc3M=","IHN1cmdlcmllcw==","IGltcGFydGlhbA==","IENvdA==","YWt0aW9u","IHdoaXRlbGlzdA==","INCw0LI=","X21peA==","IEJlZHJvb21z","IHByaW1laXJh","IHNpZ25pZmljYQ==","L2J5","IHN0YXJ0bGluZw==","IFNQRQ==","dWNjacOzbg==","TnVtZXI=","SUJN","LmZyYWdtZW50cw==","UmVudA==","IHLDs3duaWXFvA==","LkFVVE8=","LkZvckVhY2g=","IFpodQ==","IEN1bm5pbmc=","IFdhcm4=","IEJI","X0RPV05MT0FE","QnlLZXk=","KeKAlA==","IGNvbW1hbmRl","X0FOUw==","Q2hyb24=","RklU","X2F0b21z","X1NLSVA=","IHZhcA==","KEJveA==","IGxkYXA=","dW5wcm9jZXNzYWJsZQ==","SVRJT05T","w6lyw6k=","LG1zZw==","IG91dHNldA==","IGRyaWxsZWQ=","IGTDqXZlbG9wcA==","IENvYXQ=","IEJlbmdoYXpp","SG9va3M=","IE1pc3NpbGU=","X1Jlc2V0","Pi88","ICItIgo=","KCk9PnsK","IEhvY2g=","LmF3YWl0","QWRyZXNzZQ==","IGRpZ2l0YWxseQ==","IlRoZXNl","b3BsZXZlbA==","IGFzeW5jaHJvbm91c2x5","IER1Y2tz","UkVTUA==","SVJP","LmZpeA==","IFJhZGFy","dmVydGlzZQ==","w61zZXM=","SXRlcmF0aW9ucw==","bW91c2V1cA==","bWludA==","RklSU1Q=","IHBheXBhbA==","X3VwZ3JhZGU=","V3JhcHBlZA==","Ow0NDQo=","K3M=","IGNhdGNoZXI=","Lk9w","X05PVElDRQ==","cGFyYWxsZWxlZA==","Q1ZF","Zm9yZ290","IHBhbm9y","IG9mZnJl","IGVub3JtZQ==","KCkNCg0KDQo=","YWRpYXRvcg==","YWRkQWxs","W3RleHQ=","KHV0aWw=","LlByb21pc2U=","YW5pc20=","X29mZmVy","RU5ESUY=","ZG90cw==","IEtybw==","IHNwZWxsZWQ=","IGFwcE5hbWU=","QWN0aXZpdGllcw==","IFNwaWNl","ZWF0ZWQ=","IHNrYg==","IGvDtno=","IHRvcmNodmlzaW9u","Q2l2aWw=","IGhvcw==","X0hlbHBlcg==","acSH","X3Vuc2lnbmVk","6K66","4oCcQW5k","CWtmcmVl","LnJhaXNl","IGNhbGxl","IExhbnM=","IGFudGln","XCI+IjsK","YnJhbmNoZXM=","bG9ncmFkb3Vybw==","IHN0YWxsZWQ=","YWx5emVk","RGVyaXZlZA==","Om5vdA==","IGdpYmk=","IFR1cm5idWxs","LnVzZXJEYXRh","KFRhYmxl","IERlcml2ZWQ=","CWNvbmY=","IG
FsZ2Fl","IGthZmth","IG5ha25l","IEhlYXRpbmc=","IFRpcmU=","YWR1bHQ=","IERhdGVGb3JtYXQ=","b3Bj","ZW5zYWdlbQ==","LlRvb2xz","Lk1peGVkUmVhbGl0eQ==","cmFp","IFdvbmRlcmZ1bA==","KV0pCgo=","aWFyZA==","VGhlbWVQcm92aWRlcg==","IGV2ZW50RGF0YQ==","I2Fk","LmdldFVybA==","IHRvb2xib3g=","IG92ZXJyaWRpbmc=","Q09OVEVOVA==","LXByb2R1Y3Rz","d2lsZA==","X2V4cGFuZA==","aW5haXJl","QnJ1","b2xscw==","INGN0YLQvg==","Y3Rlc3Q=","IHB1bmNoaW5n","RFJW","X3NwYWNlcw==","IFN1cGVyaW50ZW5kZW50","IGxheXVp","KGZlZWQ=","dG9k","IHZo","IGluc3VsdHM=","IFN1Yw==","aWtz","VG9ycmVudA==","Lmty","X2FjdGl2YXRl","k5g=","amVl","aW1lcnM=","cnVpdHM=","IHByZWNpbmN0","LlJlcXVpcmVk","IHNhdGlzZmllcw==","IGNoZWVyaW5n","IGFycml2","CXJlYw==","IENvYmI=","IGNvbmN1c3Npb24=","dWpldA==","Tm90Rm91bmRFcnJvcg==","SmVhbg==","IHBob3Rvbg==","Pl8=","IEJhcmNs","YW1k","ICV9Cg==","PVwiIw==","SW50ZXJu","IENvbW1pdHRlZXM=","LmJlbA==","bnVtbWVy","IGxldml0cmE=","X3ZlcmJvc2U=","KGNvZGVj","IFN0aXRjaA==","PSIiOw0K","IHJlZ3JldHM=","IG11bHRpbmF0aW9uYWw=","IHJlc3RydWN0dXJpbmc=","IE1FTg==","eW5jaHJvbml6YXRpb24=","IG1lZGlhdG9y","a2ly","UHJpbmNl","IGluaGliaXQ=","IGdvc3Q=","IE1NQw==","IHNpZGVk","X2Rhcms=","KGJsb2I=","PkxvcmVt","PiIpOwoK","c2Nhbm5lcg==","OmlubGluZQ==","LmNhcm91c2Vs","b3RpZGU=","IFdXVw==","IGRydW1tZXI=","LmZhbWlseQ==","IG9yZGluYWw=","5b2T5YmN","IGRpcGxvbWF0","IHN1cHBsZW1lbnRhbA==","IGRhZsO8cg==","IEZBVA==","IFlvbmc=","aGFwdXM=","IEp1bmN0aW9u","emw=","LlVzZUZvbnQ=","IGhhc2hNYXA=","LVJl","ICIqKg==","LnNldEJhY2tncm91bmRSZXNvdXJjZQ==","IGltcGVyZmVjdA==","LkZpbmRFbGVtZW50","IExMUA==","IG11cmRlcmVy","IHRleHRl","aXPDqQ==","YWN0aWNz","VG95","R3JhbnQ=","X2Rpc2Nvbm5lY3Q=","IGJyYXNpbGU=","IGVtZXJnZW5jaWVz","X2x2bA==","IEAiXA==","fSovCgo=","X1NPQw==","Tk9STUFM","L2dhbGxlcnk=","YXNpY3M=","RXZlbnR1YWxseQ==","IGdyYXA=","IGNyaXN0","IHByb2plY3Rvcg==","IGdlb21ldA==","IGRldGVjdG9ycw==","IGNyaXRpY2l6aW5n","IGNoaWNrcw==","IEhpag==","L2ZyYW1l","LW1vbmV5","ImRlc2NyaXB0aW9u","IHRleHRpbmc=","IHNleGlzbQ==","IE1WQw==","LWdlbmVyYWw=","IG92ZXJ0dXJuZWQ=","IG1vdmVy","IFBocmF
zZQ==","IFVOVVNFRA==","IEVudHJlcHJlbmV1cg==","VEVHUg==","ZWxsaXBzZQ==","TWFya2Rvd24=","X18oKg==","IEthcmRhc2hpYW4=","cHBlbGlu","IEdvdHQ=","IGR5c3Q=","IFJlZHV4","SG9sYQ==","PyEKCg==","IFJlYWx0eQ==","U3VydmV5","IE1jR3JlZ29y","X2hhbmRsZXM=","IGludHJpZ3VlZA==","IGdldFVybA==","IGRldmlzZWQ=","IFBheXBhbA==","IHRoaW5rZXJz","IFN0YXR1c0Jhcg==","IEVsaWc=","IGNvbXBsZXhlcw==","INC60L7QtA==","c3RvY2tz","LWluaXRpYWxpemVk","IHNjYW5kYWxz","IGNvbWZvcnRpbmc=","IFJvY2tz","IGxpb25z","bG9jYXRvcg==","IV0=","IFBvbnk=","RGF0dW0=","IEZldA==","IG9mZnNldFk=","IFJFVFVSTlM=","IGJyZWFjaGVz","VGltZUludGVydmFs","IHZpZWxlbg==","VmVyc2U=","IGthZA==","IGdhYXQ=","KCItIiw=","IG1vdXNlWQ==","KFBvc3Q=","IFVo","ZWxpZ2libGU=","YWx0YQ==","IHV0aWxpc2U=","ZmFjdHM=","SElQ","IG9yY2hlc3RyYQ==","IFNwYWNlcw==","aXNwaWVs","IG11bHRpcGFydA==","LW9wYWNpdHk=","U2VhcmNoaW5n","IFBsYXRv","VmlzaW9u","IGx1bA==","IEFwcHJlbnQ=","57uc","W3JhbmQ=","LWRpc2FibGVk","IEZsZXRjaGVy","IHRyYW5zcG9ydHM=","JmU=","dHBhcmFt","cG9sZQ==","IEJ1ZW5vcw==","w7pibGljYQ==","aW50ZXJhY3Rpb24=","IGhvYg==","IGluZmxpY3RlZA==","bGl0ZQ==","IFBBUkFNRVRFUlM=","IFN0YW0=","KG14","IEF1dG9NYXBwZXI=","aWxpYW4=","IHF1aXR0aW5n","PXt9","IEpvbmFz","IGxvY2FsaXR5","IFNpbGVuY2U=","X2ZsdXR0ZXI=","IG5icg==","bGl0ZXI=","IE5vcm1hbGl6ZQ==","IGFjdW0=","QnJhaW5z","ZXF1aXA=","XT09Ig==","IGRlc3Rpbm8=","IERpb3M=","Lk11bHRpbGluZQ==","YWdyZWU=","KQoKCgoKCgoK","IHN0ZWxsZW4=","IGN1cmx5","Lk9mZmljZQ==","LWFib3V0","ICcuLy4uLy4uLw==","IFVUSUw=","IFJw","4oC6","IG1hcGE=","LkRP","YWdhbA==","LndpbmRvd3M=","IGFkdmVyc2VseQ==","Llh0cmFMYXlvdXQ=","bWVkaWNhbA==","IHVuc3Vy","dGhlcm1hbA==","Lk1vZGVsQWRtaW4=","LmFjdHVhbA==","c2V0Q29udGVudA==","IHBvc3RmaXg=","UFc=","IENoYWlycw==","IGdyYW1t","IGNvbXBsaWM=","RElTUExBWQ==","IE1vb3Nl","aGFhcg==","QUxFUw==","IGxkYQ==","LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqCg==","ICcvJwo=","QVNO","IEJhcmJlcg==","IG1haW5z","IG1haW5XaW5kb3c=","0LDQt9Cy0LDQvdC40LU=","IGVtYW4=","X2NvbGxlY3Q=","IHJlbXBs","LnRhe
A==","YmFo","IFBzeWNoaWF0cnk=","RGVzY3JpcHRpb25z","IGV4ZWN1dGlvbnM=","CUxPR0dFUg==","JkU=","OmJn","IGtk","LmRhbWFnZQ==","IG5pc2k=","5qy+","IENhbWVs","aW5pZGFk","IExpZmVzdHlsZQ==","IFRISVJE","IOCkuA==","IHBvbHlnb25z","IGF0dGlyZQ==","YWxlbnQ=","X1VTQVJU","IG1hbGFyaWE=","bG9icw==","IF19Cg==","KHJlZ2lzdGVy","LXBz","X29wdGltaXplcg==","KEFMT0FE","IHZhcGU=","LnNvY2s=","kOiXjw==","JHByb2R1Y3Q=","KEVSUg==","Y2twdA==","YnVxdWVycXVl","IH19Ij57ew==","IEhpdmU=","IE1hc2g=","IEVwaWQ=","IEx1bmQ=","X3RyYW5zYWN0aW9ucw==","IHN1YmNsYXNzZXM=","RWFzZQ==","X0Nsb3Nl","X2NoZWNrb3V0","IicsCg==","U2VjdG9y","b2lzZQ==","LXRlbXA=","KSIp","aHlwZXI=","ZXJjdWw=","c3RhY2twYXRo","X05S","SUxMRQ==","IHJlbGFjacOzbg==","IE1hdHRo","X0NPREVD","IGhhbmRsZUVycm9y","X09uZQ==","YWxib3Jn","CQkgICAgICAgICA=","IFVwbG9hZGVk","Tm0=","Ly89","KlM=","X0VYUEVDVA==","IGZyYWN0aW9uYWw=","Q291","IHNjYWxhYmxl","IENJRA==","PFBvc3Q=","CXRocmVhZA==","aGFyZHdhcmU=","LmNoYW5nZWQ=","LkVsZW1lbnRBdA==","IGFydGljdWxhdGU=","ZWRvcmVz","RXN0YWJsaXNo","PXtbCg==","ISo=","IFNK","TWV0ZXI=","LnJlcA==","IFZPTA==","IE91","bMOp","IHBuZXVtb25pYQ==","X3BpY2tlcg==","ZXhwbG8=","IOyekQ==","IFN3aW0=","ZHJlc3M=","c3Rvcmllcw==","L25hdg==","VmE=","INit","L3NlbGY=","IHZldGVyaW5hcnk=","KERlbnNl","CWJvb3N0","IElzTm90","IHRydXN0aW5n","IExlYmFuZXNl","JHJlcXVlc3Q=","eGZmZmZmZg==","X3JlbW92ZWQ=","IHVwZGF0ZXI=","2KfY","RE9XTkxPQUQ=","IEltbWVkaWF0ZWx5","IHJvYW1pbmc=","IEhvcm55","LmNvZGlnbw==","IEZpZ3VyZXM=","IHBhbnRyeQ==","KHNhbXBsZXM=","IEJFTA==","IHNldENvbnRlbnQ=","dW1vcg==","5pSv5LuY","X01JTlVT","IHVubGVhc2hlZA==","IHByb2ZpY2llbnQ=","CVVJ","LkV4Y2VwdGlvbnM=","IHNyYW5k","UHJlc3N1cmU=","LmFzc2VydE5vdA==","KHNlcmlhbGl6ZXI=","CXR4dA==","UG9ydHM=","IG5lY2VzYXJpbw==","IHJldml2ZWQ=","IG1pbGVzdG9uZXM=","Y2Fubw==","RXNjb3J0","IGVudGVuZA==","QVBF","aXBj","LmF0b21pYw==","IFBlbWI=","IHJlYWNoYWJsZQ==","IGthbnM=","d2hhdGV2ZXI=","TGlzdEJveA==","IENseQ==","cGljdHVyZWQ=","IEVsZWN0cm8=","YWJpYw==","IGZ1bms=","IGRpYXJyaGVh","IOeZ","IFNvbHZlcg==","IEJhYw==","IHNrZWxldGFs","IO+C"
,"IEZpbGVOb3RGb3VuZEV4Y2VwdGlvbg==","ICIpWw==","IFRyYWl0","dWRva3U=","LS0tLS0tLS0tLQoK","QW5nZWw=","YWdy","IHNpbXBsZXM=","IGJhbmM=","IEFsZXJ0cw==","IENvbmZpcm1hdGlvbg==","IEFseQ==","Y2FsbGJhY2tz","IGZ1bmt0aW9u","IGdyYWZ0","WVBE","L0FGUA==","V0s=","a3Vy","Q0tFVA==","IFNsYXRl","IFN0ZWY=","CVJ1bnRpbWU=","IEVTTA==","IHByZWFjaGluZw==","QnJvYWQ=","IHNldERlc2NyaXB0aW9u","YXplbA==","PQoK","IGphY2twb3Q=","IC8vIQo=","dmlhcg==","IGVpZA==","IGF0aXY=","IHJlZmxleGl2aXR5","Lkxpc3Rlbg==","IGx5cmlj","IHZlcms=","IGNvbGx1c2lvbg==","YXphYXI=","IHdpbms=","IE11ZA==","L29wZXJhdG9y","IGV4dGVybmFsbHk=","IGJhcnU=","IGJhc2tldHM=","dGlja2Vy","KHBob3Rv","X2V2ZW4=","IHNwb25nZQ==","IGhlaWdodEZvcg==","Z2V0Q2hpbGQ=","X2Zvcm1hdHM=","LkV4ZWN1dGlvbg==","X1Byb3BlcnR5","cmVwb3M=","dGhlaWQ=","X1BIWVM=","IGV2aWRlbmNlZA==","LmhlYWRpbmc=","QW5ndWxhcg==","IFZlbnVl","IEhPVVNF","IEVzdG9uaWE=","0LzQsA==","cmdhbml6YXRpb24=","L2RldmljZQ==","SVJS","X3RoZW4=","YXJlbQ==","IGFnZ2k=","RU1PTg==","INGB0Lo=","IEVwaA==","IE1TUA==","IGxvZ2ZpbGU=","LWxlYWRpbmc=","YXRoYW0=","IHVubWF0Y2hlZA==","IFNpdHVhdGlvbg==","KCl7fQo=","CWNoYW5nZQ==","IENoYXB0ZXJz","LlJFU1VMVA==","IG9l","RVRZ","X3ZpZA==","Li4uJyw=","IGFsdGVybmF0aXZlbHk=","X1dT","IFBsZW50eQ==","IENyYXRl","YXNpb25hbGx5","IExhd24=","IElNTQ==","IFZhbml0eQ==","IFZvb3I=","5ZCv","IG1pag==","c3RlcnJlaWNo","IFJERg==","IENyaXRlcmlvbg==","Lkludg==","LlN0ZXA=","X0ZyYW1l","IEVOVU0=","774=","SG9wZWZ1bGx5","TmF2Q29udHJvbGxlcg==","IOy2lOqwgA==","IFZhZGVy","IHJ1dGhsZXNz","JGtleQ==","Y2t0","aW5lbQ==","aWxlbnQ=","IHJlc3BlY3Rpbmc=","bGNk","KGJ0","IEVsbGlvdA==","IFVuaWRvcw==","KENoYW5uZWw=","IGVpdXM=","IGFzdHJvbmF1dHM=","IEhvc3Rpbmc=","IGNhc3Rl","IGhhcm1lZA==","b3VwbGVz","PFJvbGU=","LkRlc2M=","LWNvdXJzZQ==","IENhcnRvb24=","aWxlZ2Vk","IG15c3RpY2Fs","IOex","KGZpZWxkTmFtZQ==","V0lUSE9VVA==","LHN1bQ==","J2FjYw==","CXJvd3M=","IGdldFBhc3N3b3Jk","IGNvY2tz","cGl2b3Q=","bmFtZW9m","IGZlYXNpYmlsaXR5","IGNvbW1lbmNlbWVudA==","IERvbWU=","LkpTT05FeGNlcHRpb24=","IEh5ZGVyYWJhZA==","IExpc3RlZA==","IENvbXB1dGVy
cw==","W3ZhbA==","IGlzb3Q=","CXdpbg==","IG5laA==","KElOVA==","UmVwdWJsaWNhbg==","INC/0YDQvtCy0LXRgA==","RmF0","IGVxdWl2","IERhdHVt","YXN0aQ==","IHNvaWxz","dXB1bmN0dXJl","cHJlc3NpdmU=","XykpOwo=","Lldhcm4=","IGhhcmI=","Lm9uT3B0aW9uc0l0ZW1TZWxlY3RlZA==","IGNsb3du","IE9XTg==","IGV4YW1pbmF0aW9ucw==","IEV4aXN0aW5n","am91cmQ=","IGNvbmNlc3Npb24=","IEZpcmViYXNlRGF0YWJhc2U=","IHVwdGFrZQ==","IGVubGlzdGVk","IENhcmI=","IGZ1cw==","IGFidXNpbmc=","LnByb2R1Y3Rpb24=","eW5jaA==","aWx5bg==","cmVmdW5k","LWhhdmU=","KGFyZ3VtZW50","IGZzY2FuZg==","Y29uY2VwdA==","X0xBTkU=","IGVuZ2FnZXM=","IEV4YWN0bHk=","YWx0dXJh","KEFkZHJlc3M=","IHN5bm9ueW1vdXM=","VG93bg==","IFBheW5l","cm9pdA==","cGVyaWVuY2Vz","cGFydGljbGVz","X2Jk","IEdyaW5kZXI=","TWFuYWdlZE9iamVjdENvbnRleHQ=","KGJi","W3RtcA==","LWNvbnM=","YW9rZQ==","IHN0ZXdhcmQ=","IFZpZXdDaGlsZA==","LmRyYXdMaW5l","IFdBUk4=","IHB1ZXM=","bW9kYXRpb24=","IHpz","QWdyZWdhcg==","ICIuIiw=","LmNlbnRlclk=","IGZsYXdsZXNz","IGRldXRzY2hl","IExpcXU=","aXRlaXQ=","X2ludHJv","LXVzZWQ=","LHRhcmdldA==","IEhERA==","ICUr","b3JlbnQ=","L09iamVjdA==","IGRpc3J1cHRlZA==","w6J0ZQ==","IGFjY2Vzbw==","IExvd2VzdA==","IFdpbGxpYW1zb24=","X2NyZWF0b3I=","U2VsbA==","IEJVRw==","X3JlcHI=","6ICM","IGFyY2hhZW9sb2dpY2Fs","b21lcnM=","IEVsb24=","IFNjcm9sbFZpZXc=","IGxpbmVzdHlsZQ==","aXNSZXF1aXJlZA==","aXNrbw==","X3Ji","ZsO8aA==","ICAgCQk=","KGRlZmluZQ==","IFNDTQ==","IERJRkY=","X2Jz","cGVuZGljdWxhcg==","cGFjZWQ=","IEpvdXJuYWxpc20=","LkpTT05BcnJheQ==","IERhdGFBY2Nlc3M=","TWFyaWE=","IELDvA==","SEVMTA==","IE1BVFJJWA==","T0xUSVA=","YXBzaWJsZQ==","XToKCg==","bmFpcmVz","X2hpc3RvZ3JhbQ==","IGZsYWly","aGF2aW5n","IFVzZXJJRA==","IFJlbGF0aW9uc2hpcHM=","UmVwbGFjZW1lbnQ=","IHJzYQ==","IGVucmljaGVk","IHJlaGVhcnM=","IHfDpHJl","IGxvYWRlcnM=","IEVsZW5h","IFdhdGNoaW5n","CWpvYg==","TkVXUw==","L3NldHRpbmdzZGlhbG9n","aXZlYw==","X0VRVUFMUw==","VGVtcGxhdGVOYW1l","IEJPRFk=","LmFkYXB0ZXJz","d29mZg==","Y29tYm9Cb3g=","Lk5ld1JlYWRlcg==","fHJlcXVpcmVk","X3Byb2JhYmlsaXR5","ICg6Og==","IGNyYXo=","IFVG","VGVzdElk","IGVzcGVjaWZpYw=="
,"aWJlbA==","cGF3bg==","640=","IE1hcnI=","IHN0YXJ0WA==","X3NpdGVz","Lz4KCg==","IGltcGxpY2F0ZWQ=","KGlubmVy","IGVmZm9ydGxlc3NseQ==","wq10aW9u","YXdhcmQ=","IGhvdmVyaW5n","cHJp","JHRlbXBsYXRl","dWFuZw==","IGF1dG9tYXRl","ICoqLwoK","aWJsaQ==","IG51dHJpdA==","KS4o","ZWVlZQ==","QXBpQ29udHJvbGxlcg==","L293bA==","IFdvbWVucw==","LWRvdWJsZQ==","IE9yZGVyaW5n","c3Bt","TW9kZXI=","Lk5hdGl2ZQ==","IEJlcmdlcg==","ZXNkYQ==","ZXJkaW5ncw==","X2VjaG8=","IHN1bW1hcml6ZWQ=","IGVsZXZhdGU=","X3F1YWQ=","IHdvbw==","dWxhbnQ=","UHJvcGVydHlWYWx1ZQ==","IHBsaXN0","IEdSQVBI","IFNUREVSUg==","KScpLg==","QXNzZXJ0aW9u","bGlua3BsYWlu","IGFjY2VsZXJhdGluZw==","IHNuaXBwZXRz","IFNhbG1hbg==","YWJjZA==","LmVjaG8=","X2lkeHM=","IHBjbQ==","b2NhbHlwdGlj","X2Nvb3JkaW5hdGU=","KHByZXZpb3Vz","LXNob3J0","LnN1YnRyYWN0","KEJpdA==","P3Q=","IE5vdGVib29r","IEthdHJpbmE=","aWZmZXJlbnRpYWw=","c2lsZW50","dGVybWluYXRlZA==","IHRhbmdlbnQ=","OlQ=","IGNvc8Os","IHBhcmFub2lk","IGRlcHJpdmF0aW9u","L3t7JA==","IGhlbWlzcGhlcmU=","IHJlaW5zdA==","ZWN6","dGVycg==","IFBMQVRGT1JN","IHRyb3VibGVzaG9vdGluZw==","IHZhbGlkYXRpbmc=","IE9yaW9u","YXN1cmluZw==","0LjQvdCw","IGh1YnM=","YXJlbmNl","IENoYWxsZW5nZXM=","IHplYWw=","U3Bv","IFNjcmVlbnM=","IG11bmRhbmU=","IER1bms=","ICMjIyMj","IFJFRkVS","b25ldA==","LmNhc2U=","LXBvc2l0aXZl","SU5URUdFUg==","Lm1ldHJvTGFiZWw=","U0FO","IHByb2Zlc3Npb25z","IHR5cmVz","UGFsaW5kcm9tZQ==","IFNFQ09ORA==","LkdSRUVO","IFNuYXBzaG90","VUxL","X2NpZA==","JEk=","IGN1bnQ=","ZXN0cnVjdGlvbg==","UHN5Y2g=","IEh0dHBSZXNwb25zZU1lc3NhZ2U=","ZW1iYWxp","X3Jldmlld3M=","U2VsZWN0YWJsZQ==","X1BSRVNFTlQ=","IEpzb25SZXF1ZXN0","IFRoZXRh","X2ludGVycA==","UmFzdGVy","I2Vycm9y","LG9iag==","IHR3ZWV0aW5n","X0dQVQ==","X3RvZGF5","X3NlY3M=","bmVlcw==","LmdldFN5c3RlbVNlcnZpY2U=","IHZub2Rl","IFJlZ3VsYXRvcnk=","IEZhaHJlbmhlaXQ=","IHNjYWxlcg==","X21hcmtldA==","LmFsbG9jYXRl","dGlja2V0cw==","YXRhaw==","IFBpa2U=","IExvcg==","ZGl0b3I=","IGxvY2F0aW9uTWFuYWdlcg==","IGluaXREYXRh","IFdhcmU=","IEluY2lkZW50","IGNvbW1lbnRhdG9y","dWVudGVz","IEluZmxhdGU=","IOWG","IGFjdGl2aWRhZA=
=","IEJq","RU5VTQ==","IHJldXNlZA==","INC80LXQvQ==","IHNlc2nDs24=","LicpKTsK","44GT44KT","L2dl","YWdhaW5zdA==","LGxpbmU=","KFVubWFuYWdlZFR5cGU=","KT0i","IHl0","dWRpYW50ZXM=","cm9sbGFibGU=","5aGr","X0NPTExFQ1RJT04=","b2xpcw==","dW1iZXJsYW5k","KCIiIgo=","IHppcHBlcg==","DAo=","L3NpZ251cA==","IHN0cmFuZHM=","cmF4","LmNvbnN1bWVy","IHVuY2VydGFpbnRpZXM=","RGVidWdFbmFibGVk","IGRlZmVhdHM=","IGRydg==","IHJlYWxpc20=","YWdyYW1z","WEU=","IEhhemFyZA==","LW5lZWRlZA==","KHRhYmxlVmlldw==","LkVsZW1lbnRz","IFNBUg==","CWVsZW0=","KHBrZw==","U2ltb24=","VGludENvbG9y","IFBoZW4=","X0VNUA==","2Iw=","Pz4KCgo=","X2F0dHJpYg==","IGJveFNoYWRvdw==","IENHQWZmaW5lVHJhbnNmb3Jt","IENhbmJlcnJh","IHN0YXJ0UG9z","IFJhaw==","CWNlcnI=","IFRhbnphbmlh","dW9uZw==","Y2Fm","LmJhc2ljQ29uZmln","b2lucw==","Q29udGFpbmVk","PXNldA==","X2dpdA==","CXBhY2tldA==","IGNvZg==","KFRS","5qC85byP","KHt9KQo=","IGRpcmVjY2lvbg==","IHBsYXlsaXN0cw==","IGFmZmluZQ==","LnNldFNlbGVjdGlvbg==","IGFtbW9u","IGNvbnF1ZXJlZA==","IFJhbW9z","IFBTUA==","PXN1bQ==","IGNvcnJlbGF0aW9ucw==","IHJvYWRtYXA=","IGV4dGluY3Q=","IGFkdmlzYWJsZQ==","IGJvbWJlcnM=","IFVJUmVzcG9uZGVy","X0JQ","INCx0YPQtNC10YI=","IFByZW1pZXJl","IFJV","dHJhc2g=","KGNsanM=","Z251","LlBhZ2Vz","IGluc3BlY3RvcnM=","TWV4aWNv","IFZlcmU=","UHJlYw==","IFNjYWw=","aXNwZXJz","UnVubmFibGU=","Lm9yaWc=","IHNhaWxvcnM=","UGFyc2luZw==","IFZpc2l0b3Jz","JnR5cGU=","cG9wb3Zlcg==","PCgpLA==","IG93ZXM=","IHJlYWN0cw==","IERlZmluZWQ=","IHJlYWxtZW50ZQ==","IGRpY3RhdG9yc2hpcA==","YWRtaW5pc3Ry","aWRlbmQ=","PUw=","c3RyY2FzZWNtcA==","XSU=","0L7Qs9GA0LDQvA==","ZWR1bGE=","LWRlc2lnbmVk","Q09WRVI=","X0NoYW5uZWw=","IHByb2pldG8=","eW1vb24=","Q0hLRVJSUQ==","6YeK","IHZlcmlmeWluZw==","L2tleQ==","LmZyb21DaGFyQ29kZQ==","LkJpdA==","X2J1ZGdldA==","ICUi","dmV5b3I=","IHl1bQ==","IGV4dHJlbWVz","X0NSRQ==","Z2V0U3RhdHVz","c3Vic2VjdGlvbg==","IHNvYWtlZA==","IGdlbmF1","X0NIQVJBQ1RFUg==","5oyB","LW9ubGluZQ==","LnRvQ2hhckFycmF5","Y2VyZXI=","Il0sIg==","IHN0cm9sbA==","IFl1YW4=","IFdhbmRlcg==","IHNpc3RlbQ==","X3Vj","KG5vbWJyZQ==","Y2hhbnRtZW50
","KGNsb3Nl","bWV0aA==","LXNlY3JldA==","cHNldWRv","Q291bnR5","Q09OVFJPTA==","IHNvbHZlbnQ=","IHNvYXJpbmc=","IHNwaWVz","TmF2SXRlbQ==","IHJlc2VtYmxhbmNl","KGJpdHM=","IGNlbGx1bA==","IGFzc29jaWF0aXZl","Lmltd3JpdGU=","LmNvb3JkaW5hdGU=","XSwk","KHNr","Ki8p","IG1vY2tz","IGp1bmc=","X0RPQw==","LXJ1bnRpbWU=","IEdpdmVz","dW5q","KHNlZw==","KFtc","IG5haA==","X2V4cGVjdA==","Um93SW5kZXg=","KGZvcmNl","IEdldFZhbHVl","IHN1bW1hcmllcw==","X1NIQVJF","LXRyYWluZWQ=","IEJsYW5j","IGZpdHRpbmdz","IHdhdGVyZnJvbnQ=","Lk5vdGU=","IFdhbmQ=","b3ZlcmU=","cHJlZGljdGlvbg==","IGNzcg==","LnRvcEFuY2hvcg==","IFN0cm9rZQ==","X0ZpbHRlcg==","YXRoZQ==","ICJcXCI=","IEFGRg==","PSIvIj4=","LlJlcXVlc3RNZXRob2Q=","kJzntKI=","IHdpdG5lc3Npbmc=","QXBwYXJlbnRseQ==","IG1kaQ==","c3RpY2tz","IEFsdg==","w6TDnw==","X2NvbnRpbg==","IGJvaWxlcnM=","IE1hcnhpc3Q=","SU9D","bmVybw==","aW5uYWNsZQ==","TGl0","Y2Vj","S2V5UHJlc3M=","R2V0RGF0YQ==","IGlzbnQ=","0YDQvtCy0LXRgA==","IHFyeQ==","Um9vdEVsZW1lbnQ=","IE5TQ29kZXI=","LmdldE51bQ==","IHRocmVlc29tZQ==","VXNlcw==","LiJf","IENvbnRpbnVvdXM=","IHBvcHVsaXN0","IFBzeWNob2xvZ2ljYWw=","X2N5Y2xlcw==","IGlmZGVm","aXBoZXJhbHM=","CSAgICAgICAgICA=","IGFkdmlzZXM=","IENvbXBhbmlvbg==","dHJpZ2h0","IGdyb3dlcnM=","IFNPQ0tFVA==","eW1jZQ==","UlNT","bWVtYmVyT2Y=","VG91Y2hhYmxl","X2FycmF5cw==","IGp1bXBlcg==","IGhlcnBlcw==","IFRpdHM=","IFRlbGVmb24=","X1BBTkVM","dWdlbg==","5YyX5Lqs","LlNpdGU=","X3VucmVnaXN0ZXI=","X2Nocg==","LnRm","LWh1bWFu","IGFzb2Np","IHF1ZWVucw==","QW50aG9ueQ==","IHN0cmluZ2VudA==","IG1vbGVzdA==","c2V0SWNvbg==","SEVFTA==","SEVMUA==","RERT","LmNtcw==","SVNUUklCVVQ=","Y2llcw==","LmZvckNoaWxk","LmNoaw==","IE90dG9tYW4=","IFRQUA==","IG1pbw==","IEJ1Zg==","Ym9h","VmVyc2lvbnM=","KGxvY2FsZQ==","IFJhaWxyb2Fk","YmNj","LyoqPA==","LXBhaWQ=","IGNlbGVyeQ==","YXRpc2NoZQ==","Z2V0T3B0aW9u","b3Jpb3VzbHk=","IGFkYXB0ZXJz","U3RvcmVz","L3NhdmU=","IEJhc2lz","0Y7Rgg==","IExhZA==","X3JlbGF0aW9uc2hpcA==","IENsdWJz","IOCo","OiI8PA==","X01JU0M=","VmlzdWFsaXphdGlvbg==","IG1pcnJvcmVk","ZXNwZXI=","U3RyTG4=","IHJlc3BvbnNlT2JqZWN
0","5ZCR","LmVuY29kZXI=","LS0tLS0tLS0tCgo=","IGdyaWRWaWV3","X2luZGVudA==","YW50d29ydA==","IGFycml2YWxz","IFNldHRsZW1lbnQ=","Vmlld0luaXQ=","LXZhbHVlcw==","IHdhdGVyZmFsbA==","IGluY2FyY2VyYXRpb24=","IFRlZW5z","CXNpZ24=","aW1tdW5l","LnNlY29uZGFyeQ==","IHZpZGVvZXI=","IOi+k+WFpQ==","IGludGltaWRhdGlvbg==","ZW5kYWxl","IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMj","IGluc2lnaHRmdWw=","IHNhbmRz","IHBob3RvZ3JhcGhpYw==","UGFnaW5hdG9y","IGRpc2NpcGxpbmVk","X1RMUw==","XSkpLA==","cmxlbg==","PGNlbnRlcg==","X1BDTQ==","S2VsbHk=","LWJpbGxpb24=","LmN4","IGpldXg=","IGZpbGVMaXN0","IFFEaWFsb2c=","dHJhY3RpdmU=","RHQ=","IGVzdHJvZ2Vu","IHN0YXJjaA==","X2VtaXQ=","INC30LDQv9GA0L7RgQ==","IFF1YXJ0","IGluYWR2ZXJ0ZW50bHk=","IHRyb25n","c2hpcG1lbnQ=","IE5PUg==","IFNjcmVlbmluZw==","IERpc2Nvbm5lY3Q=","bWVubw==","IFdvcnN0","IE5y","e2s=","c3Bs","X2N0cg==","LnNvcnRlZA==","LXBsYWNlaG9sZGVy","KCk7Ig==","aHVyc3Q=","LWhpdA==","LnNvbHZl","566X","IHVuZGVhZA==","IHdoaW1z","IGdldERlZmF1bHQ=","IE5pa2tp","YXNzZW1ibGU=","IHJlbG9jYXRlZA==","LXJldA==","SXRhbGlhbg==","OlN5c3RlbQ==","LnNjaGVkdWxlcg==","4oCcU28=","Rm9yYmlkZGVu","QVZPUg==","emlhxYI=","LkFkYW0=","CWNhbnZhcw==","IHBhcnRuZXJpbmc=","IGd5bW4=","IG1hbmlj","RGlmZmVyZW50","IMOlcmh1cw==","IGZlcnRpbGU=","Y2xm","LQ0K","LnJldmlldw==","b2RhYmxl","IEJvdW5kcw==","b2Jhbw==","IFBhcGVyYmFjaw==","IG1vZGlmaWM=","Y2hlY2twb2ludA==","IEFwcEJ1bmRsZQ==","IHN0YWJpbGl6ZQ==","IEF1ZGlvQ2xpcA==","bW9udGhseQ==","LmJlaA==","IGZsb3I=","IGJvbmRlZA==","IFdvcmtvdXQ=","Y29taW5ncw==","IHJhYmJpdHM=","IEJBTA==","Q0NS","X3Z1ZQ==","IExldml0cmE=","IGxpYmVydGluZQ==","IGNoYWxsZW5nZXI=","IFZhY2F0aW9u","VG9G","fSQv","X0RyYXc=","IGZlbmNlcw==","IGRhdGFzb3VyY2U=","IHBhcGVs","c2xpY2s=","X21lcw==","IFVJU3Rvcnlib2FyZFNlZ3Vl","KFRhZw==","IOWvuQ==","ICctJyk=","X0NMQVNTRVM=","KFJlbmRlcg==","CWZ3cml0ZQ==","VUVE","QUVT","KGpzb25QYXRo","IHNsb3dz","PkRlc2NyaXB0aW9u","IGVucmljaG1lbnQ=","IGl0ZW1wcm9w","IFBvdmVydHk=","IGFic29yYmluZw==","IFBzeWNobw==","5rGf","LC4KCg=="
,"SW52ZXJzZQ==","IGFkanVk","aWdpZEJvZHk=","emlvbmk=","ICInLiQ=","5LiN5a2Y5Zyo","VGhhaQ==","IHNsYWlu","IGJydXRhbGx5","IFBlcnNwZWN0aXZl","IFJldGlyZW1lbnQ=","JHJz","IHNlcnZpY2VOYW1l","IOyI","LXByb2Nlc3Npbmc=","YnJhbmRz","OmVycm9y","KHByb3BlcnR5TmFtZQ==","IEJvZWg=","L2Nt","L3JlYWQ=","QU1C","IHJvdGF0aW9ucw==","LndvcmtzcGFjZQ==","Onk=","IHVwaG9s","dW5reQ==","IEJyYWNl","L21ldGE=","IEJyYXZl","YWNqZQ==","KFVJbnQ=","IHZpZWlsbGU=","cmFkaQ==","X2R5bg==","Tlc=","bG9zZXI=","ZXJ1c2Zvcm0=","IEJhcnRvbg==","IGZhcmVz","IE11aw==","4buHdQ==","IEF1ZGlvU291cmNl","KChf","LkJpZw==","Lm9yZ2FuaXphdGlvbg==","IFRyaWNr","IGJsdXNo","KFRZUEU=","IFJlbGF0aXZlTGF5b3V0","bGVjdHJvbg==","XX0i","IFphcA==","IFR3ZWx2ZQ==","Okw=","IHN0aWZmbmVzcw==","X0hFTA==","IHNwZXA=","KGNvZGVy","IHRhbWFuaG8=","IGFudGlveGlkYW50","IGhvc3BpdGFsaXplZA==","R1BD","IHNjcnV0aW4=","4buBbg==","IFNa","IEp1bGl1cw==","IFNhYmI=","ZWxvcg==","KG1j","6YeM","IFBpbnM=","IG1vZGVyYXRlbHk=","IEvDvA==","b3JnYW5pemF0aW9ucw==","IFNDT1JF","IHNjb3Vy","IGNob3I=","IFVJRWRnZUluc2V0cw==","IHNrdWxsZQ==","X29wZXJhbmQ=","LmdzdGF0aWM=","L25naW54","IGdldFdpZHRo","QmF0dGVyeQ==","IFNldHRlcg==","bUE=","KFJlc291cmNlcw==","X3BsYXlsaXN0","IG1hbmdv","IE9SRA==","YW5raW5k","ZXdheXM=","Pyks","IEdMVVQ=","IGp1c3Rl","IHBheWVy","KGNhbQ==","IFRlYWNo","IEZsdXg=","IG91dHNwb2tlbg==","IFN0cmluZ1V0aWw=","IFpoYW8=","LkhlbHBlcg==","IGVzdGlsbw==","IEFudGhyb3A=","IEd1YXJkcw==","Vm9jw6o=","Olsn","CXByb2R1Y3Q=","dXBkYXRlZEF0","IGluc3BpcmVz","cXc=","QkxFTQ==","YWtpc3Rhbg==","IGN6xJk=","LWhlYXJ0ZWQ=","IENvbXBlbnNhdGlvbg==","0LjQsw==","IGNvbWE=","IEZpYXQ=","IHhtbGh0dHA=","IHJlZmVycmFscw==","IHNwZWN0YXRvcnM=","IFRvcw==","aXNvcw==","SU1QTEVNRU5U","IGVudHJlcHJlbmV1cmlhbA==","IFNjb3V0cw==","IEFsb25l","YnJva2Vy","UHJvZHVjdElk","IEtvYmU=","IGNoYXVk","L2ZlYXR1cmVz","IHJvb21tYXRl","IFByb2plY3Rpb24=","YXZvdXJpdGVz","X0pPSU4=","IEFWQw==","X3BoeXM=","S2V5UHJlc3NlZA==","LDw=","IHVucmVhY2hhYmxl","IENpdGF0aW9u","W2NoYW5uZWw=","c3RhcnRzd2l0aA==","IEphZ3VhcnM=","LklzRmFsc2U=","bWVtYmVyc2hpcA==","QXR0
ZW50aW9u","IHJlbW9kZWxpbmc=","IENpbmR5","IGNsaW5pY2FsbHk=","IG1pbGxlbm5pYWxz","IM60","IHJmbA==","ZW5ldA==","IG9icmln","IHZvbHVudGVlcmluZw==","Q3JlZGl0cw==","CWFy","IHJlc2lzdGluZw==","IFByb2R1a3Q=","PT09Ig==","IGNvbmVjdA==","IHJpag==","INeU","IHB1YmxpY0tleQ==","IG95","IEJ1dHQ=","X21pc2M=","IEJlc3Rl","IFBMQw==","IOafpQ==","IEJveEZpdA==","IiIu","VGVzdEZpeHR1cmU=","IGNoYXR0ZXI=","IGRvb3J3YXk=","eXNpemU=","INGH0YI=","SUNUVVJF","PScuLi8=","c2hvd24=","X3dlYXRoZXI=","IExvZ01hbmFnZXI=","XX0iCg==","IGNvbG91cmZ1bA==","IHJ1bW9yZWQ=","IGzDpQ==","IHByb2Jz","CWJ1aWxk","IOWmgg==","LnJldg==","IGludGVyY2VwdGVk","R2F5","TGlzdENvbXBvbmVudA==","IHBpw6g=","IkF0","IGFnYXI=","IEd1bmQ=","X0FFUw==","7IM=","jpjsnbQ=","IGF1dGhvcmlzZWQ=","IENoYWxs","X2xvZ291dA==","Y3Jvbg==","YXRlZ2llcw==","cGVyc2lzdGVudA==","IEFuZEFsc28=","dXN6","X3Jlc3RhcnQ=","IGRlY2lk","emY=","IHBhZ2luYXRvcg==","b2xsZXI=","IEhH","T3BhcXVl","c2VhdQ==","IE9NSVQ=","IFRoaWNrbmVzcw==","IEFpcndheXM=","X2RlbQ==","eXRpYw==","IHByb3Rlc3RlZA==","IHVwcmlzaW5n","IHN1aW5n","IFNoZWxieQ==","LmVuZXJneQ==","IGFsbGVsZQ==","LWJpZw==","U3RyaW5nQnVpbGRlcg==","IHNpZGVsaW5lcw==","IFRV","X2Fp","LkhPUklaT05UQUw=","IHJhZ2luZw==","LnRvTG9jYWxl","Lm11c3Q=","eEZGRg==","Lm5paA==","ICd7fSc=","2YjYrw==","IHB1bG1vbmFyeQ==","IOWPkQ==","IG7Dum1lcm9z","IE5hcG9sZW9u","X01ldGhvZEluZm8=","bGFzdGluZw==","IGV4cG9zdXJlcw==","IGVtYmFyaw==","X3VkcA==","S2lkcw==","X0NPTk5FQ1RFRA==","IHdlZWRz","UE9PTA==","IGtyaWo=","IG51aXM=","Sk5JRVhQT1JU","YWFhYWFhYWE=","IO2P","5Lu9","IHJlcGxlbg==","IFRyaWFscw==","d2FzaA==","cnV0","LWJlZm9yZQ==","X0FUVEFDSE1FTlQ=","VU5U","XFZhbGlkYXRpb24=","VG9u","IGhlYWRpbmdz","UHJvYmFibHk=","IGZhYnJpY2F0ZWQ=","U29ja2V0QWRkcmVzcw==","IGxldHRyZQ==","KSI+","IHZhY2NpbmF0ZWQ=","Omh0dHA=","IGNvbmRvbA==","c2hlZA==","IFNwaWVsZQ==","44OU","RGVwbG95","LkNvbnRyYWN0","LWJv","Iy8=","IGludGVyY2VwdGlvbg==","IGlzYm4=","IG1hbm5lcnM=","L2Fj","CUNoZWNr","X2Zn","IGVuZFBvaW50","X3dlYXBvbg==","IHVuaW50ZW50aW9u","IHF1aXRz","X01JQw==","YXBpcm8=","IGJhbGxvb25z","IGdyYWRz","b
WFycmllZA==","IDwqPg==","IGRpc3RvcnQ=","X01FU1NBR0VT","IFBTQQ==","X1BE","YWxzZXg=","IERpYWxvZ3Vl","IHJlZ2lzdHJhdGlvbnM=","IE9yaWdpbnM=","IGZsYW5r","PzsKCg==","OwoKCgoK","XS0k","IERlc3M=","LlN0YXR1c0JhZFJlcXVlc3Q=","IGluaGFiaXRlZA==","IGdpbHQ=","IFNURENBTEw=","LnRoZXRh","JCQkJA==","aWNsYXNz","QXBhcnQ=","Lmxpc3RCb3g=","IEJlbGFydXM=","IGRlbmVu","IFN1c3NleA==","CWRlbA==","X0VD","bmVhcmVzdA==","XE9yZGVy","UGFja2FnZXM=","Zm9ybWVybHk=","Ke+8jA==","6LSj","U2V4eQ==","IGhvcnJvcnM=","Uk9BRENBU1Q=","QXBwcm94","RGVzaw==","QU1FRA==","Lk5vcm1hbGl6ZQ==","X3B1Ymxpc2hlZA==","IERlYm9yYWg=","56eR","IHBvdW5kaW5n","IEVzcGVy","IERhbmNpbmc=","IExPT1A=","IFJveWFscw==","IGluc3VyZQ==","IEludmVzdG9ycw==","IHRoZW9sb2dpY2Fs","QXBwb2ludG1lbnQ=","IGNhdGVnb3JpY2Fs","IGNyYW4=","VmFsaWRpdHk=","IHJlc3BvbmRlcnM=","ICgpDQo=","ZXBhZA==","QklUUw==","IExhbWJlcnQ=","c3VtbQ==","YWNpZGFk","IGxvZ2dlZElu","PVc=","LkxvY2FsaXphdGlvbg==","cmlkbw==","JyIpCg==","IFdlYlZpZXc=","bG90aA==","IHRlYXNlcg==","IENhbmQ=","IGVwaWxlcHN5","SW5jcmVhc2U=","aXZpdHlNYW5hZ2Vy","ZW50cmFudA==","VGVsZWZvbm8=","LmN1cnJlbnRTdGF0ZQ==","IE5vZWw=","ICAgICAgICAgICAgCQk=","IGV4aGF1c3Rpb24=","ZWxpYW4=","IGNvdmV0ZWQ=","LXByb2R1Y3Rpb24=","KHN0ZGlu","IHByZWZlcmFibGU=","IG9mZmVuZGluZw==","KGNvbW1pdA==","CWFs","IHJlbG9jYXRl","IGFub21hbA==","IERpc2Vhc2Vz","IEZvcmc=","IFdJRkk=","IEtpbGxpbmc=","cXY=","IGZtYXA=","IGxsZXZhcg==","dGl0cmU=","LmVtcA==","LCRf","YXZy","Q2FuQmU=","X21h","IEhhd2tpbnM=","X1JPVVQ=","IGxvYWRJbWFnZQ==","IFdhaA==","IERlbXM=","IGluZGVudGF0aW9u","cHJlY2F0aW9u","IOaWh+S7tg==","IEJ1ZGFwZXN0","IHV0Yw==","KGhvdXJz","IHRyYW5ueQ==","QW5z","ennEhw==","LnZlaGljbGU=","Q29pbnM=","IEJyYXVu","CVJlc3BvbnNl","IHZyaWo=","IHN0cmFuZ2VseQ==","IEZhc2M=","XFNlc3Npb24=","TW91c2VMaXN0ZW5lcg==","IFJvbGxz","4bqnbg==","LmdycGM=","SW50ZWdlckZpZWxk","CWFmeA==","RG9ja0NvbnRyb2w=","JVw=","JTsi","IGdpZ2c=","IGJvcnJvd2Vy","IGRpc3BvbmlibGVz","X1JFQ1Q=","IFRoaW4=","IHBlYXJs","eEZC","IHJpcHBsZQ==","IGtIeg==","LmFjcXVpcmU=","Ymlvcw==","dGFibGVGdXR1cmU=","L2FudGxy","b3
JhY2xl","IEFSRUE=","IGludGVuc2VseQ==","IHByb3RvYnVm","IExFTkc=","IEhlYWRxdWFydGVycw==","YXRoZWQ=","TWluZA==","aW5peg==","CVBhdGg=","WE1MTG9hZGVy","IGFsbG9jYXRpb25z","LnNsb3Q=","UHJvY0FkZHJlc3M=","IHJvbGVJZA==","Oyc7Cg==","IEJSRUFL","IFBlcmZvcm1pbmc=","Lk9yZGluYWxJZ25vcmVDYXNl","LWds","Omg=","IGRvd25sb2FkYWJsZQ==","IFN1YnNjcmliZXI=","YW5zZQ==","IGNoYXJhY3Rlcml6ZQ==","IHNocnVnZ2Vk","IHNjcA==","IGd1c3Rh","IG1ldGFsbA==","IGxhYm9yYXRvcmllcw==","IFhpbg==","IE1vdG9yY3ljbGU=","IGVnZXQ=","IGZpbmFuY2Vk","IE1PRElGWQ==","KlI=","QWk=","IGV4dHJlbWlzbQ==","IEhhbGlmYXg=","IHZhbW9z","JG51bQ==","IGltcGFydA==","YnJpY2s=","IOexuw==","IGZ1ZXJh","IFJPTEU=","LkNvbmN1cnJlbnQ=","X09QRVJBVE9S","IGN5bmljYWw=","IFJlZ2luYQ==","Z2V0RXJyb3I=","2KM=","YnN1Yg==","SmFwZ29sbHk=","IGluaGliaXRvcg==","SnVzdGljZQ==","44U=","TmV2ZXJ0aGVsZXNz","LXNlbQ==","Lm9nZw==","cmVxdWVudA==","IG5vc3Nv","SGFpcg==","LkxpYnJhcnk=","bWRpcg==","IGhhcmk=","IFRhcmE=","IFBvcnRv","bmV0aW5ldA==","IGFsbGlhbmNlcw==","ZWxsc2NoYWZ0","X1N1cmZhY2U=","CVZpZXc=","YXR1cmRheXM=","IHBvcGNvcm4=","X1BBUlNF","IFJpcHBsZQ==","IHBoYW50b20=","IG1vbmRv","LmNyZWF0ZUNsYXNz","IEtvcmVhbnM=","IGZhc2U=","IFdvY2hlbg==","IEVxdWlw","LWVpZ2h0","IFN0YXRlbWVudHM=","IGFkYXB0aW5n","UHJlY2lv","IEN1cmU=","IGNhbWJpYXI=","5rCR","IGhleGFkZWNpbWFs","c3BpcmFjeQ==","YmlsdA==","IFl1Zw==","IC0tLT4=","IFBQQw==","aXN6","YWtlRnJvbU5pYg==","IERpc3A=","IEF0aGxldGljcw==","IG5pZ2h0Y2x1Yg==","R09PRA==","LnNldEdlb21ldHJ5","K1s=","L3NlbmQ=","IGJpbmFyaWVz","IHLDoXA=","OnJlcQ==","LWNvbnN1bWluZw==","ZXJ0aW1l","VVBEQVRFRA==","X251bGxhYmxl","VklO","dWxpYQ==","Y3lhbg==","IG1pc3VuZGVyc3RhbmRpbmc=","b3JpY2Fs","ZGVncmVlcw==","TGVhZGluZw==","LkFS","aWNrZXN0","TnVldm8=","dWZvcmlh","IGdvb2RpZXM=","IGZvcmVz","KCk8PCI=","YWRlbWlj","QWN0aW9uQ3JlYXRvcnM=","c2VydmVybmFtZQ==","KG50","ZGJDb250ZXh0","IGFpcmJvcm5l","IGV4aGliaXRpb25z","Y2VsZQ==","IHRlbGE=","PE1vdmll","KCd7fQ==","RXhwbGFuYXRpb24=","IGhPYmplY3Q=","IGJlYXJlcg==","ZW5zaWJseQ==","bmlw","IEplcm9tZQ==","IENa","IGRhdGVGb3JtYXR0ZXI=","w6ljaWFs
","U2V0TmFtZQ==","b3VjZQ==","IHJlZ3Jlc3M=","JkM=","KCkiPg==","LnNldFByZWZlcnJlZFNpemU=","IE1JRA==","IEFsZXNz","IGhvcnNlcG93ZXI=","IGF0bQ==","IFBhY2thZ2luZw==","IGNpcGhlcnRleHQ=","UmVxdWVzdE1ldGhvZA==","IGJlaWRlbg==","6KM=","IFBPVw==","LldyaXRlSGVhZGVy","ZGlyZWN0b3I=","LWJ1dA==","44Gg44GV44GE","aW5jZXI=","X2Ru","ISEhISE=","IG1hbnVmYWN0dXJlcw==","LlRleHRVdGlscw==","IGNvbnNjaW91c2x5","IGJvdW5jZWQ=","Y3VsdHVyZQ==","IFNwYXI=","IFBpcGVy","LnByZXNz","LW93bmVy","IGV2YWx1YXRvcg==","IFNUUkVBTQ==","LlBpY3R1cmVCb3hTaXplTW9kZQ==","IHN1Z2Fycw==","U2NyZWVuV2lkdGg=","IG5leHRTdGF0ZQ==","IGl2b3J5","IGJydW5jaA==","ZGVuc2l0eQ==","X09X","IENvcm9uYXZpcnVz","IENGUg==","YmFr","XENhdGVnb3J5","5pWw57uE","IGludm9rZXZpcnR1YWw=","fSgpCg==","IHN1amV0","LW1hcmtlcg==","aXNkaWdpdA==","IE1vYmls","IEpzb25SZXF1ZXN0QmVoYXZpb3I=","X1JFTU9URQ==","LmV4aXN0c1N5bmM=","IHJpY2hlcw==","LnByZXNlbnRlcg==","IGdsQ29sb3I=","IGhhbnlh","IGZvcnRyZXNz","IGZsYXNoZWQ=","dml6","cmVxdWVudGx5","YnVhdA==","JGNvbg==","Pnw=","LkZ1bmM=","IGh1bW9yb3Vz","dWVt","LlpFUk8=","IFNUTA==","IEJ1aw==","L3NhbXBsZQ==","IEdyb3M=","UmVjaXBlcw==","IGluZmxhdGVk","IHN3dW5n","OkY=","RmFjaW5n","LlRoZW1l","0L3QuNC6","IHNwbGVuZGlk","IHJlcXVlc3RJZA==","LkNlbnRlclNjcmVlbg==","L2F1dG9sb2Fk","ZW1iZWRkZWQ=","X2RlcGFydA==","IFBvcnRz","4LmD","0LDQudC0","ZGlzY3Vzc2lvbg==","X2NvbnN1bQ==","IHNjb3V0cw==","IGNvbGFib3I=","LlN0YWdl","Lm5hbm8=","ZWxkb3Jm","IGdlbWFjaHQ=","ICAgICAgICAgICAgICAgICAgICAgICAgICAK","IHBvbGljeW1ha2Vycw==","X1BLVA==","LFRo","b2t5","X1VJRA==","UGluZw==","IG9yY2hlc3Q=","IG9wdGljcw==","dWhhbg==","IFhPUg==","IGVzcGHDsW9s","IEFkaWRhcw==","cm5n","bWFucw==","LnZzdGFjaw==","IGdldGF3YXk=","IGhpZXJhcmNoaWNhbA==","YW5vaWE=","IEJpdG1hcEZhY3Rvcnk=","cmVhbG0=","CWFw","X2FwcHM=","LWRpdmlkZXI=","LmRyYXdlcg==","IEhBUkQ=","J107Pz4K","LXBhY2tlZA==","5rK7","X1NUUlVDVFVSRQ==","W1k=","aVBhcmFt","KGVx","IGVuY29tcGFzc2Vz","IFwKCg==","LT5b","JnV0bQ==","Z3JvdXBvbg==","c3RyYXRl","RFk=","b21vcnBoaWM=","Jzpb","IGdyYXZpdGF0aW9uYWw=","IE1pY2hh","IFRlbmNlbnQ=","IGNvYWNoZ
WQ=","7Lac","0YPQvNC10L3Rgg==","L21vYmlsZQ==","TW91c2VEb3du","YnVk","IFlhcw==","IFByb3ZpZGVycw==","Tlo=","CXJlcG9ydA==","ZXJybXNn","IGltYWdlUGF0aA==","YWN0ZXJpYWw=","IE1hbmdh","d2lja2x1bmc=","KHVzdWFyaW8=","IikpOw0KDQo=","LyoqKg==","IG9yZ2FuaXNl","SW5kZXhlZA==","X1FVQUw=","KFB5T2JqZWN0","IHN1cnJlbmRlcmVk","UE9DSA==","IE5PVEVT","XFwi","LWpvYg==","IHNldmVudHk=","IyMjIwo=","IE1hbm9y","IGRvd25yaWdodA==","IHRpbWVmcmFtZQ==","aW5zdXJhbmNl","Y2hlY2tlcg==","IFNFQ1JFVA==","IGVjaG9lcw==","IENhcm1lbg==","LnNldEhvcml6b250YWxBbGlnbm1lbnQ=","IGlzQ2hlY2tlZA==","IFRPUg==","X25u","KCco","RmV0Y2hSZXF1ZXN0","IFByaW50ZWQ=","Rmx1aWQ=","IFNUQUNL","R0VT","YWlnbmVk","aWdvcg==","LlVua25vd24=","Q0JD","IENhcmxzb24=","LlVSSQ==","IHBsaWdodA==","L3N0YXJ0","IFBlcnNvbm5lbA==","IFBSRUZJWA==","LCoq","IGxpbWl0ZQ==","X2hlYXQ=","Je+8jA==","IERvbm5l","Z2V0Tm9kZQ==","IFNjaWVudG9sb2d5","IGNvbWV0","IHdlbmln","QXNpZGU=","IE1QRUc=","Jz8=","dmFyaWFibHk=","LmVuZERhdGU=","IHVuY29udA==","IFNjb3Jlcw==","IExvZ2luRm9ybQ==","LmdlbmVyYXRlZA==","LGNo","LW1hcg==","IE5lZA==","IGV2ZW50SWQ=","K3A=","IFNJTg==","L3Jlc2V0","LlJFQUNU","IE1lc3Np","X1JBTks=","LndyaXRlRmlsZQ==","IGNyaXBw","ZXN0aGV0aWM=","RVJTSVNU","IHJlaW1idXJzZW1lbnQ=","Q3VycmVudFZhbHVl","IHVuaW4=","RG93bkxhdGNo","IHBhZGRpbmdSaWdodA==","IHN0b2NrZWQ=","Lycu","IHJlcGF5bWVudA==","dHJhaw==","L2JhY2tlbmQ=","INC40LfQvNC10L0=","Q1NS","IHByZXZlbnRpdmU=","IHBhbnRhbGxh","X3RyaW0=","UGVkaWRv","aG9zcGl0YWw=","IG1hbmFnZWFibGU=","cm91dGVQYXJhbXM=","dGV4dHVyZXM=","Li4uLi4uCgo=","IHPDqWxlY3Rpb24=","TmFtZVZhbHVlUGFpcg==","IHBvbGx1dA==","TW9kZXM=","IExhdWQ=","amF5","IFVycw==","IHNpZ25lcg==","IEpK","IENoZXJva2Vl","X0VYSVNUUw==","IGR3YXI=","ICgkKCcj","IHJlZWY=","Pnsk","IEJheWxvcg==","IE1vZGVsU3RhdGU=","LV8=","IFN0cnVjdHVyZXM=","IHNvdXZlbnQ=","U3BlY2lmeQ==","KHBpcGU=","IGZyYWNraW5n","IEdQQQ==","IGJlbGU=","CQkJCQkJCSAgIA==","IE1pbm9yaXR5","IHR1ZA==","IG9wZW5uZXNz","IElsbHVzdHJhdGVk","IG94aWRhdGlvbg==","IE5L","CVVwZGF0ZQ==","IEVNUw==","IFRlZGR5","IGdlbmVyYWxz","CU1hdA==","IHJhZGlvcw==
","IEFudGlxdWU=","Y29ub215","IFNxdWFkcm9u","KScsJw==","5aOw","IHlvdXJl","IE1haW5QYWdl","IGJlaGF2aW91cnM=","ZW5naHQ=","KEAiJUAiLA==","IHRlc3RjYXNl","IENvbXBpbGF0aW9u","IGZsYXZvdXJz","IEV4dGVuZA==","aWxsYXRvcg==","IGNvaA==","IHNwbGluZQ==","IEtH","LXBheQ==","IGNvbW11bmlzbQ==","IEJ1c2luZXNzZXM=","b2NraW5n","Lk1heExlbmd0aA==","YXNzYW5kcmE=","cXVpcmluZw==","YWRkZW4=","IEplYg==","X2ZhdWx0","W2ZpbGU=","IHByb21pbmVuY2U=","ZGlzY2lwbGluYXJ5","4oCUdGhleQ==","X2V4dGVudA==","IFZJQw==","IGVudGFpbHM=","LnBhcnRuZXI=","IGhpcHBvYw==","TGVhZ3Vl","55S3","d2lwZQ==","LXNwaW5uZXI=","IHNhbHV0ZQ==","IFN1cmdpY2Fs","KG91dHB1dHM=","d29ya2Vk","W3N0cmxlbg==","YXBwb2ludGVk","IEhlZw==","IEFDUEk=","KFte","dWFsYQ==","X3RvbA==","IFJpdA==","LlBheW1lbnQ=","a293c2tp","IHdhbG1hcnQ=","cmVxdWlyZW1lbnRz","IEZJTlNFUQ==","X0JBQ0tHUk9VTkQ=","IE9zYm9ybmU=","KGVycm9yTWVzc2FnZQ==","UmVwb3J0aW5n","IGF1Y3Rpb25z","IGNvbWJvcw==","IE5vdGljZWQ=","X29jdA==","IHByaW1lcm8=","dGFpcmU=","X2hy","INC80L7QtA==","IGNvbnRyYWRpY3Rvcnk=","PSJA","YWNoaW5lcw==","KG9wdGFyZw==","IFBlbmd1aW4=","IEFiYmFz","IHN1YmxpbWU=","IHBhZ2VhYmxl","IERlZmVuc2l2ZQ==","IGRpc3RpbmN0bHk=","IEF1dG9tYXRpY2FsbHk=","VW5kZXJzdGFuZGluZw==","RXF1YWxpdHlDb21wYXJlcg==","Z290YQ==","ICI6Og==","IHB1bHZlcg==","IEJhdHRsZXM=","IHVucGFyYWxsZWxlZA==","VENIQQ==","IGNvbnN0cnVlZA==","LWFmZg==","IHByZWN1cnNvcg==","LWxmcw==","IG1hZHVyYXM=","IERhaXN5","IEFyYmVpdHM=","Lk1hbmFnZW1lbnQ=","CUlu","IHJvYmVz","IHNww6lj","4oCcKA==","IG1hdGVybml0eQ==","ZXh0ZW50","IFNwYWNlcg==","RGlkQXBwZWFy","CXVz","LmdldFJlcXVlc3REaXNwYXRjaGVy","KGNvbHM=","IHBsdW1tZXQ=","7IU=","IHsKCgoK","w6lyaWNh","IFNpemVz","LmVudW0=","LkhpZ2hsaWdodA==","ICEhfTwv","QVRURVJZ","IFNvcm9z","R0xmbG9hdA==","44KE","IEplbm5pbmdz","Pz8KCg==","IFJvbWVv","ID8+CgoK","V2Vubg==","IGNsaW1heA==","IGNyZW0=","X3RoYXQ=","W+KApg==","X2RvbWFpbnM=","X1JFUExZ","IGNvbXBsZXRh","VkVTVA==","X3BhcnRpY2xl","IHNvcA==","IGZhdGFsaXRpZXM=","aW1wbGlmeQ==","IFNLRg==","IGluZnVzaW9u","IEphdmllcg==","IGJhbGxldA==","IGFtaWdv","LndhbnQ=","IGNvbGxhZ2Vu","I
Exhd3llcg==","LlN0YXRlbWVudA==","LnJ0","YmFhcg==","RW5kUG9pbnQ=","IEJlaw==","U0hJUA==","IHBhdHJpYXJjaA==","IEF1bnQ=","X1RN","IG3DrW4=","IG1hc3RlcmVk","V1hZWg==","IGVzcG9z","PWxvZ2dpbmc=","IHJpZ2h0ZW91c25lc3M=","dG9ycmVudA==","IGJzdA==","X0NIQUlO","IG91dHNraXJ0cw==","KHJvdGF0aW9u","ICcuJyk=","aWdyYW50cw==","K2xzaQ==","IENDVFY=","X1BIQVNF","LmF6dXJl","X1Byb2Nlc3M=","dmFl","IFRyb3BpY2Fs","IEFua2FyYQ==","aW1hZ2VWaWV3","X1JVTk5JTkc=","ICopX18=","4bq/bg==","KGNsaQ==","c2NhdHRlcg==","IHNjaGU=","UmVnaXN0cmFy","IGFpcmluZw==","IHB5cGxvdA==","aXNpw7Nu","L2N1c3RvbWVy","IHNpbXBsZW1lbnQ=","IGNsYXNzeQ==","IERXQw==","IEJhc2hhcg==","IERFVkVMTw==","IFZpY2s=","YXZhaWw=","IEjDtg==","X2V4dGVuZA==","ZHJGYw==","LmlzTm90Qmxhbms=","IHBsYWlz","fH0K","IHBvcm5vZmls","bGFicw==","IGhhdXM=","IG9yaWdpbmF0aW5n","IHN1cnJvdW5kcw==","IFFVQUw=","bWVn","L2xvZ2dlcg==","W29iag==","IGlycmVzcG9uc2libGU=","IFB1YmxpY0tleQ==","SE9ORQ==","Oicv","aWJveA==","IEZWZWN0b3I=","fHsK","YXRhbG9hZGVy","aGF3a3M=","SERS","IGVzY2FsYXRpb24=","IFBvZHNEdW1teQ==","ZWxpdGU=","IHByZXN1cA==","Q2FjaGVk","Pkc=","Lm9wdGltaXplcg==","IFZpc2libGU=","tIA=","IG5lbg==","IHBjcw==","IElkbGU=","W0FueQ==","IGtleWJvYXJkcw==","IENPTVBPTkVOVA==","IHRpdGFuaXVt","KG11dA==","IExlZGdlcg==","IHByb3NwZXJvdXM=","ZXRyb2ZpdA==","X0xM","X3BhdGllbnQ=","IHBkYXRh","IGtvbnRha3Rl","U3dpcGU=","IGNoZWVyZnVs","IEhvbmR1cmFz","Il1bJA==","IGhlbW9ycmg=","IjoiKw==","IGxlYXNpbmc=","IGluc3RhbGxz","IFBheA==","IExvZ2lzdGljcw==","IGtpbmV0aWM=","IFBob24=","X21vdmVtZW50","CWJ5dGVz","IGNpbmNv","IE1hZG5lc3M=","Iikr","IEpF","X2lq","U2NlbmVNYW5hZ2Vy","IEJ1c3Q=","cHRlc3Q=","YWVh","IGJlc3Nlcg==","w61n","0LTQuNC9","KHRhc2tz","KCIoIg==","c2V0VHlwZQ==","KG91dGZpbGU=","CXJlc2V0","IEFSQw==","IG3DunNpY2E=","IFNoZWxm","IG1pblk=","cGNo","IHdlaWJlcg==","aXNzb3I=","IHRyb3V2ZQ==","CUJ1dHRvbg==","IHJlZ2VuZXJhdGVk","xaNp","aW1hY2hpbmVyeQ==","YmxvY2tpbmc=","LmRhdGFUYWJsZXM=","X2ZyYWM=","IEFkdmFudGFnZQ==","LnZpc2l0TWV0aG9k","6YeN5paw","IGV4dHJhcG9s","IHRlYXNpbmc=","IEhpdGNo","IEdlZWs=","RVNDTw=="
,"IHdpY2g=","CWF4","X2RlY29y","IHNjcmVlbldpZHRo","IFNvcGhpYQ==","Rm9yZ290","LnVuaQ==","IFZlbnR1cmU=","X2NvbGxpc2lvbg==","IGxhd21ha2Vy","KEVkaXQ=","YmxlcnM=","IGdldE5leHQ=","4oCUeW91","TWVkaWFQbGF5ZXI=","IEhvcmRl","IENvbmdyZXNzbWFu","b2JzZXJ2YXRpb25z","CXByb3BlcnR5","IDwtLQ==","Q3JlYXRlZEF0","dWJ5dGU=","IHF1YXJhbnRpbmU=","IGRpc3RyZXNzZWQ=","X0FQQg==","IEdvb2RtYW4=","44Kr","IHJlY29tZW5k","X1BSSU5URg==","RE9ORQ==","QmluZGFibGU=","cnN0cmlw","Y2VudGFqZQ==","IFVuZXhwZWN0ZWQ=","IFNDSE9PTA==","IFByb2Zlc3Npb25hbHM=","IEdQVXM=","TGVzc29u","RXhjbHVzaXZl","IGF0cmF2","IERhbms=","IExhd3llcnM=","IFdhbHRvbg==","Pltd","IGFsb3Vk","PSIuLi8uLi8uLi8=","IGRlYmF0aW5n","IEFWRw==","X1ZPTA==","L2NnaQ==","LmRlZw==","Omc=","LkluZm9m","TWVhc3VyZVNwZWM=","LnNvbmc=","bXRyZWU=","dWxscw==","Sm9yZGFu","IENvdmVycw==","IGF0dHJpYnV0YWJsZQ==","IGplZGlz","aWF0cmljcw==","IHJvdHRlcmRhbQ==","IG1lbGQ=","IENvbnRlbnRUeXBl","IG1hbnRsZQ==","IGFsaWNl","X2R1cGxpY2F0ZQ==","L0ludGVybmFs","IGZpbGVzaXpl","CWZpcmU=","cmVzZQ==","b25kZXJl","IGZhbWlsaWFyaXR5","IENyZXN0","IGthcm1h","IHRvcmlubw==","IG1lc2E=","L3RlbXA=","IGNoaXI=","IE92ZXJmbG93","IHRlbmVtb3M=","dW5paw==","TkVYVA==","QWxsZQ==","IG54dA==","TWFydA==","IGF0bA==","IHBlcmlvZG8=","X3lvdQ==","IH0pKS4=","aW50ZXN0aW5hbA==","LkFkYXB0ZXJWaWV3","IGhlc2l0YW50","IGNvbXBhcmF0aXZlbHk=","LlVJbnQ=","KHZpZXdNb2RlbA==","IHNhbmdhdA==","IFJlc3BvbnNpdmU=","IFphY2s=","4oU=","SkFWQQ==","IEZ1bGxlcg==","IOKdpA==","LkNvbnN1bWVy","IGFuaw==","IHJlYWN0b3Jz","ZnVjaw==","X3JhdA==","IHNlc3Npb25GYWN0b3J5","X2JhY2t3YXJk","IHNjcmFtYmxlZA==","CXRo","IGluc2Vuc2l0aXZl","IGNoYW1wcw==","IG5naW54","IGNvbmhlYw==","IEphc3Blcg==","LmZt","U3RyaWN0RXF1YWw=","YWNoc2Vu","LU5vdg==","bGFzc2Vu","LmludGVncmF0aW9u","KGxibA==","Q29tcG9zZQ==","IEZvbg==","w5o=","R3JhdGlz","IExpbWU=","IEFkYXB0ZXJWaWV3","IHBvaXNvbmVk","YW5jaG9ycw==","6K6+6K6h","J10/PiI=","IHByb2N1cg==","SXRhbHk=","Lk1PTlRI","IExVQQ==","IExpdGh1YW5pYQ==","IEhlYWRz","X0NIVU5L","IFBVU0g=","QXNwZWN0UmF0aW8=","IHdlZw==","IHZpZHM=","IFdlaW4=","CUlOVA==","c2
Vzc2lvbklk","SW5kdXN0cnk=","IGRlbm91bmNlZA==","SktMTQ==","IFZhbmVzc2E=","LklkZW50aWZpZXI=","cHJvcHJp","INC40LM=","IHTDqWNu","IG1vc2FpYw==","U3RyZWFtUmVhZGVy","LVRo","Zm9ydGg=","IGFkaGVyZW5jZQ==","YmF0ZQ==","IGtuaWdodHM=","c291bmRz","IHNhbGxl","T01FVA==","44K544OI","LXRt","IFJoZQ==","LkZpbGVPdXRwdXRTdHJlYW0=","5YiG57G7","IEVORw==","aG9saWRheQ==","IENvbmdyYXR1bGF0aW9ucw==","KSgK","IGFnZ3JlZ2F0ZXM=","SE9PSw==","ZXdpcmU=","U2VuYXRvcg==","IGVtYmVkZGluZ3M=","ZXB5","KENPTQ==","IHJvYmJlcg==","w6R0ZXI=","d2FuZw==","X3RlYWNoZXI=","IHJlc2VudG1lbnQ=","IGxldHR1Y2U=","ZXJyZXVy","KGlj","IFRhY3RpY2Fs","IENvbnRyYWN0cw==","IG3Dpm5k","IHNpdGlvcw==","IGJhc3RhbnRl","IG51ZXZvcw==","CU5kckZj","IHByaXZhdGVLZXk=","dWNjaA==","TU1kZA==","IOi+k+WHug==","dW1iYQ==","QGZvcmVhY2g=","OiIpOwoK","IHNsaXBwZXJ5","IEtleXN0b25l","IHBpb25lZXJpbmc=","X3RyaWFuZ2xl","KCIK","CQkJCQkJCQkgIA==","IEludGVydmVudGlvbg==","U0NJ","IGNKU09O","IHRlcm1pbmF0aW5n","67mE","IGJhYnlz","U3Vic2V0","IOuh","IHNldWxlbWVudA==","IG11ZXN0cmE=","RW50cmU=","5Lul5LiK","bmdv","ImJ5dGVz","UVJTVA==","IHlwb3M=","cGVyc29uYQ==","IERlcGxveQ==","Y2Vl","IOCu","LmdvYWw=","IGhhYml0YXRz","IGlzQWRtaW4=","IGV4cGxvaXRpbmc=","IHZlbnRpbA==","IEJhbGxz","2KfYqA==","IG1pbmRmdWxuZXNz","KGt3YXJncw==","IHJlc2VtYmxpbmc=","IGNob2ly","IG9uQmFja1ByZXNzZWQ=","IFNFQ1VSSVRZ","L2d0ZXN0","IGp1c3RpY2Vz","IGludGVnZXJWYWx1ZQ==","YmxhaA==","IEFpbQ==","X2ZpbmFsaXpl","a2Vo","IENvbXBsZXhpdHk=","IGF1Z3VzdA==","Z2V0RWxlbWVudHNCeVRhZ05hbWU=","IHByZWFjaA==","IHByb251bmNpYXRpb24=","IFRyYXNo","LXBlcmNlbnQ=","X1BSSVY=","IEh1bnRz","IEN1cnNl","dWVsbGVu","IGhlYXZ5d2VpZ2h0","WGk=","CXNlbGVjdGVk","IE1jQ295","5byC5bi4","fD0K","IEJhdHRsZWZpZWxk","SXRlbUltYWdl","IGRlZHVjdGlvbnM=","IEVsZW1lbnRhbA==","KCkpOy8v","IEJ1cms=","fSkNCg0K","c3dpZnQ=","L2Z1bmN0aW9u","VXN1YWxseQ==","X1N0","X2ZlYXRz","IElzVmFsaWQ=","IHphZA==","SW1hZ2VDb250ZXh0","IGNsYXNzbmFtZQ==","IGRvbm5lcg==","IC0tPgoKCg==","IG1vdG9yY3ljbGVz","KycvJys=","IHNldEJhY2tncm91bmQ=","XENNUw==","LkFsbEFyZ3NDb25zdHJ1Y3Rvcg==","IExleGluZ3Rvb
g==","LmV4YW1wbGVz","IFB1cnM=","UHVzaE1hdHJpeA==","ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09","LmFkZFRhcmdldA==","cG9yYQ==","RnVsbHNjcmVlbg==","IGdvb2Y=","aGxlbg==","w6RnZQ==","IENVUkw=","IEludGVyZXN0aW5n","IHJldHJpZXZlcw==","X09iag==","aW5uZXNz","LS0tLS0KCg==","LnRzdg==","KElN","IEJyYXZlcw==","X0lTUg==","b3N0aQ==","4buT","IEV4dGVyaW9y","IENvdXJ0bmV5","IHJlc2lkdWVz","VGllcg==","Lio7DQoNCg==","OmJsYWNr","d2ViVmlldw==","InBhdGg=","IG1hc2E=","XSE9Jw==","IE1hdGNoaW5n","ZHVy","SnZt","PWNvbnRleHQ=","X1JJTkc=","IHByb3BvbmVudHM=","IFFTdHJpbmdMaXRlcmFs","IGluZmxhdGU=","PEZsb2F0","IERvbm92YW4=","KElP","SE9SVA==","IGRpc2FncmVlZA==","aXNreQ==","YXNraW5n","X1ZFQw==","SEFTSA==","IG1hdGhz","IExhc3RseQ==","IGRlcHJlc3Npbmc=","LmVzdGFkbw==","IGhhbG8=","X2JsZQ==","IEdhYnJp","PFRSZXN1bHQ=","IHRyb29w","IGVudW1z","IFNFUklBTA==","bnVtZXJ1c2Zvcm0=","IENoaWM=","LWV4ZWM=","IGJhY2tsb2c=","IEJyYXZv","UG9wTWF0cml4","IEJydXQ=","IGJsb3F1ZQ==","IGp1bml0","IFdoaWxzdA==","0YbQuNGP","ZmV3","rIE=","IFZhcmlldHk=","IFBvbGl0aWNv","ZXhlbXBsZQ==","VXNlckNvbnRyb2xsZXI=","IGhhcmRlbmVk","YWtlbnM=","IFNlZWRlcg==","b3dhcmRz","Y2hlY2tzdW0=","IFNhaQ==","VkVSVEVY","UmVzcG9uc2Vz","cGxvZGU=","LWhhcmQ=","U3BlY2llcw==","UmVuZGVyVGFyZ2V0","X0NIQVQ=","IHNob3djYXNlcw==","aXRpbWF0ZQ==","X0ZPUkVBQ0g=","X0NPTkZJR1VSQVRJT04=","ZWJh","IEVzc2VudGlhbGx5","KHBvbHk=","LWxlYXJuaW5n","IGfDpXI=","X3N1Y2M=","KE1hdA==","IGNvaWxz","YnJhcw==","IGFtYQ==","X21hdGNoaW5n","aW5kdXN0cnk=","IE5vcnJpcw==","IEV4cG9zdXJl","IHBlcnZhc2l2ZQ==","IGRleg==","5peP","IGVsZWN0cm9uaWNhbGx5","RERS","IFN0aW0=","INGE0LDQudC70LA=","IG1hZHJl","bmVtb25pYw==","a2ljaA==","IEZyYWdlbg==","IFJ1bmU=","IG9uVG91Y2g=","CXNjYWxl","IFBoYXJtYWM=","IE1hbmRhdG9yeQ==","IFN0bw==","IEJyYW0=","X0xlZnQ=","X1NUQVI=","KX19Ig==","c2Npb3VzbHk=","0LXQt9GD0LvRjNGC","56uZ","Z3Jhdml0eQ==","K0M=","fTw=","QU5HRVM=","IGNvbnRyYWN0aW9u","IFdhbGxwYXBlcg==","LkZhY2U=","IHByw7N4aW1v","LmZpZw==","bGFuZ2xl","INC/0LXRgNC10Lw=","X0NSRUFU","QmFzaWNhbGx5","IGF
3YWl0cw==","IENIQVJBQ1RFUg==","IHZwbg==","SG9u","IGV2aXRhcg==","IFVuZG8=","UVM=","IEVkbXVuZA==","IG1pcmFjbGVz","IFRpbWluZw==","IFZlbmV6dWVs","LlNxcnQ=","b2lkYWw=","IGVycnM=","LS0tLS0tLS0KCg==","IERFQ0xBUkU=","IHZpZ29yb3Vz","YXJnb24=","IGFnZ3JlZ2F0ZWQ=","IFNoYXJrcw==","IEN5cnVz","IHJlcHLDqXM=","bWF0Y2hlcg==","IGd1aUFjdGl2ZQ==","PyIpCg==","IEpOSQ==","LmNoYXJzZXQ=","J3w=","IGdvYXRz","aW5kcmU=","LmdldERheQ==","IHBhcnNlcw==","IElocmVu","X18uJy8=","aWxlZ2Vz","bmF2aWdhdGU=","IEJ1ZmZ5","UEhQVW5pdA==","IG1hc3Nh","YWx0YXI=","JyldLAo=","IG92ZXJzZWVz","IHt9DQoNCg==","IFdMQU4=","Y2xpcGJvYXJk","X0luc3RhbmNl","IGdsYWRseQ==","KHNlcmllcw==","IHZhZA==","IGdldFBhZ2U=","W29m","LkludGVydmFs","aW51cw==","Y2hhckF0","b2xlbQ==","YWludGluZw==","LkFG","X21pbm9y","X0lM","O3k=","IFRlbGVjb20=","IFBvbmQ=","IG1tYXA=","L14=","IFlhaw==","IFJhYmJp","ZW5vcw==","CUNvbnRleHQ=","LnZlYw==","KEF0dHJpYnV0ZQ==","IGNhdGVnb3JpemVk","IGRpYWJldGlj","KHJhbms=","IHBhw61zZXM=","IEAiIjsK","IGppa2E=","YXJzaXR5","IC8o","LkhlbHA=","LWJhbm5lcg==","IEJ5cm9u","IHVucmVhbGlzdGlj","IHxf","IFN0b3B3YXRjaA==","IGV4ZW1wdGlvbnM=","L2NhcmRz","IHRvc3RyaW5n","bmdpbmU=","IHNwcmF3bGluZw==","IGx0ZA==","IFVuZGVyc3RhbmQ=","INGC0LXQutGB0YI=","ZXdpdG5lc3M=","IGNhbGxCYWNr","LVllYXI=","RnVlbA==","PSo=","IGludmVudG9y","IGJlc3RzZWxsaW5n","IGhhcmRuZXNz","IFR1cw==","IGtleW5vdGU=","IGJlYXU=","X2Fib3J0","IHByb3Bvcg==","IGNvbWVyYw==","X1JFRkVS","UGFz","aGF2ZW4=","LWZpeA==","Q2Fub25pY2Fs","IGxvb2tvdXQ=","RXhwbG9yZXI=","IGNlcmNv","KHNlbnNvcg==","IEpzb25TZXJpYWxpemVy","IHZva3Nlbg==","IGJyaWdodGVzdA==","IHN0YWJiaW5n","LkJl","LmFkZFByb3BlcnR5","IEh1bXBo","IGlzQXV0aGVudGljYXRlZA==","5rKh","IHBvcmVz","IGplZ28=","IFNob3dpbmc=","ID8+Ij4NCg==","X0NPU1Q=","aWxpbmVhcg==","IFdvcmtzcGFjZQ==","IHNwZWw=","YWdvZ3Vl","IE1pbGxlbm5pdW0=","IFBvcHVsYXRl","IG5pZA==","LnBhcnNlQ29sb3I=","U29sYXI=","IEdhZA==","IOykkQ==","IEthbXA=","CXJt","IGJlbno=","IEhvbmVzdGx5","IGVsZWN0cm9kZQ==","IFByYWlyaWU=","IFBST0ZJTEU=","IE9yaWVudGFs","IE9MRUQ=","L2NvcHlsZWZ0","YXdhaWk=","KHByb2R1Y3
Rz","KVw8","LWNyZWF0ZWQ=","Lk1hbnlUb01hbnk=","Ikhvdw==","INCy0YvQvw==","IG1pdG9jaG9uZHJpYWw=","X3Rlc3Rpbmc=","KGNyZWF0ZWQ=","IGdldEZpZWxk","X0VWQUw=","XS4i","IEZTTQ==","IFJpdGE=","IOWPguaVsA==","IGPDtHQ=","IEluc2lnaHQ=","CW15c3FsaQ==","X3RpbWluZw==","SURP","KSkpKSkK","Q09WRVJZ","LmltYWc=","Q0RG","bHVzdA==","aWNrdA==","X0ZQ","LicsJw==","Z2Nj","IGt1cno=","X3B3bQ==","IG9kcG93aWVk","IEJhcnJpZXI=","LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKgo=","cGFr","LUlzcmFlbA==","IFJ1dGdlcnM=","IHNlbGVjdGVkSXRlbQ==","IFJhbWlyZXo=","RmFybQ==","IGNhbGVuZGFycw==","Z3ppcA==","IGJsb2NrYnVzdGVy","IFBseW1vdXRo","55yM","cmVzcG9uc2Vz","LkRpYWxvZ0ludGVyZmFjZQ==","LWdyYW5k","IGdldFNvdXJjZQ==","IGRlanRpbmdz","IHRpZXRlbg==","IGNvbmRlbW5hdGlvbg==","IGNvbnRpbnVhcg==","Lk1vY2tNdmM=","L2VuZ2xpc2g=","IE1lZGlhUGxheWVy","Y29tcHV0ZWQ=","IENsaXBwZXJz","KGRlbGVnYXRl","LlNsZg==","IOuhnA==","IFRpZGU=","IGlocmVt","IFdhbg==","0YPRjtGJ","fT48","RGlzY3Vzc2lvbg==","IHdhdHRz","LW1pbnVz","IEp1bGlldA==","6ZuF","IGNvbmNsdWRpbmc=","YW5kc2NhcGU=","IMO6bHRpbWE=","IERFUlA=","IHNpZ25VcA==","IFNlY29uZGx5","V0FJVA==","bGRz","LmNhbGxiYWNrcw==","KGhvdXI=","aW1hdG9ycw==","dm9sZW50","QUFG","ZWRyaXZlcg==","IE1hdGhlbWF0aWM=","PFR1cGxl","IC8+Jw==","e2o=","X0FCT1JU","RXRoZXI=","IGVkdWNhdG9y","IHByZWNhdXRpb24=","IGZpbmdlcnRpcHM=","Z2V0VmFy","Y2FtYXRhbg==","LWRlYnVn","IFJBRg==","W2FyZw==","IHJhY2Vk","IHRzdW5hbWk=","LmZsaW5r","IGdseWM=","dWtv","IE11bHRpcGx5","IHJlZGlzdHJpYnV0aW9u","QUdP","IFJvdXRpbmU=","IG9wcg==","KGxvd2Vy","IEZ1bmt0aW9u","LmRr","IGVndA==","X0JBU0lD","c3lzY2FsbA==","IExTRA==","IER1cGxpY2F0ZQ==","X3NlbGw=","IGVycm9ySGFuZGxlcg==","X2lwcw==","IGVydg==","YW5uaWU=","KHJlc291cmNlTmFtZQ==","IGJvdHRsZWQ=","IGNyYXdsaW5n","ZWdtZW50","LnNldFRhZw==","IHJzcw==","IFF1YXJyeQ==","X2V4YWN0","Lmp3dA==","IEJvYXJkcw==","b3Bp","IG5hc2Fs","IFhZWg==","LnVk","Tm9ydGhlcm4=","IGFjdGl2YXRpbmc=","ZWR4","b3ZhaA==","IGluZHg=","QWxlcnREaWFsb2c=","IHRpZW5lcw==","YW5ueWE=","X3Bhbg==","KGRlY2l
tYWw=","LkRpY3Q=","IHN1YnNpZGlhcmllcw==","UHJvZHVjdE5hbWU=","RmV3","ZGF0bw==","b2RpZWQ=","LXVuZGVy","IOqygw==","54mI5pys","YXRpc20=","W01hdGg=","Lic8","KGluZmlsZQ==","IGRlbm90ZXM=","JGNsYXNz","X1NFQ1VSSVRZ","IHNld2FnZQ==","bWVsb24=","KENoYXJhY3Rlcg==","L2dpdGh1Yg==","IGdsYXJpbmc=","Lkd1aWQ=","X3NwYXJzZQ==","IE1hcmdpbg==","X2Rucw==","IG1laW5lcg==","IGxlZnRpc3Q=","CWxvYw==","YWJ5dGVz","IGVxdWlwbWVudHM=","ZXhwbw==","IFNvbWVyc2V0","RUs=","5o2i","IGxlY3R1cmVy","IG1lbWlsaWtp","5qC4","57Sg","cHJvbg==","OnBvaW50ZXI=","Ym9ycm93","IFByb3RlY3RpdmU=","X2Nm","INCV0YHQu9C4","YnBw","JzsKCgoK","YXR1cmFsbHk=","X05BVg==","IHBlcHRpZGU=","PmQ=","IGlmc3RyZWFt","X0ZBQ1RPUlk=","Jyk7Ly8=","am9pbmVk","bW9uZw==","IHRpbWVzcGVj","IGRlc3RhYmls","IGF1dG9w","LWxpbWl0","cHVibGljYXRpb24=","IERlbm4=","Lk1lbW9yeQ==","KHNrYg==","IEFuYWhlaW0=","X1JFVFVSTlRSQU5TRkVS","b3VldXI=","KF8oJw==","bGVndA==","aXN0aW5ndQ==","CXByaXY=","IHJlZGlyZWN0cw==","TXQ=","IGFsbGVlbg==","IFBvaW50Rg==","IG9taW4=","IGNpdHQ=","IFRhZ2U=","IFdhbGxz","4buJ","IG9jY3VweWluZw==","eEJG","cmFuZ2xl","IHJlbGF0aW9uYWw=","LW9yZw==","IGpwZw==","LWRlcml2ZWQ=","IG1hbGZ1bmN0aW9u","IEJlbnNvbg==","KHNjcm9sbA==","IFhE","SG9seQ==","KGNvbW1hbmRz","IHRpcHBpbmc=","IHByaW1pdGl2ZXM=","IHNleGxl","Q2FsbENoZWNr","IE1BU1RFUg==","X1RFQU0=","LnNldFJlcXVlc3RIZWFkZXI=","X3NwZWNz","IHNlcmdl","Lk1hc3Rlcg==","IGltcw==","LlNwcmluZ0Jvb3RUZXN0","cGF5cGFs","IFdBTlQ=","Lkluc3Q=","IENhcnBldA==","IHdyb25nbHk=","KCQoJy4=","IGJpbGQ=","LlJvbGw=","IFVyYg==","LWNhbg==","44GP44Gg44GV44GE","b2xpYmVyYWw=","PCEtLTw=","4oCUZm9y","IG5lZ2F0ZQ==","KG5vcm0=","YWVj","X3NhbGFyeQ==","cGxhaW50ZXh0","b2Rlc2s=","IEJvc2No","U2NpZW50aXN0cw==","aW5kZXhlcw==","IG1weg==","IGdyb3VuZHdhdGVy","fX0pOwo=","0LDQu9C40Lc=","IGVybw==","IHByZXNjcmliZQ==","IEV4dHI=","PEFycmF5TGlzdA==","IGF0cm9jaXRpZXM=","QXJlYXM=","IFRJbnQ=","KHBsYXllcnM=","IGRhdGFi","IHd5bQ==","44Gb","IGR1YXM=","X3Bvc3NpYmxl","IGluc3RydWN0aW9uYWw=","aXRpb25lcg==","L2F1ZGlv","ICAgICAgICAgICAgICAgIAoK","c3RvcmVk","T01QSQ==","IGFwcHJlbnRpY2
Vz","VGVuYW50","IENvdXQ=","IGNvbnRyYWNlcHRpb24=","TG9hbg==","X3Zpc2liaWxpdHk=","J3x8","LlBhcnNlRXhjZXB0aW9u","IGNvaW5jaWRl","LmdldFdpbmRvdw==","IE1hcnRpYWw=","X3Rscw==","L2Jvb2tz","IG91dHJhZ2Vk","ICh+KA==","c3Ryc3Ry","IEJveGVz","6YO9","44Ol","Uk9J","RnVuY3Rpb25hbA==","IFByb2Q=","PFRlc3Q=","IHZpZGVvdA==","IGFtb3Jl","YWJicg==","IE1vbnVtZW50","IHJlaW5mb3JjZW1lbnQ=","IENvY29udXQ=","LnNlbmRTdGF0dXM=","Lmtl","IExlYXA=","X2FydGljbGVz","UGll","IElydmluZQ==","QUJDREVGR0hJ","IEV4cGxhbmF0aW9u","Z3JvdXBCeQ==","IG92ZXJoZQ==","IGFuw6Fs","IGNsYXNzaWZpZXJz","IE1peGVy","L2NvbG9ycw==","IFVzZXJEYXRh","X0FSUk9X","X3ZsYW4=","LkNyZWF0ZURpcmVjdG9yeQ==","IEhhaw==","IEJvbmVz","IEFwaVJlc3BvbnNl","IE1vb2R5","REFD","Z2V0Yw==","6LaF","LkZpcmU=","6aM=","IGhpdHRlcg==","ZnJlc2g=","4LmB","IENoaWxkaG9vZA==","eG9y","LWh0dHA=","IE1PUg==","LnNlbmRLZXlz","X3NoYXBlcw==","IFVwcw==","IEFycmVzdA==","YXp6aQ==","X29wY29kZQ==","Lk5vbWJyZQ==","IHByw7Nw","IHp4","IHRyZW1lbmRvdXNseQ==","U3BhY2Vz","ZWNj","IHZlbHZldA==","IG1lbW9yaWE=","IExBUA==","LkRyYXdMaW5l","IHRhcmdldFR5cGU=","cmVzdHJpY3Rpb24=","IERSVg==","W3RvcA==","IeKAmQ==","L2NoYXQ=","IHNvbmlj","VG9yb250bw==","b3dp","LmRvY3M=","IEluaXRpYWxpc2U=","IDwh","LnRibA==","LlByZXBhcmVkU3RhdGVtZW50","L2RvbQ==","LnJvdA==","X1BST00=","S2VlcGluZw==","IGhhcmdh","IGpvcm4=","IGlkZW50aWZpYWJsZQ==","W2lw","UGluaw==","X0hlYWRlcg==","w5E=","YWRsZQ==","572R57uc","c2VxdWVudA==","QWN0aXZhdGVk","dG1wbA==","IFBhbGw=","IGZhdGFsbHk=","fX0pCg==","UG9wb3Zlcg==","IE1jTGFyZW4=","Q2hhbmdlZEV2ZW50QXJncw==","IEZvcm1hdGlvbg==","TmFt","bmV3c2xldHRlcg==","LmZyb21TdHJpbmc=","X2ltbQ==","QVBQRUQ=","LG5vZGU=","KGRldA==","IHBhcmFsbGVscw==","IGxhc2Vycw==","IGNob2NvbA==","L3BvcnQ=","YWZmZW4=","KGRldGFpbHM=","IHJlcGxpY2F0ZWQ=","QXNTdHJlYW0=","YXJtYWM=","XV09","YWxhY2g=","X3Nlc3Npb25z","QWxnb3JpdGhtRXhjZXB0aW9u","IHZlcmJvc2l0eQ==","LkNvbHVtblN0eWxlcw==","KFVTRVI=","IHNsZWVwcw==","IGFxdWF0aWM=","X2J1bGs=","PScuLw==","b3VybsOpZQ==","IE1TRA==","IEJsb2M=","IEdsZQ==","IHJlcHJlc3Npb24=","IGVudG9uY2Vz","CQkgICA
gICAgICAgICAgICAgICAg","WU5D","LkFsbG93R2V0","IHR1cnRsZXM=","ICd+Lw==","ZXNzb24=","IERJRQ==","IEFxdWE=","IFNFUQ==","Ozs7Ozs7Ozs7Ozs7Ozs7Ow==","LnB1dHM=","IE1BSw==","KEN1c3RvbWVy","IGRlc3NlcnRz","IGVtYmVsbA==","IHRheGVk","5bqX","IHNjaGw=","cmVzY28=","IEZyb2c=","IFBlbmRpbmdJbnRlbnQ=","X0xvY2Fs","L3NlY3VyaXR5","IFJveA==","IHNwb2lsZWQ=","X1dJTkRPV1M=","SmVubmlmZXI=","IGRhdGk=","VW5sb2Fk","LmdyaWR4","KHN0YWdl","4buX","U3FsQ29tbWFuZA==","Lm14","IGJsaXR6","IEZvcnRyZXNz","IEJyb3dzZXJBbmltYXRpb25zTW9kdWxl","d2luZQ==","TlNF","LXJhbmtpbmc=","eXJl","IGxpbmthZ2U=","w6Fr","kZw=","YXRzYXBw","IEN5Y2w=","IGVjb2xvZ3k=","IGJsYXRhbnQ=","IFBlcmY=","IFhpYW9taQ==","IERvcnRtdW5k","cmVzdWx0U2V0","IGdpw6A=","IGZhdWNldA==","IERhbHRvbg==","IGZyZWVz","QlVGRg==","LnBhcmFsbGVs","IEFzdHJvcw==","IFZFQ1RPUg==","IHN0YW5kb3V0","w7Ntbw==","IGZyYW1lYm9yZGVy","X1BBUkFNRVRFUlM=","IEZhbGs=","IERpZ2l0","IGVsZWN0csOzbmljbw==","IHZlcnI=","VUlBbGVydFZpZXc=","KFNxbA==","LUlORg==","IikpKTs=","JycK","KEVGRkVDVA==","IFp1bQ==","X0RQ","KV07DQo=","IGFudGVubg==","IGFiYnJldmlhdGlvbg==","IHNlaXNtaWM=","X1RSQU5TTA==","tZw=","Lk1pbGxpc2Vjb25k","LGxhdA==","IEFuY2g=","X01vZA==","QWxyaWdodA==","ZGRh","IMKl","VU5ETEU=","INC30LDQsw==","IHN1bGZ1cg==","IFNpdGg=","IE5pbWJ1cw==","IEV4YW1pbmF0aW9u","X3dpZmk=","fWApOwoK","IHNlbnNhdGlvbnM=","YWZz","X0NMUg==","IGluZmluaXRlbHk=","IHN5c3TDqG1l","X2ZvbnRz","SW1wYWN0","UG93ZXJlZA==","IDw9Pg==","X25lZWQ=","REVDUkVG","IC8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8v","IFJlcG8=","Z2V0U2VydmljZQ==","JG4=","X3BjdA==","RXJyZXVy","IE5HT3M=","ICoKCgo=","LmF0YW4=","X1RNUA==","IGNvbGxhcHNpbmc=","IHNobw==","X1BDSQ==","Lm9wZXI=","KGFkag==","IGdpb3Y=","Piku","IGluY29udHJv","YXJkYQ==","IGFwZXg=","IG1lZGlkYQ==","IFNoZWlraA==","IEFybWVuaWE=","YXNzb2NpYXRl","LXdvdw==","IFR1cm5pbmc=","IEZyZXVk","IEZvb2w=","IExEUw==","LS0tLS0tLQoK","b2xzb24=","LkZJTEU=","X2RldGVjdG9y","RG9taW4=","IGRlcGxveW1lbnRz","IGZhcmV3ZWxs","KGJpbmQ=","IG5vdmljZQ==","dGRvd24=","IGdldE
VsZW1lbnQ=","IHZlbGl0","YXN0aGFu","CWNoYW5uZWw=","X0ZSQU1FQlVGRkVS","LnRyYWlsaW5n","LnNldEVkaXRhYmxl","Oyw=","IElERg==","X1BC","Z2V0TGFzdA==","IENvYXN0YWw=","IEhhbmR5","bGluZ2Vy","44Gn44KC","UGVyc2lzdGVuY2U=","LmdldFNlcnZpY2U=","INC+0Lo=","IG5vdHdpdGhzdGFuZGluZw==","KFBS","VU1C","J10pKXsNCg==","ZW1icmFuY2U=","ZXhjZXJwdA==","YXF1","X2Jsb2M=","IFByb3Zpc2lvbg==","IE1jRG9u","IEdvbGRiZXJn","IGNvbXBvbmVudFdpbGxVbm1vdW50","IGJhc2VQYXRo","LWZpcmVk","IGZvbGxhbmRv","IFRpbGVz","QGVuZGZvcmVhY2g=","RU5DSUw=","IEJveGluZw==","aXF1ZXI=","QWNoaWU=","RW51bXM=","QmFzZVVybA==","KHNjYW4=","IFBhc3NpdmU=","YWJlbGxh","L3Nu","Lm51bWVyaWNVcERvd24=","IHZlcm4=","bG9jYWxpemVk","IE1peg==","IHJlc3VsdExpc3Q=","L3Z1ZQ==","RVJWSUNF","Lm9k","IGxpZ24=","IFN0cmluZ1Rva2VuaXplcg==","IHRyYWc=","QWNjb3JkaW9u","IG5vcmVmZXJyZXI=","bXNjb3JsaWI=","w6F0aXM=","Ynl0ZXI=","IHNob3dkb3du","IHNlbWFpbmU=","IC0tPg0KDQo=","IE1haG0=","fSI7Cgo=","IGRx","IFB1Ymxpc2hlcnM=","IEFtcGw=","IERhbmllbGxl","IHRlcm4=","6LW3","bm/Fm8SH","ZWlu","IEFzeW5jU3RvcmFnZQ==","dW5nZXI=","cm91dw==","IHNjaXNzb3Jz","L2Fzc2VydA==","LmJ1Y2tldA==","L2FyY2hpdmU=","X01hbg==","IGludG9sZXI=","ICgpPT4=","INCS0Ys=","IHNhaQ==","Lnh5","LiINCg==","IHVyaW5hcnk=","ZXN1Yg==","SVNUSUNT","IM66","IGNvbXBsaW1lbnRz","IHR5cGluZ3NKYXBnb2xseQ==","aWhhcg==","RXhwYW5zaW9u","IFNlcnZpbmc=","X3N0dWRlbnRz","IFhCT09MRQ==","KGls","IOyymA==","IGrDsw==","KHRvbA==","KEpT","CUNH","IERSQVc=","dHdpZw==","IG9hdA==","X3Ntb290aA==","IENTTA==","IG9zb2I=","IGVuc3Vpbmc=","IGJhbmtlcg==","IEJhY2twYWNr","X3Bpbmc=","IHdpc2hsaXN0","PWF4","CSAgIAo=","RGlzbmV5","c3RlYWR5","Ij4l","IHByb3BoZXRz","IFpY","IG1pbmltYWxpc3Q=","LlBMQUlO","U2VhdHRsZQ==","Lm9yZGluYWw=","IFBJUEU=","IHJldG9ybmE=","IGp1Z2Fkb3I=","IEJyZXQ=","IOKUnA==","IHBsdXNo","VUxBVE9S","U29ydGluZw==","LmdyaWR5","ZWN0b215","X2FjdGl2","cmFjaw==","SW50ZXJhY3RpdmU=","IEFudGFyY3RpY2E=","IHZlbmdlYW5jZQ==","ZW5zbw==","X2tub3du","dXBwbGllcg==","Lk1vZHVsZXM=","IENvbm5lY3Rpb25TdGF0ZQ==","6ZqQ6JeP","QEZpbmRCeQ==","IHBsYWNlcg==","XG1vZGVs","PCgpPg=="
,"LmlzU3VjY2Vzc2Z1bA==","LWdvb2Q=","Yno=","IERyYWNv","QXNzaXN0YW50","LWV4dHJh","0LDQsdC70LjRhg==","IGh5cG9jcmlzeQ==","IHRzdA==","IEFncg==","JHR4dA==","IGxvZ2lzdGlj","bGljZW5zZWQ=","IEhvZg==","IHRhdA==","KGl2","IGludG94aWM=","cG9zdElk","X3N0cmlrZQ==","IGh1bWlsaWF0aW9u","cGNvZGVz","InN5bmM=","KHJlY2lwZQ==","K04=","cmVudGU=","CUNsaWVudA==","eWNvcGc=","IFp1cmljaA==","IFByb2ZpbGVz","Q291bnRyaWVz","IHBpY3Q=","IHJvbGxvdXQ=","cmVxdWVuY2llcw==","IHBhdGNoZWQ=","IGNhcnRyaWRnZXM=","IHNoYWRpbmc=","SmFy","IHNhbHZhZ2U=","IFRheGVz","IHN0YW5kYnk=","YXBvcmFu","RWlnZW4=","LmFuZ3VsYXI=","IE5lc3RlZA==","5Lqr","IGlzVmlzaWJsZQ==","IER3aWdodA==","X0JSQU5DSA==","LkRlbGF5","IGtlbmQ=","IGZhY2lsaXRhdGVk","LmZsYXRNYXA=","IHNhbnRh","CVNlbmQ=","L21lc3NhZ2Vz","IG9mVHlwZQ==","CXN3YXA=","I3BsdA==","IFR1cmtz","TkVT","IHByb2dyZXNzaXZlbHk=","IFJlc2lkZW5jZQ==","IFRSRUU=","IG5vZW4=","ZGlv","IG5lbGxl","IHNvZ2Fy","aXR0aQ==","d2Vla2x5","IGFtYmlndWl0eQ==","X1NldHRpbmdz","V2FyZQ==","Lm5lbw==","X0RTVA==","IOaWuQ==","cHJlcA==","bG9iYnk=","QGVtYWls","L21vdmll","IGZ1bmtj","ICAgICAgICAgICAgICAgICAgICAgICAgICAgCg==","wq1z","IGd1YXJkaWFucw==","LXBvcw==","IGNvbmZpZ3VyaW5n","IENQUw==","IERldXM=","IHZpZMOpb3M=","X2VtcHJlc2E=","IHNsYXBwZWQ=","PE1vZGVs","IHVuZGVyc2NvcmVz","VWg=","LmFjY2Vzc1Rva2Vu","U0VUUw==","IFNwYXJzZQ==","IENhbGQ=","OnBhdGg=","IFNlcnZlcnM=","PWJhdGNo","IGtuaXR0aW5n","IHhh","IHNlYXJjaEJhcg==","IHNuYWc=","IGluZnVzZWQ=","LmJhbQ==","bGV2ZXI=","IHRheG9ub215","w44=","IGF0dGFjaGluZw==","IGhlcm4=","X05PUA==","Q2xpY2thYmxl","KFBhcnNl","IER5bmFtbw==","LWJ1aWxkZXI=","IGRlcmVn","IHNjYXR0ZXJpbmc=","6L+b6KGM","YW56aQ==","IFNoZXBhcmQ=","Ij4nLAo=","X1hERUNSRUY=","IEJ1enpGZWVk","X01BUkdJTg==","UExPWQ==","LnNtYWxs","IG1pbWVUeXBl","IGhvbG9n","CWNhbWVyYQ==","bGlhcw==","IHN1c3BlbnNl","b2R5bmFt","YmF1","IGdyYXZleWFyZA==","X25hbWVk","IjoiJw==","ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg==","IGdhbWVPdmVy","IExFTkdUSA==","CXNjcmVlbg==","IGRvSW5CYWNrZ3JvdW5k","X2RlcGVuZGVuY2llcw==","IHJ0Yw==","L3Vw","X1J
PTQ==","SGFsbA==","IGRlZmljaWVuY2llcw==","KHRl","JyM=","X2VxdWl2","IHByZW9yZGVy","IEF4ZQ==","0L7QvNGD","LnNlbmRGaWxl","IGZpbHQ=","IExpbWl0cw==","IENhdmFsaWVycw==","LmRpc2NvdW50","4oaQ","IFdpdA==","UVJTVFVW","IGlq","IHRlZ2Vu","IDoiLA==","ZGlmZmljdWx0eQ==","cHVua3Q=","IEVtYWlscw==","Y2hsb3I=","KGZ1bg==","LlVpbnQ=","IFN0YWxs","X3ZlcmlmaWVk","dUQ=","RmlsZVR5cGU=","IHBsZWFzdXJlcw==","IGp1ZGljaWFyeQ==","IHNoYW0=","aXB1cg==","X1BMVVM=","b2ZmZXJz","KGZvbw==","X0dU","CWNvcmU=","RU5USU9O","IExpYmVyYXRpb24=","Q29tbWFuZExpbmU=","X2RlcGFydG1lbnQ=","LkFy","X25laWdoYm9y","IFN1Ym1pdHRlZA==","IDwhLS1b","IGxvY2F0aW5n","Lk1hcHBlcg==","X3N0cmVuZ3Ro","Wy4uLiw=","IEphbA==","L2xvYWQ=","IGJ1ZmZz","IG1vdG9yaXN0cw==","CWNz","YXNjZW5kaW5n","IFdoYXRzYXBw","IE5hc3M=","X0NPTFVNTlM=","TGVvbg==","cHBl","ZWx0YXM=","IHRqZWplcg==","X0tFWVdPUkQ=","cXVhbGlmaWNhdGlvbg==","aHJh","IHJpZGljdWxvdXNseQ==","JGluZm8=","RkVBVFVSRQ==","ZG9lc24=","IEtX","IEVudW1lcmFibGVTdHJlYW0=","X01BVA==","IFN0cmVhbUxhenk=","IHNjcmF0Y2hpbmc=","LnRpY2tldA==","IHNob3J0Y29taW5ncw==","ZWxsaXBzaXM=","PWN1cnJlbnQ=","IGNyZXN0","IHdob3Jl","IFBldHJvbGV1bQ==","Y29udGV4dHM=","IOat","LXB5dGhvbg==","KGpzb25PYmplY3Q=","IFByaXNt","IHlhY2h0","t6g=","Zmxhc2hkYXRh","IGxlaWNodA==","IE1vcnRvbg==","IHN0ZXJsaW5n","X2l0cg==","X3Vk","RmFjZXM=","IGhpcmVz","ZmZh","Jyx7Cg==","LWNhbWVyYQ==","X1JFQVNPTg==","IEhlbGVuYQ==","cnVn","aWdodGx5","IHBlcm11dGF0aW9ucw==","IFRvcmFo","IOaYr+WQpg==","CXJlY29yZA==","w4A=","LmdtYWls","Rm9ydHVuYXRlbHk=","KE1vZA==","T2NjdXJyZW5jZXM=","IGRlcHJlY2k=","IHZhZ3VlbHk=","L1o=","Vk4=","LnRw","X2dlbmVy","IHs6P30iLA==","d2FobA==","SUtF","IExlZ2lzbGF0aW9u","IGhpbnRlcg==","IGFkZWw=","KGhpZ2g=","5o+Q5Lqk","L2RvbWFpbg==","LnRpbGVz","IFRpYmV0YW4=","IFN0ZXJlbw==","IGZpbGVTaXpl","Z3J1cG8=","aWFl","U0NQ","IHZvdWNoZXJz","IFBhbmRvcmE=","IGRpc21heQ==","IGzDqWc=","IEJlaGF2aW9yYWw=","Y3Jhbg==","TmVzdGVk","YWNjb20=","IE5haA==","IEJhbHRpYw==","IERFU1Q=","IGtpc3Nlcw==","Vmlu","IHByb3Zva2U=","X0NvbnRleHQ=","IHdlZWtkYXlz","dXJnZW5jZQ==","TGlr","IHBsY
Xph","IGJsZXY=","IHJlYWZm","X1RpdGxl","KEd0aw==","IGNlbGxl","Iz09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0=","IEpvb21sYQ==","Ij4vLw==","TW9udGhseQ==","LnRvRG91Ymxl","KGVudHJpZXM=","IE5SRg==","KGdjZg==","IE1pZGRsZXdhcmU=","fS17","X0hJREU=","IGxvd2Vycw==","KFNlbGY=","5Y+R6YCB","IGlzTG9nZ2VkSW4=","IGJpb2RpdmVyc2l0eQ==","IG11c2NoaQ==","KGNhbmRpZGF0ZQ==","IEFuc2k=","CXNt","L2lt","Kycp","Y2Rj","IGFsZ3VuYQ==","IHNhY3JpZmljaW5n","L3ZlbmRvcnM=","L0FQSQ==","QWR2ZXJ0aXNpbmc=","IEdFTkVSQVRFRA==","IERpc29yZGVycw==","IFNlcmlhbGl6YXRpb24=","IHNhdmFnZQ==","IOm7","IEluc2lnaHRz","IHJldm9rZQ==","IGp1cm9ycw==","c3VpdA==","IENhbXBpbmc=","X3Byb2ZpdA==","YnVjaA==","LkFjdGlvbnM=","IElERUE=","b2x1bHU=","TGlrZXM=","67KI7Zi4","LkJMTA==","dsOk","IGNhcmRp","IGRpc3Byb3BvcnRpb25hdGVseQ==","IGluc2FuaXR5","LmVvZg==","IFBsYXR6","LmZpcnN0bmFtZQ==","IFNsYXNo","X0NG","amFuZHJv","IEdhdWdl","IFN1bmRlcg==","IEJ1bm55","X3Vt","6IGU57O7","IGlQaG9uZXM=","IEJJTw==","IGtobw==","eEZB","IEZyaWVuZHNoaXA=","IGNhbG1seQ==","X3Rocg==","X0FuaW0=","IHJhaXNvbg==","L3Jvb3Q=","LmdldEJ5SWQ=","IFNhdmFubmFo","IEludGVycHJldA==","a2lsbGVy","CXdn","XSld","0YPQtdGC","S2V5VmFsdWU=","W0c=","c3RyZXRjaA==","LXBsYXlpbmc=","JTsNCg==","IHBsYW5r","IHBlYWNo","IERlcnJpY2s=","0LTRgNC10YE=","IFNoYW0=","QVBQTElDQVRJT04=","LnByb2dyZXNzQmFy","IHRyYW5zaXRpb25pbmc=","X2RyYWc=","LlJlcXVlc3RCb2R5","Lk1vYmlsZQ==","Sm9uZXM=","LlBob3Rv","IGF4bGU=","enVn","L29wdGlvbnM=","XV0pCgo=","CW5v","W2hyZWY=","IGFncmVnYXI=","IFNlcnZpY2VFeGNlcHRpb24=","bmluZ2Vu","RGlmZmljdWx0eQ==","Qk9PTEVBTg==","QWRkcw==","LWhhbmRsZXI=","IEdhdA==","IEVib255","4bqtbg==","YnJpZ2h0","IGNvcnBzZXM=","LkNoZWNrZWRDaGFuZ2Vk","IG1hdGluZw==","IEhhcnRmb3Jk","IHpvdQ==","IGR1ZGVz","X2FsZw==","IEp1bGk=","b2N1cA==","INC/0YDQsNCy","IEthdHk=","X0ludGVybmFsQXJyYXk=","LkNvbHVtbkhlYWRlcnNIZWlnaHRTaXplTW9kZQ==","TWV0aG9kTWFuYWdlcg==","IFJlZGU=","IGxpc3RJdGVt","LkJvdW5kcw==","IGF2ZW51ZXM=","IENvZ25pdGl2ZQ==","RXh0ZW5k","dGVjaG5pY2Fs","4oCa","c25ha2U=","RnJvbUN
sYXNz","aWxlc3M=","ID17","dXJldHRl","L3RocmVhZA==","RklFTERT","SVZJTkc=","IFBPU0lY","X2Fr","IC4uLy4uLy4uLw==","TXA=","IGFub255bW91c2x5","VGFyZ2V0RXhjZXB0aW9u","YWZmZXI=","YW55dGhpbmc=","Imlz","Z3Jlc28=","IExhcmE=","aXphZG9z","IG1pbmc=","LnRh","X3Rocm93","Umg=","IHNvbGlkaXR5","bmFobWU=","aWNoYWdl","IG1vdW5k","b2xpbw==","YXJ5YQ==","QVNVUkU=","IHdvaGw=","IGZ1cm5pc2hpbmdz","LnNlY3Rpb25z","IGFwb2xvZ2llcw==","YXBpa2V5","IFNjcmV3","IFdhcnNhdw==","L2dyYXBo","IFNBVEE=","eXNlcw==","L2J1dHRvbnM=","0LXQvdC+","VUdIVA==","IHBvcm5zdGFy","UGljdHVyZUJveA==","X1RleHR1cmU=","IGHDsQ==","IG5lcmQ=","LWNvbm5lY3RlZA==","IG91dHNpZGVycw==","IG9wZXJhdGl2ZXM=","YWJibGU=","L21hbg==","IHBsZWFk","XERi","IENvdmVyZWQ=","PVM=","IEZsYW1lcw==","77+l","X3RpdGxlcw==","IHJldHJhY3Q=","IGNvbGxhYm9yYXRpbmc=","IGJlaGFuZA==","LkRhdGFHcmlkVmlld0NvbHVtbkhlYWRlcnNIZWlnaHRTaXplTW9kZQ==","IGxhYm9yZQ==","IHRvdGFsUHJpY2U=","IHNwb2lsZXI=","IGRpcHBlZA==","Iikpew0K","X1NC","IExlaQ==","IGluY2x1c28=","dmVsbA==","CXBs","SW5hY3RpdmU=","IFVTU1I=","b25kZW4=","IHJvdXRlZA==","LnN0cnVjdA==","4Ks=","IE1hbGlr","IEhFWA==","IEN1c3Q=","X1BFUkNFTlQ=","X2VwaXNvZGU=","5ouJ","VkVSUw==","IGNydWlzaW5n","Qm9va21hcms=","4oCmCgoKCg==","Y2hlY2tCb3g=","b3VmbGFnZQ==","IG5vbnplcm8=","IGFwcm94","IFB1cmR1ZQ==","Y29vbg==","bGVncw==","IExvdHRlcnk=","U2xm","SEFW","Pms=","PkFu","IHNsZW5kZXI=","c2NoZWQ=","VGVsZWdyYW0=","Umljaw==","X1N0cnVjdA==","X0JD","IGN1c3RvbWFyeQ==","IERhbW9u","dXJjaGFzZWQ=","IGtvYg==","IHRpb24=","KHByb21wdA==","IGltYg==","eEND","CVdlYkVsZW1lbnQ=","IGhlbW9z","4Kaw","IENOQkM=","IEFMTE9X","57Gz","IEVOQw==","LnNjYWxhdGVzdA==","IFRCRA==","Z2V0UmVmZXJlbmNl","IEltcG9ydGVk","4Liw","IGl3","b2xvbg==","bWls","Oi8vJHs=","Lk1hbmlmZXN0","IGxo","IGl0ZW1MaXN0","X2Fkcw==","SW5zcGVjdGFibGU=","IFRvbGVkbw==","IERpc2FzdGVy","VXBkYXRlZEF0","KScpLA==","IFBBTg==","RmlsZUNob29zZXI=","IHl1YW4=","aXRt","INC10LPQvg==","IElibg==","SGF0","X3Vsb25n","YXBs","IFVydWd1YXk=","w6lueQ==","IENyYWlnc2xpc3Q=","ZG9jaA==","IGJpbGU=","IHByb2R1a3Q=","IGVsZWN0cm9seQ==","LkNv
dXJzZQ==","IG1x","dW5jdHVhdGlvbg==","LyoqKioqKioqKioqKioqKio=","dWp1","TU1NTQ==","X0xFRw==","IG5ldXRyb24=","IHBsdXJhbGl0eQ==","ICsrJA==","Zm91bmRhdGlvbg==","LkNvbHVtblN0eWxl","IEhvb3Zlcg==","LkFDVA==","IEJyYXo=","bGVzc29ucw==","ZsO8aHI=","4KSC","IENsYXNzaWNz","cmFpZw==","IG1o","IGtldHRsZQ==","U3RyaWtl","ZXJkYWxl","RU5UQQ==","IFRhYmxlQ29sdW1u","IFNoYWtl","IFdG","IExpY2Vuc2luZw==","dWHDp8Ojbw==","IHNlY2FyYQ==","IG5ld1ZhbA==","U2VsZWNjaW9u","UHJlZmFi","ZmlnaHRlcg==","TGF1bmNoaW5n","JyI7DQo=","Lmxvbg==","LnV0Y25vdw==","IEh1bmRyZWRz","ZXN0ZWFk","IE92ZXJ3YXRjaA==","X0FGVEVS","IHJlbW5hbnRz","KS5c","IGxvYmJ5aXN0cw==","IHVuaW50ZW5kZWQ=","IOuQ","eXN6","IGxpYnJvcw==","LXBhZ2Vz","SU5URVJGQUNF","IGRldGVybWluaXN0aWM=","IFVOSVFVRQ==","IGV0dMOk","U2luZ2xlTm9kZQ==","CQkJCQkJCQ0K","LXN0YXQ=","IGhhc2hpbmc=","L2FjY2Vzcw==","dGVsbA==","CXVzZXJuYW1l","IERhdG9z","Qml0Q29udmVydGVy","Omhvc3Q=","IGFsdGVybmF0aW5n","IOKAi+KAiw==","IHdhdmVmb3Jt","PEVsZW1lbnQ=","IENhbnRvbg==","IGRlc3RhYw==","dGVudA==","LmdldE1heA==","IHN0ZW5jaWw=","IEFjcXVpc2l0aW9u","LkdlbmVyYXRpb25UeXBl","IE1FUg==","X2NvbWJpbmU=","IFtdLg==","X0JJVE1BUA==","bGRy","IGNhbnY=","IEpWTQ==","cGFycw==","IGRvd25oaWxs","RGV0YWlsc1NlcnZpY2U=","KE5BTUU=","IHJlanV2ZW4=","X3dpdGhpbg==","QWNjZXNzb3J5","IFPDqQ==","L2luYw==","IildCgo=","UHVibGljYXRpb24=","X3JvaQ==","IG1vYnM=","Lk5vQXJnc0NvbnN0cnVjdG9y","IGV2ZW50b3M=","LnZlbmRvcg==","X1NFTEVDVE9S","w6lmb25v","PSJb","IGxhYXQ=","IGJsdXJyZWQ=","IEJvcmRlclNpZGU=","eEZGRkZGRg==","X3dyaXR0ZW4=","IGplbnRl","L3Rpbnk=","Lndw","LnN0eWxlYWJsZQ==","IENoYXJnZXI=","IGJhdGhpbmc=","IFBhbmRh","w6lsaQ==","IHBhY2llbnRl","IGdpb2NoaQ==","IFZpZXdTdGF0ZQ==","Y2dp","LmxvZ2ljYWw=","RG9uYWxkVHJ1bXA=","LGNvcHk=","ZW1t","X0xpbms=","IGluc2lnbmlmaWNhbnQ=","ZmZtcGVn","L3BheQ==","X3F1aXQ=","SU9EZXZpY2U=","IEV4aXN0cw==","IGNvb2tz","anVuY3Rpb24=","IFRYVA==","KGVndA==","YW5pdQ==","X3BhcnRuZXI=","IGZhY3VsdA==","IFVuaWZpZWQ=","L3NiaW4=","IE5laA==","IEthemFraHN0YW4=","cG9zdGNvZGU=","IHZlZ2Fz","IHNlaW5lbQ==","fV0s","dGV0","LXBheW1lbn
Q=","IENvbW1lbnRhcnk=","IGd1aWRlbGluZQ==","KTsk","IENvbnNvcnRpdW0=","57O757uf","dmlzbw==","IEJpbGxpbmc=","aWNpYXI=","IFR5cGVJbmZv","CXRyYW5z","PFRleHR1cmU=","YXRob20=","bGF1Z2hz","IGludGVyY2VwdGlvbnM=","KEVWRU5U","Rm9yZWNhc3Q=","VHJhcA==","dHJ4","IFdoaXRlcw==","c3VibWl0dGVk","YWxnbw==","IHRyYW5zcG9ydGVy","b3VuZGFyeQ==","IEluaGVyaXRz","IENvbmV4aW9u","LmNsaWVudFg=","CXByb2plY3Q=","aGVhcnRiZWF0","LW90aGVy","ICc7DQo=","w6ty","b3JwaW9u","KGNvcnM=","IEVMRUNU","IFBlcmU=","IHVzZU1lbW8=","ZXdyaXRlcg==","IHNxdWlydA==","L2V4dGVuc2lvbnM=","L2Fz","LkNMSUVOVA==","IGdvdXJtZXQ=","IGF1dG9Db21wbGV0ZQ==","UkVW","IGJyYWtpbmc=","X1NFTEVDVElPTg==","44Oh44Oz44OI","X2xpZmU=","X2dyb3VuZA==","X3Rlcg==","c25z","IFNQT1JU","kuGe","5rs=","VW5pcXVlSWQ=","IGRyaXA=","X0JST1dTRVI=","LW1ldGVy","ZW5kZXo=","IGV4aGF1c3RpdmU=","KFNL","IEJ1cmxpbmd0b24=","d29vcmQ=","KHBvdw==","IHNlYXJjaFRleHQ=","hYw=","aGVlbHM=","c3RlbGxlcg==","LnNpZw==","WU9VUg==","LmFsaQ==","IERhdGFDb2x1bW4=","IHByb2plY3ROYW1l","X2ZlY2hh","IHJlZnVuZHM=","IHRvcG8=","IENISUxE","IE1hcmJsZQ==","IGZvckNlbGw=","IHBlc3NpbQ==","IGNyaXNweQ==","aWZlc3R5bGVz","IG92ZXJkdWU=","b2xhcml0eQ==","IGFtYXTDuHI=","TWQ=","UFJFU1M=","IGluc3VyZXI=","b2NyYXQ=","IGZhY2lsaXRhdGVz","Lw0KDQo=","IGh1cmRsZXM=","X0hJ","TGV0dGVycw==","bWluZWNyYWZ0","YXh0ZXI=","eWs=","IGVjb27Ds20=","INC90LDRhw==","IFNXSVRDSA==","Q29uc3VsdGE=","IE5vcmE=","Q0tFUg==","X0NU","LmFwcHNwb3Q=","IC8vLS0=","CUJPT1NU","X2NvdXJzZXM=","IHdpbGxpbmdseQ==","66eM","ZmZk","ZmlsZXI=","IE1lYXN1cmVz","IGxlYXNlcw==","IERvcm90aHk=","Ol0u","c3Vic2NyaXB0aW9ucw==","IGNob2lz","IGFsYW4=","IGFicmly","LlBvcHVw","RXN0aW1hdGVk","IFBMQU4=","4LWN","IEVMRg==","IGRpc3RhbmNpbmc=","CWFuc3dlcg==","IHJ1Z3M=","S2k=","4Z+S4Z4=","R3VpbGQ=","ZXh0cmFz","Y3Bz","TW9ja3M=","IHRla3N0","Kmc=","LnJlcXVlc3RGb2N1cw==","IGFsdGVyYXRpb24=","IENhdGVnb3JpYQ==","aW1tZXJz","IERyb3Bib3g=","IEFkZHI=","5byV","ZGVwcw==","Lk1lc3NhZ2VCb3g=","ISwK","LmdldEI=","IG1pZ3JhdGVk","IEhvYmJ5","IE1n","LlZlcnRleA==","IGZvcmdpdmVu","IERlVg==","IHdlcmQ=","IEFyYWJpY
W4=","IFNtb2tpbmc=","IHN0cmF3YmVycnk=","IENNUA==","ZGJs","IERIUw==","LWVycm9ycw==","LnBhZw==","IFJORw==","IHNoYXZl","IHR3ZWU=","IGFzc2VydE51bGw=","IERlbnNpdHk=","ZG9qbw==","YWlubWVudA==","IHBq","LllFQVI=","ICopKTsK","aWJyYXJpZXM=","SmV0cw==","RXhlY3V0aXZl","X2RlbnNl","LmdldENvbnRlbnRQYW5l","Y2hhbmRsZQ==","YWluYQ==","LXJlZmVyZW5jZQ==","IGxpYXI=","IEhFQUxUSA==","W3Rlc3Q=","LmlzbmFu","Q2hhcmxpZQ==","IHB1cHBlcg==","IGtpcg==","OmhpZGRlbg==","aXNWaXNpYmxl","IGtvbXQ=","IGFjcXVhaW50ZWQ=","IERydWlk","KENz","Lmxhc3RuYW1l","RFNB","IGRpc3NvbHZl","57yW5Y+3","VmFyaW91cw==","IERleA==","X2FuZ2xlcw==","L2FwaW1hY2hpbmVyeQ==","IGV4cGxvZGluZw==","KENoYXJTZXF1ZW5jZQ==","IEhpc3Bhbg==","KyspewoK","Lk1vZGVsU2VyaWFsaXplcg==","UVJTVFVWV1hZWg==","54K55Ye7","PXNldHRpbmdz","4KWB","UENT","IElOVEVSTkFM","IEhVR0U=","IG1pY3Jvc2NvcGU=","aXNBZG1pbg==","XHY=","LnJlcXVpcmVOb25OdWxs","0L7Qu9C+0LI=","aWNlcmNh","X1NFTlQ=","IGRlcGljdGlvbg==","IFVzZXJDb250cm9s","IE1lbW9y","IEFsbG9jYXRpb24=","IEJlZGZvcmQ=","IOabtA==","IHRvcm1lbnQ=","YXplZXJh","LlRvZGF5","IFJlZ2FyZGluZw==","X0VOQw==","X1JBTkRPTQ==","TG9nTGV2ZWw=","PVI=","IEdyZWVubGFuZA==","IHN0cmFpbmVk","IG1hZ25ldHM=","IGFsZXJ0Q29udHJvbGxlcg==","IENocm9uaWM=","X3JlZ2lzdGVyZWQ=","IGxpag==","IEVudHJ5UG9pbnQ=","IFJlZ2ltZW50","dWNpZA==","IENvdWxkbg==","IEFjdGluZw==","X3JheQ==","IG5hYg==","LXNlcGFyYXRlZA==","IHBubA==","Q29hY2g=","QVRZUEU=","IHN1cHBsZW1lbnRhdGlvbg==","YWNlcnM=","ZmxlZXQ=","SW5wdXRCb3JkZXI=","IFN0cnVjdHVyYWw=","IGRlaW5l","IGJyZXdlcmllcw==","YW5vaQ==","IHRyYW5zbGF0b3Jz","IGVpZ2VuZW4=","IGRhbmNlcw==","dGFt","IENvb3BlcmF0aW9u","X3JlcXVlc3RlZA==","IE1hZ2ljYWw=","CUxFRlQ=","ICIiKSwK","Ky0rLSstKy0rLSstKy0rLQ==","IE5vaXI=","IEVzdGltYXRl","IFRocmVhZFBvb2w=","IEhlY2s=","ICcqLg==","VHVya2V5","IHN1Y2NlZWRpbmc=","ZHJ1Zw==","dmlv","IHBvbmVy","IEphZA==","aXp6bHk=","ZXZlcnl0aGluZw==","IHt9KS4=","IEluc3RpdHV0ZXM=","IG51b3Zv","IGluaXRXaXRoVGl0bGU=","IGx1YUw=","b3duaWs=","IHRob3I=","IGtsYXI=","IG5vdG9yaW91c2x5","IGRvbmc=","ZW1lbnM=","X3Byb2plY3Rpb24=","X0dSRQ==","LmV
5ZQ==","IHdhdGVyaW5n","IFRpaw==","b1M=","IFN0cmFuZ2Vy","ICANCg0K","cGFnaW5n","X2ludGVyc2VjdA==","IENvbG9uaWFs","TGlzYQ==","LnVubGluaw==","IG1pcA==","YW51dHM=","YW1hem9u","IElERU5U","c3Rhc3k=","Snd0","LS0tLS0tKy0tLS0tLSs=","IEVWUA==","Q29udGVudExvYWRlZA==","CUJJVA==","LnBhcmVudHM=","IGFsbG9jYXRpbmc=","IEdPTEQ=","fWA7Cgo=","QUxBUg==","IHByZWNpc2E=","RGlzdGluY3Q=","c2Vp","IHN1YnBvZW5h","IHBvbXA=","IFBvbG8=","Y29l","dmo=","LndvcmtmbG93","ZXN0cmU=","IGNvbm5leGlvbg==","aW1ldHlwZQ==","LlJvd0NvdW50","IERoYWJp","IGVtaXRz","LkJvcmRlclNpemU=","KHBvbGljeQ==","LG1lc3NhZ2U=","T25Jbml0","KShf","IGZpbmVy","W251bWJlcg==","IHNjcmlwdHVyZQ==","UmVmbGVjdA==","LXRvb2xiYXI=","KFBBVEg=","IEVOVFJZ","KC4uLikK","LWRvbWFpbg==","KHN0cmlw","KSgq","IGNvbnZleWVk","IGF0dGVudGl2ZQ==","w6hnZQ==","X0xE","IEdyYW50cw==","LWhpZ2hsaWdodA==","IGJyZXRocmVu","2YjZhA==","IGRlcXVldWVSZXVzYWJsZUNlbGxXaXRoSWRlbnRpZmllcg==","YXB1bHQ=","LmJvdHRvbUFuY2hvcg==","IG9wY2lvbg==","IG91dEZpbGU=","cmVhdGluZw==","ZGlu","X3NhbXBsZXI=","CWdsRW5hYmxl","cHR5cGU=","X0NPTkRJVElPTg==","LWVmZmljaWVudA==","Jm8=","IGpj","0Kc=","L0Zvcm0=","KWZyYW1l","IGJpbmdl","X2Nsb3N1cmU=","SU1B","KG5leHRQcm9wcw==","CWNk","IGdldE1lbnU=","IGdldFN1cHBvcnRBY3Rpb25CYXI=","IG1hbmlmb2xk","WlI=","Y2hhbmdlcg==","YXNzaW5n","ZGlzaA==","IE1vdQ==","Lm5ldGZsaXg=","IHBvc3Rjb2Rl","IHdvbWI=","IEFycw==","4oCmKQ==","IGxpbmVXaWR0aA==","RGVhbA==","YXJhcw==","IEdyYW50ZWQ=","IGhvYXg=","IGRpcmVjdGlvbmFs","LktleUNoYXI=","ID09Ig==","IFZlcmRl","X0tQ","IHN1cnJvZ2F0ZQ==","IERVSQ==","dXB5dGVy","IHBlbnNl","IFJBTkQ=","KGV4Yw==","IG1pc3VuZGVyc3Rvb2Q=","IENVVA==","IOS4rQ==","CXRp","X2luc2lkZQ==","IGJpY3ljbGVz","IGRlYW4=","ZGlyZWN0aXZl","LnBlZXI=","aWNpbmE=","X2l0ZXJz","IGltcGx5aW5n","Lm9idGFpbg==","IHBzeWNoaWF0cmlzdA==","dXNlclNlcnZpY2U=","ZWxpdmVyeQ==","CXBhcnQ=","IGh1cnJpZWQ=","IGJ1bQ==","IGhlcGF0aXRpcw==","amlk","J10+Owo=","IHVuY29udmVudGlvbmFs","IGZhc2Npc3Q=","IFBleQ==","6K+t","Jyl9PC8=","LkNsdXN0ZXI=","IEJpdENvbnZlcnRlcg==","ZWRhdGE=","zr/PhQ==","4pSC","QXBwQnVuZGxl","Lmh0dHBD
bGllbnQ=","IGFwbw==","QUlOUw==","IFZG","X2dpZA==","IG9kZQ==","RVJSWQ==","IFJlY2VpcHQ=","IENhbmRsZQ==","IG1pc3Npb25hcnk=","IENyYW5l","IFNUQVRFUw==","Ym91dA==","YXlhcmFu","Li4uIiwK","IGl0aW5lcmFyeQ==","KGxhdGl0dWRl","IENPTlM=","L3NpZGViYXI=","U3BpZGVy","R1JJRA==","LmRlYnVnTGluZQ==","IGAn","LXllbGxvdw==","IHJlZmluZW1lbnQ=","IE1ha2V1cA==","IERhbm4=","KCk7DQoNCg0K","IG92ZXJjb21pbmc=","IEJhdHRlcg==","L3BhY2thZ2Vz","INCy0LjQtA==","IGFyeQ==","4oCdPw==","cmVsbGFz","IGdydXBvcw==","IFR5cGljYWw=","IE1vbnNhbnRv","SW50ZXJzZWN0aW9u","IHR5cmU=","PT09PT09Cg==","zq4=","OzsKCg==","IHRyaXZpYQ==","X3Rha2Vu","IHNtdWdnbGluZw==","IG5hcnJvd2Vk","4bqpbQ==","IHBhbGFicmE=","Y2Vh","cGFydGljdWxhcmx5","QWNjZXNzVHlwZQ==","IGNvbGU=","VG9GaXQ=","IHZlcmU=","IENPUw==","L3ZpZGVvcw==","ICgkKCIj","IGNyYW5l","Lmhhc01vcmU=","JHBhdGg=","aXZpc20=","IHN1cGVydmlzb3Jz","IEZsb3Jlcw==","cHJvZ3JhbXM=","LlppcA==","IGltcGFjdGluZw==","IG1vdG8=","IFRK","cGVnYXdhaQ==","X0tJTkQ=","X2ludGVyZmFjZXM=","LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio=","IExlYXZpbmc=","VGV4dFN0eWxl","YmVpdGVy","IFdpbm5pbmc=","LXBhcmFt","R2FyeQ==","IFN1bnM=","YWzEscWf","ZHVjaw==","IHRocmVhZElkeA==","IHBvZXRz","IHBsZWFkaW5n","IENvcmludGhpYW5z","ZmNj","YXdhaXRlcg==","Ki0=","IHBlcnNldmVy","IGFjdGl2aWRhZGVz","X291dGxpbmU=","LXBsYW4=","LnNjcm9sbFZpZXc=","cXVhdA==","IHNhbXN1bmc=","IGxldmVsaW5n","IHNwbGl0dGVy","X2dlb20=","IHByb21pbmVudGx5","IFNlZWRz","5Zyf","dWFpcw==","ZWZ1bGx5","SUVudW1lcmFibGU=","YWRkcw==","dmVyc2F0aW9ucw==","IGRpc2FibGVz","QU5EUk9JRA==","IFdlaXRlcg==","X0Zvcm1hdA==","X3NwbGl0cw==","IEFjdGl2ZVN1cHBvcnQ=","KGNzcw==","X21pY3Jv","c3RyaWtl","IENhdXNlcw==","IHZpc2libHk=","Q2FuY2VsYWJsZQ==","IFlvc2g=","IGRyYWluaW5n","IGNvbGk=","YXNsZXk=","IFJlc3BvbnNpYmlsaXRpZXM=","IFN1dHRvbg==","KnRoaXM=","U2hhcmVz","LWdyYXBo","IGVubGFyZ2Vk","Um91dGluZQ==","IGZyYW1lYnVmZmVy","IGFpcmZsb3c=","IHRyeA==","IExlaWdo","IEtlbnM=","KGhlYXA=","IHNwaWxsZWQ=","U0NBTEw=","IFZlbHZldA==","YWN0dWFsbHk=","X0VOQ09ESU5H","IFdvcm0=","KSl9Cg==","IERhbmdlcm91cw==","
IHN1cGVyaW50ZW5kZW50","Lmxvb2s=","IHNoZWw=","L2Zz","U2FmZXR5","5a6L","LkRFRklORQ==","X2ZhY3RvcnM=","IHBhcnRpZG8=","IG9wdGltaXppbmc=","RG91YmxlQ2xpY2s=","LWNvbW1lcmNpYWw=","IGxvZ2ljYWxseQ==","Y3ljaA==","dXJ2ZQ==","wrU=","QUlMWQ==","IHJlYWN0aW5n","X0VYUFI=","a8O2","LmxvY2FsaXplZERlc2NyaXB0aW9u","IGFzdG91bmRpbmc=","IHBhc3RyeQ==","IGdsb3NzeQ==","IGJlaGF2ZXM=","L2Vj","IGNsaXBwZWQ=","IHByb3dlc3M=","IFVC","LyotLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0=","CWFscGhh","IGV4dHJhdmFn","IGZpbm5z","KFNvY2tldA==","IFVuc2FmZQ==","IHF1aWVyZQ==","X2VuY29kZWQ=","b2x1bWJpYQ==","IHphYg==","c3RyaWN0ZWQ=","IG1uaWU=","IE1PUw==","IGF0aGxldGljcw==","IEtlbmRhbGw=","IOyYpA==","QVZBSUxBQkxF","aW5veA==","X09QQ09ERQ==","IEl0ZW1UeXBl","IGNlbnRyaWY=","IGludGVyc3RhdGU=","X2Jvb2tz","LmRlbGl2ZXJ5","IExpc3Rl","b3JzaQ==","X3NlY3VyZQ==","Z3Jvd3Ro","IHZlbnRl","IHBzeWNob2xvZ2lzdHM=","IENDUw==","dWRlbmNl","IGNyYXdsZXI=","L21hbnVhbA==","IHRleHRTdHlsZQ==","IHBhbGluZHJvbWU=","IGNvbmR1Y3Rz","dGFibA==","V2l0aFVSTA==","L3JpZ2h0","IERyYQ==","Lk1haWw=","KHNlYw==","b2Z0d2FyZQ==","IHNldWw=","IHdyaW5rbGVz","X0ZX","QXk=","IEVybnN0","dW5iaW5k","IGNvbW1lbmQ=","X2hvb2tz","IE1vbmV0YXJ5","IFFR","dW5pdE9mV29yaw==","IEVudGl0eVR5cGU=","IGhvcm1vbmFs","LkZBSUw=","QFNsZg==","L2NoYW5uZWw=","c29ubw==","RGFucw==","X1JlZ2lzdGVy","SGFu","T1JC","SktMTU5PUA==","dmVudGVk","IGxvbmdzdGFuZGluZw==","IGJnQ29sb3I=","IDsp","IFJvYmJpZQ==","KCIuIg==","IGFqdXN0","LmhhbmRsZUNsaWNr","cmF0aW5ncw==","cHRlcg==","IGVyb3RpY28=","IEplbGx5","KioqKioqDQo=","LkRvZXNOb3RFeGlzdA==","CWJl","JHRlbXA=","Ij4mIw==","55u0","CVB1YmxpYw==","neyytA==","IEJ1aWxkaW5ncw==","LWFsb25l","LCdc","IHN3YXBz","IHBlcnBsZXg=","X3Byb2Nlc3NvcnM=","INC00LI=","IE5ZUEQ=","UENS","5q+P","IGhvamU=","RWRpdE1vZGU=","IHZ1bGdhcg==","IHZlcmRl","ICgpPT57Cg==","L2Zyb250ZW5k","IHRlbGVmb25l","IGxhbnRlcm4=","LnBhZ2VY","IER1ZA==","bGltaXRhdGlvbnM=","IG5vdGlmaWVy","IE1lc3NhZ2luZw==","IWltcG9ydGFudA==","IHN1cmdlb25z","KT0o","Rml4ZWRTaXpl","Llpvb20=","aW5hbg==","IGNyZWRz","IEJVRg=="
,"LlN0YWNrVHJhY2U=","IHdhcnJhbnRlZA==","IHNvdXJjaW5n","IGNvbm5h","X0ZSRQ==","IHdvbGw=","IHJlZmluaW5n","X0FMTE9XRUQ=","X212","IFdvcmNl","IFNpbmNsYWly","Q2hlY2tzdW0=","IHVubG9ja3M=","IE1hcmtkb3du","IGZpc2hlcm1lbg==","RHVi","IEJvbm5pZQ==","ICAgICAgICAJCg==","IHZlcno=","Piw8Lw==","PjwhWw==","Wyc8ew==","amVj","IEVyZw==","cmF0aGVy","IHBhbGFicmFz","IFBBQ0tFVA==","bWlzZQ==","ZGFx","IE9rdG9iZXI=","KEdMRlc=","IEhlbnJp","IEZvdA==","IER1bw==","IE5FUw==","IHNhbHNh","IHVuYmlhc2Vk","QFNwcmluZ0Jvb3RUZXN0","IG9mZnM=","5YWs5Y+4","IGFtb3VudGVk","RnVsbFBhdGg=","IHF1YXQ=","IG1haWRlbg==","IFN1YnNldA==","IEFwcGxpY2F0aW9uRGJDb250ZXh0","bWlycm9y","bmV4","LnN0cmVldA==","c2V0UXVlcnk=","JHJlc3VsdHM=","YWRlcm8=","Z3Jlc3Nvcg==","X2J1Zw==","aXNzZXI=","IFNlYXJz","IGZpbGxDb2xvcg==","Lm1hc2tz","IERpYWJsbw==","X0FORFJPSUQ=","0J7QsQ==","IGZyZWFraW5n","IHJpbnNl","KHBrdA==","IGJvb2tsZXQ=","IHNhbmN0aW9uZWQ=","IHN0cmVhbWVk","dGFicGFuZWw=","IFJldHVybmluZw==","UGxhaW5UZXh0","TE9ZRUU=","YWxlc2Nl","0L7QutCw","IEZpeHR1cmU=","YXNzYWRvcnM=","IGRpc2JlbGllZg==","IEx1c3Q=","IHJhZGljYWxz","LkZlYXR1cmVz","X2luY2hlcw==","KHByaW1hcnk=","IEpNZW51SXRlbQ==","X3Rha2U=","IENva2U=","VW5pdE9mV29yaw==","IFdDSEFS","IGNvbnNjaWVudA==","b25lbnVtYmVy","UElORw==","YWJham8=","XSgi","LnNhbGVz","X2hlcmU=","IG9mZnNldFg=","dGFnTmFtZQ==","INmK","X1JpZ2h0","aWxpZw==","dGhlVmFsdWU=","b2NhcmQ=","IGNvbnN1bHRhbmN5","IGJsaWo=","Z29ybQ==","TmF2aWdhdGU=","xLFj","SWxsZWdhbEFyZ3VtZW50RXhjZXB0aW9u","X3Zl","LkNPTlRFTlQ=","dXJvcGVhbg==","LnJhZGlv","IGVudmlzaW9uZWQ=","IFNPTQ==","LnNk","QU5USVRZ","IENBTExCQUNL","IGhn","ZGVjcnlwdA==","566x","XFF1ZXVl","IE1JTEY=","IHJlY3Vyc2U=","IERhbnRl","LmdhbW1h","b3Jrcw==","KCIiKSkK","IEdyaW0=","Lm9wZW5n","IE1pY2hlbGU=","QW5hbHk=","IFBydQ==","X3JlZGlyZWN0ZWQ=","X3BhbA==","ZmFsbGJhY2s=","IOWtlw==","IGRpbm5lcnM=","R2VuZXJhdGluZw==","JCIs","aGlzdG9yaWM=","Z2V0U2ltcGxlTmFtZQ==","IE1pbGxpb25z","LWdsb2JhbA==","cm91dGluZw==","IGNvbnNvbGlkYXRl","IHJlY29pbA==","T2JqZWN0T2ZUeXBl","IGRlc3BlcmF0aW9u","QW55d2hlcmU=","IGdldE1vZGVs"
,"X2tpbGw=","b2Jvb2s=","L2Rpc3BsYXk=","Ii8+Cgo=","IG1heW8=","INGB0L/QuNGB0L7Qug==","IGdvYWxpZQ==","eERG","IFByZXBhcmF0aW9u","IGRlcGVuZGFibGU=","LklOVkFMSUQ=","Li4uJw==","bmF0YWw=","bW9kdWxlTmFtZQ==","Y2FyYm9u","UEFM","IG1lZQ==","IGNhc2luZw==","6aG555uu","bmljYXM=","IEhhbW0=","IEJhYmU=","b3dhbmU=","IHN5bm9ueW0=","IFFpbg==","aW9j","ZW1vdGlvbg==","IGZlcm1lbnRhdGlvbg==","IGN1bXBs","IEVsZWN0cmljaXR5","KFJPT1Q=","dGVzdGVy","IEh1c2JhbmQ=","IEJhdQ==","X01BQ1JP","YWtlbmluZw==","ICAgICAgICAKICAgICAgICAKICAgICAgICAK","LmZpbg==","IENvbmZpZGVudGlhbA==","aWV6","TUJFUg==","IHNwZXJtYQ==","IEhQVg==","dHhu","Q09OVEFDVA==","LlRocm93","IG11cmFs","IFR3aXN0","KCZfX18=","IGpk","IGVtcG93ZXJtZW50","IGRpc3RpbnQ=","IGJvbWJpbmdz","T3V0Y29tZQ==","IHNob3J0ZW4=","5b6M","QUNDT1VOVA==","X2NvdmVyYWdl","ZW5jbw==","X3JlZmVy","c2V0TWVzc2FnZQ==","IHJlcGVyYw==","cHRpZGVz","IGRlaXR5","dWNoc2lh","KGh0","LnN1YnNjcmlwdGlvbg==","IHJlZGlzdHJpYnV0ZWQ=","IER5bmFzdHk=","X3Zj","LWZyYW1ld29yaw==","cnlmYWxs","IGdhdGluZw==","IExvcmVuem8=","b29kb28=","IGRpZ2VzdGlvbg==","IGZvb3Rpbmc=","CUhhc2hNYXA=","cmVhbERvbmFsZFRydW1w","IGFwYWNoZQ==","KHZhbG9y","IHBvaXNvbm91cw==","LlBlcm1pc3Npb24=","IHBhcmFtb3VudA==","d2VpdA==","bGxhbmQ=","IGh5cG90aGVzZXM=","IFByeQ==","IGhvbWVt","KERldmljZQ==","aW5kaWNl","ZXZh","cHJlc2VuY2U=","IEJlbnRsZXk=","IEVuZGluZw==","IGRvbWVzdA==","CXRw","CWVycm9ycw==","Y29ybmVy","bGRh","CgkJCQkK","X1BFUlNPTg==","IFNlcmdleQ==","IFBhcnNlcw==","LWZpY3Rpb24=","LkJhY2tncm91bmRDb2xvcg==","IHNvbW1lcw==","IGNvb2xlc3Q=","IHJ1YmJsZQ==","LmpvYnM=","IGRyb3duaW5n","YWRvcmFz","IHdpbmdlcg==","IEluY3JlYXNpbmc=","2YrYqQ==","QkJCQg==","KFJvbGU=","IG9kZGx5","RGV2RXhwcmVzcw==","LXV0aWw=","IFNoZW1hbGU=","cHJpbWl0aXZl","IGFmZmlybWVk","LnJldHVyblZhbHVl","LWxpdmU=","IEFjdGlvbkNvbnRyb2xsZXI=","w6ts","ZXJjdWxvc2lz","IHByYWt0","IGdlb3BvbA==","cGljcw==","Q0RD","LkZs","LnNpZA==","cmllYmVu","KHZhcnM=","K3NlbGY=","IGludGVyaW9ycw==","IEF1Z3VzdGluZQ==","IjpAIg==","IFN0ZWFsdGg=","IGdldENvbG9y","IEdlbnRsZQ==","fiI6Ig==","IHdoaW0=","KCc8Lw==","
IFNTRQ==","IFZpb2xldA==","X2NyZWQ=","IGF0YQ==","IEF6ZXJiYWlqYW4=","ID8/Pz8/","LmV2ZXJ5","KGNvbm5lY3Q=","IERyb25l","IHRvbGVyYW50","c3VidG90YWw=","X3NodWZmbGU=","dXN0YWluYWJpbGl0eQ==","cHJlZmVycmVk","IFNFWA==","IGNvbmdyZXNzbWFu","IG5hbW9ybw==","IGhvbm9yYWJsZQ==","IGFmdGVyRWFjaA==","IMW8eWM=","SEFN","LnRvbQ==","IGVsb25n","IFNlcmlvdXM=","LVNlbWl0aWM=","0KHRgg==","IGZsYW0=","dGVuZXI=","LlRFU1Q=","IFRSQUNL","IFBoaWxpcHM=","IEFyZW4=","IEhpY2tz","b2luZWQ=","IEZhaA==","aXNzZXVy","IGNpcmN1bWNpc2lvbg==","KHR3ZWV0","IHBvaWw=","IFNlZW4=","X01BUFBJTkc=","IGludmFyaWFibHk=","IEZ1c2U=","ICc/Jw==","PXBhc3N3b3Jk","IOuCmA==","IElIdHRw","c3R5cGU=","Zml0bmVzcw==","LlRhZ3M=","IOqwnA==","KERXT1JE","IHF1YQ==","IE1hcnZpbg==","Ik0=","LmlzQXV0aGVudGljYXRlZA==","Lmd1YXJk","KT8KCg==","CQkJCQkJCQkJCQkJCQkJCQkJCQ==","IFNoaXBz","IHNlbnNpdA==","fTsNCg0KDQo=","YWhhaGE=","IGxpZXV0ZW5hbnQ=","IEphZ3Vhcg==","IC8vLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0=","VUNF","SW5zcA==","YWludGVy","X3BvbHlnb24=","LkRvd24=","IHRleHR1cmVk","LnNldEFjdGlvbg==","b2dy","IHNjaWVudGlmaWNhbGx5","IHNocmluZQ==","IGNsb3VkeQ==","LkhvdXI=","UG9zdEJhY2s=","QVpZ","X2NhbmRpZGF0ZXM=","KFNlYXJjaA==","IGNvbW1pc3Npb25lcnM=","IEJpZW4=","IGRvY3RvcmFs","IEZlZWxpbmc=","X1ZFUlRJQ0FM","IEJk","bmdpbng=","IOWcqA==","X2FyZ3Y=","UlNB","IGVsZGVzdA==","LWhlYXZ5","Q09OTg==","IEh0dHBOb3RGb3VuZA==","LWNvbHVtbnM=","IE5QQ3M=","IGNhZmVz","IGfDqQ==","IHN0YWxscw==","IGZvcmtz","IHBvYmw=","U3RyZWFtcw==","IGJhc3RhcmQ=","IFJhcHRvcnM=","IEdyYW1teQ==","IEdlaA==","X1RpY2s=","KHByZWc=","IGxpcHN0aWNr","X3J1","PEg=","IMSRaQ==","LkNhcg==","IHNwYXJlZA==","bW9uaWM=","aW5jdGlvbnM=","QWZyaWNh","KGRpY3Rpb25hcnk=","ICoqKSY=","YGBg","X3ByZXNzdXJl","bWll","IFJvbWFuaWFu","L21hcms=","IG1haW50ZW5hbnQ=","IHRyZW4=","IFBvc3RncmVTUUw=","UkVMRUFTRQ==","SlBFRw==","IGRlZGljYXRl","TWFrZVJhbmdl","IHJvYm90aWNz","YWt0aXY=","JSUl","YWFy","dmlld01vZGVs","KG1hYw==","dWNoZXI=","IGRlYmVu","TG9jYWxpemF0aW9u","0L7Qt9Cy0YDQsNGJ0LDQtdGC","LnNldFRvb2xUaXA=","LmZhc3Rqc29u","IHBlcmVubmlhbA==","LWNoaW
Vm","a2lzaA==","IGF0dGlj","U3VidGl0bGU=","IFNsYW0=","IExpdGVyYXJ5","ZXJuZXM=","INGC0L7Qu9GM0LrQvg==","IHN0YXJ0QWN0aXZpdHlGb3JSZXN1bHQ=","LkVycm9yTWVzc2FnZQ==","YmluYXRpb25z","Ikw=","IGZvcmJpZA==","IGxvZGdlZA==","Lkxpc3RCb3g=","IFBTRA==","IGN1bHR1cmE=","VU5DVA==","Ik9uZQ==","IEd1aWxs","IEJhdHRhbGlvbg==","IGNhcmVnaXZlcnM=","IEtsbw==","QmVoaW5k","IHNlYXJjaGFibGU=","X0JPVU5E","Uk9D","IHN0ZXJlb3R5cGU=","IHByZXBlbmQ=","aW50ZXJzZWN0aW9u","QmFza2V0","KGxv","IGZpbGVJbmZv","IFVJU2Nyb2xsVmlldw==","ZWNlc3NhcmlseQ==","IENoZXM=","LWluc3RhbmNl","IGFwcGFydA==","IEFtYXI=","IHJvd0RhdGE=","IGF5dWRh","IGNhcmF2YW4=","X3BpY2tsZQ==","IGNoYWluaW5n","KV07Cgo=","IGJveGVk","YWVwZXI=","IEVWRVI=","eW50aGVzaXM=","LWZhc3Q=","IOuwsA==","5Y+v5Lul","IHZvbHVudGVlcmVk","IGV4aWc=","U0lERQ==","IFBob25lTnVtYmVy","dWxhaXJl","IEthZA==","IGRhcm4=","IHlhaw==","IEJsaW5r","LnNwaW5uZXI=","IG9yZGVhbA==","X2VuZW15","IGdldFM=","IEJvbw==","TGluZU51bWJlcg==","X0xPT0s=","RUxDT01F","IHNlYW1z","IHNhZ2Vu","aXNjbG9zZWQ=","KHJheQ==","W2dyb3Vw","UFRT","Lk5hdmlnYXRl","IE93bA==","IGRidXM=","IGltcGF0aWVudA==","IEd1cHRh","KG9iamVjdHM=","IGFwcmls","LXF1","IG91dHJhcw==","IFRIRU0=","IEVNQw==","RW1wbGVhZG8=","IGdydWI=","SUFN","IHZlbm9t","IHRyYW5zY2VuZA==","IHZpY3RvcmlvdXM=","IE1heWVy","INGC0L7QstCw0YA=","IEtlbGxleQ==","SW5wdXRHcm91cA==","IHJlZmlsbA==","V2l0aFR5cGU=","IGNoYXVmZg==","b2xkZW0=","X3RpZA==","IGZsdXNoZWQ=","XHN5c3RlbQ==","LnJhbmRyYW5nZQ==","IFBPU0lUSU9O","IFRlbmFudA==","Y29udmVyc2lvbg==","Y2FsbGluZw==","KCkpKSwK","0L7QvdCw","IHNpZGV3YXlz","IGxheA==","CXJlcA==","YWVwZXJuaWNr","IG5lZ2Vy","IEZseWVycw==","ICJALw==","dXBha2Fu","X2VsYXBzZWQ=","dHViZQ==","UG9zWA==","LnNleA==","IGzDpHNzdA==","IEdyYXZl","5Y+C","KGVtcA==","KHN0cnRvbG93ZXI=","Y29udmVydGVy","IFNwb25zb3JlZA==","KHdvcmtlcg==","IG1hdHJpbW9u","Q29tbWlzc2lvbg==","KGh3","X1NJR05BVFVSRQ==","bWVr","IGFsZ3VuYXM=","X0VU","aXN0cmluZw==","THY=","U2xpZGVz","IHdlYWtTZWxm","IHdr","IFppZw==","IHB1YnM=","IEJSQQ==","IGZsdW9yZXNjZW50","Y2Fycnk=","LmVyYg==","IEluaQ==","LkRyYXdTdHJpbmc
=","IFNFUA==","dXR0ZXJz","2ZE=","Um95YWw=","IGNhYmJhZ2U=","IFN1aw==","XT49","IEVkaXNvbg==","IHNwZWN1bGF0ZWQ=","LmRvd25jYXNl","IHRwaA==","IMOD","IGd1bnNob3Q=","cnBt","IGZsdXR0ZXI=","IGFueA==","YXplcw==","UU9iamVjdA==","IEZhdm9y","IG1vZHVsZU5hbWU=","JnM=","bGVo","LldlaWdodA==","IFdBTA==","X1ZBUlM=","IFdhc3Nlcg==","IG91dGJvdW5k","IGVyZm9sZ3Jl","LnZhbG9y","KGxpZ2h0","IE1hZ251cw==","IHpvZWs=","eWg=","IHN0eWxlc2hlZXQ=","Pm0=","V2hpdGVzcGFjZQ==","IFsnLw==","CVJlcXVlc3Q=","X2luY3JlYXNl","LWRpc3RhbmNl","aWNvbG9y","aGNp","IEtJTkc=","UFg=","b2ls","ZW1pbmc=","bmFtZW50cw==","RGVmaW5lcw==","IFstLQ==","IHZhcmlvcw==","IFBSRVNT","LGF4aXM=","IENvbGxpZGVy","KX0KCg==","IGZvcmNpYmx5","IHN0YWF0","X1NUQU5EQVJE","IG9jY3VsdA==","IGJhcHRpc20=","IEN1bm5pbmdoYW0=","X2J1aWx0aW4=","Q1BG","W21heG4=","IFJIUw==","IE9uZXM=","KF86","IGluc2VjdXJpdHk=","LnJlZ2lzdHJhdGlvbg==","aW1wbGlmaWVk","IFN5bXBvc2l1bQ==","aHJlYWQ=","IHF1ZWxsZQ==","IGZyZW56eQ==","Q2FsaWJyaQ==","IFNQRUVE","b3Vp","KCldLAo=","YWNjb3JkaW5n","IG1jYw==","IGFzaWF0","IGFkamFjZW5jeQ==","IEFibGU=","IHNhbGRv","bm9zdGk=","IGRpbWU=","ZXRyYXRpb24=","IE1vZGlmaWNhdGlvbg==","IEhlcmI=","IHBsYWF0cw==","IGludGVycGVyc29uYWw=","IO2ZleyduA==","YXJtZQ==","IGNvbWVyY2lhbA==","IEJhdGVz","KGNhcmRz","LmdldENsaWVudA==","Lk5PUk1BTA==","CVRlc3Q=","ICAgICAgICANCiAgICAgICAgDQo=","IFJhem9y","d2Vpcw==","SVRIVUI=","IEVOVElUWQ==","YWdpdA==","IG1pbmVjcmFmdA==","cHJvcG9zYWw=","IHNhbHR5","YW5kcg==","IENvbmNsdXNpb24=","IHBydWRlbnQ=","IFtA","IFB1cHBldA==","aWdvbg==","IEdvdGhhbQ==","IGNoZWVycw==","IFNoYXk=","IGpp","IEdESw==","ZXhwZXJ0","IGZ1bmt5","IFphbQ==","W05VTQ==","RGVxdWU=","X1RXTw==","XHZpZXdz","IHByb2pla3Q=","IGRyb3duZWQ=","a2lkcw==","LnNoZWV0","IG5vbmQ=","IGNvdXJ0ZQ==","IC4uLgoKCgo=","IHBpY3R1cmVzcXVl","IHR1YmluZw==","KCkuIg==","amV0cw==","X1B1YmxpYw==","IEZhcnI=","IEFyZA==","T1VSU0U=","IGthZGFy","IFByb2dyYW1t","LmtleXdvcmQ=","CSAgICAgICAgICAgICAgICA=","aWVkYWRlcw==","YXRvbG9neQ==","IER1bmQ=","PWNvdW50","IHNsb3dkb3du","LSIs","LkZvcmVncm91bmRDb2xvcg==","UnVucw==","LlR5
cGVPZg==","JGN1cnJlbnQ=","IHVwc2NhbGU=","CXVuaW9u","KGNoaXA=","dW1pZGl0eQ==","PVtdDQo=","IGhhcnQ=","ICRfWw==","eW5lYw==","LlVzdWFyaW8=","IG9jdGF2ZQ==","IHBvcnRyYXlhbA==","INC90L7QvNC10YA=","IE9jY3VweQ==","X25hbg==","IFNtYXJ0cGhvbmU=","aGluZA==","IHdpbmRzaGllbGQ=","IGxvbmVsaW5lc3M=","L2NoYXJ0","IGFjdGl2YXRlcw==","LnJpYmJvbg==","IGxhZ2k=","IHBhcmFjaA==","SHlwZXI=","c2NhbGVk","VGVz","IEJlZXQ=","IGRpc3NlY3Q=","IENpYw==","IH0sCgoK","PigpCgo=","LnN0dWR5","IGNvbnRyYXN0aW5n","WkVSTw==","IHR1bmE=","IENob3c=","X3Zh","ZmF2b3I=","W0luZGV4","IFBvd2VyU2hlbGw=","KHByb3Rv","JykpOgo=","X2Zvcm1hdHRlcg==","Q2hyaXN0b3BoZXI=","T3JOdWxs","Q0lTSU9O","X2NvbnN1bWVy","UGFzdGU=","KG5vbWU=","ZW50b24=","IHVucmF2ZWw=","X2Rvbg==","IHBhcmVudGhlc2Vz","IE5VSVQ=","L10=","IOKIpw==","c3RhY2xlcw==","L2NvbW1lbnQ=","dXR0aW5n","IHNsb3BweQ==","KFt7","LnNhdg==","dG9Kc29u","IOu5hA==","IFByYXR0","Lm1vZGlmeQ==","LklzQ2hlY2tlZA==","IHZlbmV6","IFNFVFRJTkdT","amF3","IGZpcmVzdG9yZQ==","IGNvbnNvcnRpdW0=","IGthYg==","IFN1cHBvcnRpbmc=","IFRoZXNpcw==","IG5vbmxpbmVhcg==","IHRleHRib3g=","LiIiIg==","IEVuZXJn","LkpPcHRpb25QYW5l","IGludGVycnVwdGlvbg==","w6h0cmVz","IHNoYWxl","IFBsYXllZA==","IHNvY2lhbGU=","WUdPTg==","X0JBVENI","IHRyaW1lc3Q=","IFByb2NlZHVyZXM=","IGF0dGVuZHM=","IiR7","ZXZhbHVhdGlvbg==","LlByb2dyZXNzQmFy","IEFsZXhhbmRyYQ==","Y2jDqQ==","X1NFUVVFTkNF","IGNyb2NoZXQ=","Um9z","IGlobmVu","ICIqKio=","IGFyb3Vz","IG1vZHVsdXM=","X0xJTlVY","U3RhY2tTaXpl","aWF0aW9uRXhjZXB0aW9u","Lk11dGFibGU=","IClb","IHBpaQ==","Zmlmbw==","X1BJQ0s=","UHVycG9zZQ==","KFN0dWRlbnQ=","IE5pY28=","ZXN6","L3Nt","IFBQUA==","W2lucHV0","5Y+Y","IGJsYXN0cw==","IE11dHVhbA==","cm9sbGV5","IHV0aWxpc2Vy","OlRoZQ==","5Z+6","LmRlY29kZXI=","IG9iamV0b3M=","IGF3YWtlbmluZw==","IEVubGlnaHQ=","CWFsaWdu","X3Jld3JpdGU=","L2N1cnJlbnQ=","IGRhcmF1Zg==","Q2FudGlkYWQ=","LG5w","IHZlbG9jaXRpZXM=","Q0xS","IG1pc2luZm9ybWF0aW9u","IHN0cmVhbWxpbmVk","IGdyb29taW5n","IGF6aQ==","b2xn","IGNvbnN0aXR1ZW50","IHdlZQ==","0YXQvtC00LjQvA==","IEFsb25zbw==","aWV0Zg==","Y3Rlcg==","IHRoZXJtb3N0Y
XQ=","KEND","IHN0YWNraW5n","X2NvbnZlcnRlcg==","IERpc25leWxhbmQ=","CWZpbGVz","SUNJ","X1RPUElD","CUVsZW1lbnQ=","YXJnYXM=","IFxA","YW5jb2Nr","IEJhc2VFbnRpdHk=","KCItLS0=","cmJyYWtr","IG5lZ2F0aXZlcw==","IHZ3","PWZvcGVu","Y2hlbWlzdA==","QXJjaGl2bw==","IGAu","IEZPVVI=","KGFp","VGFibGVXaWRnZXRJdGVt","PD8+Pg==","LnByZWQ=","VHJhaWw=","LWZhY3Rvcg==","IEltYWdlQnV0dG9u","cGVyaWE=","IENlbGVicmF0aW9u","LlJlc3BvbnNlQm9keQ==","dXJjaGFzZXM=","IGdldEtleQ==","IENyYWI=","IHFp","IFdpY2s=","IGNoYXN0","IC4uLi4uLg==","IGNvbWVueg==","IHNoYXJkcw==","IGTDqWNvcg==","IGhhbHZlcw==","UVVFTkNZ","IHBvd2VyaG91c2U=","TElORw==","Q2xhc3NMb2FkZXI=","Y2VudHJl","LXNlbmQ=","bWFo","IHNocmVkZGVk","IFRJRkY=","aW5rYQ==","LgoKCgoK","IGRlc2lnbmF0ZQ==","IE5pZ2h0bWFyZQ==","IEdlbmV0aWM=","X2NoYW5jZQ==","KGFuaW1hdGlvbg==","cXVpbGE=","X3NwZWNpZXM=","TkVZ","b3lzdGljaw==","cmVsbG8=","zqw=","IGRpdmlzaXZl","IFJFQw==","IHN0dW1ibGU=","KGZha2U=","IExhY2U=","YW50YWdlZA==","YWtlc3Q=","cHJvbW90aW9u","IEZvd2xlcg==","PWNlbnRlcg==","IENpdWRhZA==","UmFkaQ==","IFNsZWVwaW5n","dXRyb24=","IHF1b2k=","IFJBRA==","IGV4cG9uZW50aWFsbHk=","IEJyZWVk","IG1vbm9wb2w=","aGlnaGVzdA==","eG1sbnM=","SW50UHRy","IHR1dHRl","IFJlZnJpZ2Vy","IOmhtemdog==","IHpvbmRlcg==","bGJyYWtr","O2VsZW1lbnQ=","IEhlZA==","UmVsYXRpb25z","64U=","Q29ycmVv","5aC0","IE1pZ2h0eQ==","QU5HTw==","X2NvbXBpbGU=","LmdldENtcA==","IGludmFkZQ==","LnNwcmluZ2Jvb3Q=","IFR1bmU=","X3NuYXA=","X0ZFRUQ=","IGRlY2lwaGVy","PXNpemU=","X2ZyZQ==","IFRpbGxlcnNvbg==","0LjQutCw","dGlnaHQ=","IGN1bHByaXQ=","UlRM","IFBhcmU=","KHB1Yg==","ZWdvdg==","IHBvbnRv","IGNvbnN1bA==","SlNJbXBvcnQ=","IHZlcndlbmRldA==","IEJvb3N0ZXI=","5b6F","IGNhcnJvdA==","dmVyaWdl","KExQ","IHd4VA==","IGltcHJvcGVybHk=","Iik6DQo=","IHN1Y2U=","L21vZGFs","IElDVA==","LikuCgo=","X21hcmtz","IENhY2hlZA==","IEN1cnJpY3VsdW0=","QnM=","CUpPcHRpb25QYW5l","m4Q=","IGNvZ25pdGlvbg==","IE5lZ290","PXJlc3VsdA==","X0ZvbnQ=","YXJpbmU=","IGNvbnNwaWM=","IENhbGN1bGF0aW9u","IENFT3M=","LXRyYW5zcGFyZW50","IEJlcmVpY2g=","56iL5bqP","Lmh5","LkFsaWdu","IGhvcGVsZXNz",
"IGNvbG9tYg==","dXJiZWQ=","IFNBWA==","IGVpbno=","KHpvbmU=","IG11enpsZQ==","IHRyZXNwYXNz","IEFicmFtcw==","IGNvbXDDqXQ=","IFNhbmN0dWFyeQ==","IE5TVGV4dEFsaWdubWVudA==","IHN0YXY=","IHByYWdtYXRpYw==","c3RyZW5ndGg=","V2l0aE9wdGlvbnM=","LmJhbmQ=","YXBoYWVs","QXVzdHJhbGlhbg==","IE9TRXJyb3I=","TWFuY2hlc3Rlcg==","SWRl","XFJlc291cmNl","0L7QtNC10YDQtg==","IHppZQ==","SGFybmVzcw==","LlR3ZWVu","Y2Ftcw==","4pyU","LXNjYWxhYmxl","LW9r","IGpsb25n","IE9sc29u","IE9ha3M=","LnNsaW0=","IHPFgg==","IG5ld09iag==","LkludmVudG9yeQ==","IGtlbm4=","IG5pZ2h0bWFyZXM=","aXJjbGVz","Lm50","Z3Jlbg==","IFRFTg==","IFNjb3Rz","IERpc2FiaWxpdHk=","X21hbmlmZXN0","LnNpZGViYXI=","IHNodWZmbGVk","IGh1bWlsaXR5","LnRhcA==","IEdyYWlu","bm90aWNlZA==","77yJ44CC","X2hwcA==","IGRpbGF0aW9u","IGhhbmRpY2Fw","Z2V0RGF0ZQ==","IGR6aWHFgg==","JykuJzwv","cmVjb3Zlcg==","eXNp","KGdyYXk=","YWhrYW4=","IGludGVyZmVyaW5n","X1RPVUNI","X3JlZHVjdGlvbg==","QWx0ZXI=","IGN1Yw==","RXhwZXJ0","IEx1bXA=","Wzpd","IHJlbG9j","IGNvbmR1Yw==","Q2hhcnNldHM=","Lmxpc3RlbmVycw==","LWludmVyc2U=","IHN1bW1vbnM=","IMO6bmljbw==","IE9W","IFNpY2hlcg==","IEpGYWN0b3J5","LmdldEJvdW5kaW5nQ2xpZW50UmVjdA==","amg=","IHNrZWxldG9ucw==","IEFzaWFucw==","IEFNQw==","aXNlbGVjdA==","LmNsaWVudEhlaWdodA==","KGZy","SGFzRm9yZWlnbktleQ==","LnJlbGF0aXZl","INiu","IG11bHRpY3VsdHVyYWw=","X0NPTEw=","IG1pY3JvYmlhbA==","IGltcG9ydGFudGVz","U3BhaW4=","IGN5bGluZGVycw==","aWVuaWU=","X09XTkVS","KERJUw==","IGZhbmRvbQ==","KG54","IGFwbGljYWNpw7Nu","b2NhdG9y","ZXNzaWFu","IENsYXVkZQ==","IGludG9sZXJhbmNl","xYJlbQ==","IFNlbWFudGlj","Lk1pZGRsZVJpZ2h0","QVJFU1Q=","IHNpZXZl","xLHEn8Sx","aWNhYmxl","ZXJnaWM=","IGJhdHRsZWQ=","b3JiaXQ=","KXx8KA==","dWVsZQ==","IGZhc2NpbmF0aW9u","IGTDpQ==","IFRpZ2h0","X0lOQ1JFRg==","LklzU3VjY2Vzcw==","LE8=","IHN0w7hy","IHByZXNzdXJlZA==","LlRSVUU=","IFRob3VzYW5k","IGdlbWVpbnM=","IHpi","IHNwaXJpdHVhbGl0eQ==","IFpldXM=","IFBvd2VyZnVs","YmF0dGVyeQ==","aXN0ZXM=","IO2D","LnNoaXJv","IEhpcHA=","ZGVjbHR5cGU=","LmpmYWNl","LnRlbXBlcmF0dXJl","IG1hcnF1ZQ==","X2JhZw==","QXR1YWw=","cHJpY2luZ
w==","Q2xlYXJseQ==","X0Fic3RyYWN0","w6lr","YWhydW5nZW4=","SW5zdHI=","CQoKCg==","IGNoZXdpbmc=","IENvYWNoaW5n","JExBTkc=","bWFsbG93","IHNlcmlvdXNuZXNz","X2N1dG9mZg==","IFF1YXJ0ZXJseQ==","fScpCgo=","IikpKTsKCg==","6KeE","LlBvc2l0aXZl","LXBv","eGl0bw==","LlJhZA==","IGJyaXNr","IExpZmVjeWNsZQ==","5pWw5o2u5bqT","ZmF0YWw=","IHhwb3M=","LkRldGFpbA==","ZW5hbA==","TUFUQ0g=","IGhlZWQ=","IGFmcmljYW4=","RGFkb3M=","YmVyYXBh","IGhlbGY=","JywnJyw=","IGVudHJlcHJlbmV1cnNoaXA=","IGNlcnRz","ZWNl","PnI=","X2ZpeHR1cmU=","IHBvb2xpbmc=","IG1vZ2VsaWpr","IHNldERhdGU=","5pS/","LWNvbXBsZXRl","X1JBRElP","IGt1bA==","IGdvYg==","X1NMQVZF","IGZ1cnJ5","IE5VSVRLQQ==","SUxJVElFUw==","IG5vY2hl","IGN1ZmY=","IGNvbnRlc3RhbnRz","IFdW","IHBhc3Nwb3J0cw==","IMWC","IE5haWw=","X2RlY2ltYWw=","YXN0bGU=","IFNvbGRpZXJz","UmVjaXBpZW50","IGNvdXJzZXdvcms=","IGltZQ==","IFNlYXRz","X0RM","IGNvbnN1bHRhdGlvbnM=","X0FEVg==","IElrZWE=","IG9maWNpYWw=","IHJlZ2ltZW50","IEJhdGhz","LXBpbg==","X0JVQ0tFVA==","QUJDREVGR0hJSktMTU5PUA==","Il0pKTsK","PE1lc2g=","Iix7","IGRlcml2ZXM=","4oCcRm9y","IFl1Z29zbA==","aXNFbmFibGVk","IHNvbGx0ZW4=","IHBldGl0aW9ucw==","b3ZlcmFsbA==","IGdldFRvdGFs","X0hJTlQ=","TWludXM=","IGFub21hbGllcw==","IFBpY2t1cA==","PT09Jw==","bGVpdHVuZw==","IERlaw==","WVNJUw==","LnNlc3Npb25z","IGNhcmM=","X0l0ZW1z","IGludGVybWl0dGVudA==","Lkpzb25Qcm9wZXJ0eQ==","IG1NYXA=","IEthaw==","YWluY29udHJp","X3NlZWs=","IHVuYW1l","X3B1dHN0cg==","RmQ=","TGltaXRlZA==","c25vdw==","IFBhdmlsaW9u","IEV4YWN0","IHBvc3Rpbmdz","CWRpc3Q=","PHN0ZGxpYg==","TGlnaHRz","IGZpbHRybw==","V29ya2Vycw==","IHN5c2xvZw==","R2lybHM=","IEd1bQ==","X3llYXJz","J319Cg==","IGjDpHQ=","Z2F5","KHByb2I=","ZWxsYXM=","IHdpbHQ=","Lm9wdGltaXpl","X0RVTVA=","KFhNTA==","IERYR0k=","IG3DqXRo","SVRJWkU=","ZWxlY3Ryb24=","LmN6","IHN1YnNldHM=","IHJlc3Bvc3Rh","IGJlYWQ=","wrsu","IE9TQw==","JnBhZ2U=","Z3Bz","YW5pYW4=","UHVycGxl","IGFjcm9ueW0=","Uk9XTg==","QXVkaXQ=","IGNvdXJpZXI=","YWxpZQ==","IFdhc3M=","IGF1ZGl0cw==","IFBPVg==","IEZhY2lhbA==","X3N0cmNtcA==","ICsl","ICAgICAKCg==","YCk7Cgo=","RU
hJQ0xF","WyJA","LW5hdGlvbmFs","6ZuF6buR","6L2v6ZuF6buR","X2NvZGlnbw==","IHVucXVlc3Rpb24=","aWxtaW5ndG9u","cmVxdWVzdENvZGU=","IElX","LnN0cmF0ZWd5","IFNZTUJPTA==","IGdyw7bDnw==","X2JlaGF2aW9y","IHJlZnJlc2hUb2tlbg==","IG1vbmc=","aW1lbnRhcnk=","IFNob3Bz","KCc/","X2hpZ2hsaWdodA==","X2xleA==","IGlsbHVtaW5hdGVk","IHBhbHA=","LWluc2VydA==","IHN0cml2ZXM=","IGZvcnRz","IGVtYm9kaW1lbnRz","bXBqZXM=","X1RPTw==","IGRyYWdnYWJsZQ==","IGltbWVyc2lvbg==","cGlucw==","IFJlZ2lzdHI=","IEZyZWVCU0Q=","X3hsaW0=","IFR1bHNh","U25hY2tiYXI=","L2RhdGU=","IGRhdm9u","IGF1dG9yZWxlYXNl","IHZhY2F0aW9ucw==","CQkgCQ==","aWNlcHM=","IFJhbXA=","IEN5bnRoaWE=","X3BvcHVsYXRpb24=","JCQk","IFRBUg==","ZW5nYQ==","IHB1cw==","IOW5","IHRpbWVzdGVw","TGlmZXRpbWU=","IGZpbG1lcg==","WVNU","IEdhemV0dGU=","IG91dHNpZGVy","IEVYUE9SVA==","R09SSVRITQ==","LmZsZXg=","IFJvb3Rz","KHBpeGVs","emN6ZQ==","YWlyaWU=","IG92ZXJsb2FkZWQ=","U1RSQUNU","IENvdXJpZXI=","44GW","Y29udGluZW50","RnJlZA==","IHNlbXA=","IFN0ZWxsYQ==","IGRvdWJ0ZnVs","YWRtaW5z","IG9wdGluZw==","TE9UUw==","IG1hbmlmZXN0bw==","LWZvbGRlcg==","X2Ryb3BvdXQ=","dXR1cmVz","w612ZWlz","YWNoaWV2ZW1lbnQ=","IGNveQ==","ZmFpdGg=","X0hBTEY=","aXJlY3RlZA==","IGNvbnRhdG8=","U2VtYXBob3Jl","UHNp","IHZpdGFsaXR5","IEZsYXRCdXR0b24=","SXRlbVR5cGU=","IGltcGVjYw==","IGJ1b3k=","dWlu","IHNreXJvY2tldA==","IFNsYXllcg==","IFJDTVA=","IFNldmVudGg=","X0ludGVyZmFjZQ==","IGZpZXJj","c3RhdGlvbnM=","IEdyYWY=","bGljZWQ=","IGVudW1lcmF0b3I=","Q29udGFpbmVycw==","IG9p","w4fDg08=","LXRvbg==","UkVQ","KGZsb3c=","LmNvb3Jk","R2Fi","IE1vcnBo","IFpvZQ==","IGhhcmJvdXI=","Lm1lc3NhZ2luZw==","X29wdGlvbmFs","IEJhc2VBY3Rpdml0eQ==","cmVzZW50ZXI=","IG5ieXRlcw==","IGNvdXJhZ2VvdXM=","PSE=","J0l0","IGZvcnM=","IGNvcnJpZG9ycw==","IEJFRU4=","IGZ1c2Vk","PWltYWdl","LkdyaWRWaWV3","IHNlbWVu","aWdyb3Vw","dXB0aW1l","IFhC","5o6S5bqP","IGludGVncmF0ZXM=","X09D","IGJhaWxvdXQ=","IHRlc3Rl","IG9jdXA=","YXVsZWQ=","X29kZA==","cGdh","IEFTVVM=","IFRTUg==","IG9jY3VwYW50cw==","U2V0VGl0bGU=","U2NoZWR1bGVycw==","IGJla29tbWVu","QnJpZ2h0","IE1haW5Gb3Jt","Xygn"
,"RnJvbUFycmF5","IGluZGljYQ==","SEFORA==","T3JkZW4=","IFRlbXBlcg==","LnN0YXR1c1RleHQ=","cG9saXRpY2Fs","IFBlcmN5","44CCCgoKCgoK","LnNldFg=","Z2V0TGlzdA==","aG9sZXM=","UGl4","IG91dHNvdXJjaW5n","IG1lc3NhZ2VJZA==","IGdldFNlc3Npb24=","IFZJUg==","T2ZGaWxl","IFNwYXRpYWw=","LkZsb2F0RmllbGQ=","KShfXw==","IFN3aW1taW5n","QUNMRQ==","IHNlbnRpcg==","IHBsdW5nZWQ=","IGF1am91cmQ=","Z3VuYWthbg==","KHZvbHVtZQ==","IGNyYXRlcg==","Lnhscw==","woDCmQ==","UmVuZGVyV2luZG93","LnVzZXJtb2RlbA==","IGZ1bmN0b3I=","RG9tYWlucw==","aW50ZXJwcmU=","IGFibm9ybWFsaXRpZXM=","YXJnaW5n","RGVtb2NyYXRz","IHBhbG1z","4qCA","w7hk","KkE=","RnJvbURhdGU=","fFs=","IEFsdGVybmF0ZQ==","IHB1ZG8=","IGNvbmRlbnNlZA==","KHBsYW4=","ZGVsaXZlcg==","IGJ1bGxldGlu","J11dLA==","IGNyw6llcg==","LWlw","V3M=","IiIiLAo=","IGlrZWE=","IHZpc2l0ZQ==","IG11bHRpcw==","UmVzdWx0YWRv","IFBob3RvZ3JhcGhlcg==","Li4uJywK","IG1pZ2xpb3Jp","IFRocmVhZHM=","Z2V0U3R5bGU=","ZXJhw6fDo28=","PFRTb3VyY2U=","IEdpbmc=","J10iLA==","IHNpZ25hbGVk","U3VwcHJlc3NMaW50","IGR3b3Jk","IEh1bnRpbmd0b24=","IEFBUA==","QU5HTEVT","LmNyZWRlbnRpYWxz","c3dhZ2dlcg==","LWNvbnNvbGU=","Ii0t","LlRleHRJbnB1dA==","IE5PUlRI","IG5pZ2h0bHk=","LkZPTlQ=","IHF1b3RpZW50","5Lmf","IHNjaMO2bg==","IFBsYW5uZXI=","IHJlYWRsaW5l","IGNvbmZyb250aW5n","YH0=","SXRlbUNvdW50","CWFjdGl2ZQ==","IHLDqXBvbmQ=","ZWxtZXQ=","IGdpbW0=","LG5vbmF0b21pYw==","IEFDVElWRQ==","aGV1cmU=","L1ByaXZhdGU=","IG1lYw==","LlNlY3JldA==","IENJUw==","xYJ1Zw==","KHBlcmlvZA==","IGxsZWdhcg==","dXJpYQ==","RGVzY3JpYmU=","IHBhcmVqYQ==","IFZlZA==","LWVmZmVjdHM=","IFBhcnNpbmc=","LXJlc291cmNl","IGFiYQ==","ICosCg==","IGFuYXRvbQ==","ICgqKSg=","LXJlYWw=","IFZlbnR1cmVz","IFNoaWVsZHM=","IFVuaXZlcnNpdGllcw==","UFJFU0VOVA==","IFFMYXRpbg==","xaU=","IFdpbGV5","QWFyb24=","IHJhY2lhbGx5","IE5hZHU=","IGh0dHBSZXNwb25zZQ==","w610aWNh","IOuwqQ==","IGdyw6F0aXM=","5LuL","b21hcA==","IGFub24=","CXBvcA==","YXZhdGFycw==","IHN1YnBhcmFncmFwaA==","ZHpp","UHJvamVjdGlsZQ==","RFRW","bGlzdGVuaW5n","X3JlZ2VuZXJhdGlvbg==","IFNoZWx0ZXI=","PFZlcnRleA==","L21k","KGxl","IHZhaw==
","c2VsZWN0ZWRJbmRleA==","X10=","IFN5bnRoZXRpYw==","YXBwSWQ=","IEZpcmVk","IHBhbXBo","X2xhdGVuY3k=","aW5maWxl","KGNyaXRlcmlh","c2VyaWFsaXphdGlvbg==","UkNU","CWV2","IFNDSA==","IE9wdGljYWw=","IHN0aXJyZWQ=","IFBvdGlvbg==","ZXRoaWNhbA==","Ojp7Cg==","IFBlbmd1aW5z","UEhZ","RGVjaXNpb24=","a2FydA==","IGV4cG9ydGVycw==","IFBvbHllc3Rlcg==","Y29udHJlcw==","IExhd3Nvbg==","IEVtcGxveWVy","IHNhc3M=","IGRvd250aW1l","IGJyb2tlcmFnZQ==","IFJvdGFyeQ==","IFdhaGw=","V0FSTg==","IHNldEFjdGl2ZQ==","dGVtcGw=","Q2hlZXJz","LXNoZWxs","Rml0bmVzcw==","IHF1aWw=","IGNsZWFuZXJz","IOeb","IE1pbGFubw==","LWFzc29jaWF0ZWQ=","fX19LAo=","UEZO","IG9uUGFnZQ==","X3N0cmVhbXM=","IHNjdWxwdHVyZXM=","IG5haWxlZA==","PXNj","6aaW6aG1","0LjQvNCy","Y29ubmV4aW9u","Sk9C","IEthcm1h","IFN3aWZ0VUk=","IERleg==","L1VJ","IOyZ","Z2V0Q2xpZW50T3JpZ2luYWw=","IHB1bmlzaGluZw==","IG9kZW5zZQ==","LHJpZ2h0","ZW5lcmF0aXZl","IFByb2JsZQ==","IEFwcFN0YXRl","IGRpc2Nsb3N1cmVz","IENhbnRlcg==","Y29tcG9zZXI=","dXBhdGVu","IHN1Y2Nlc3NvcnM=","Ij4nCg==","IHByZXNlcnZlcw==","Lm9wZW5k","X05vcm1hbA==","L2hy","UmFuZ2Vz","LGxvbmc=","CQkJCSAgICAgICAgICAg","cHJvZHVjdG9z","IGZseWVy","IEdydXBv","Tmlja25hbWU=","SGllcg==","IERFQQ==","U3ByaXRlcw==","CW1hc2s=","X3Jlc2VydmVk","LXNob3A=","Lm5vdGlmaWNhdGlvbnM=","IGRpdmlzaWJsZQ==","aW9zaw==","a2VyamE=","aW5ndA==","IEZpZnR5","IGFjY291bnRhbnQ=","IEV4cGxvcmF0aW9u","X2Jyb2FkY2FzdA==","IGV4dHJhb3JkaW5hcmlseQ==","IGtvdA==","IGNpcmN1bWZlcmVuY2U=","cm91Y2g=","W0Jvb2xlYW4=","Y3Jhd2xlcg==","L3JlbW92ZQ==","YXJlbGxh","IHNleGVz","SGludHM=","IGdhbWI=","IGRhcmVk","dGVzdGVk","X0tFRVA=","IGZpbHRyYXRpb24=","aWNrZXk=","IEluZmx1ZW5jZQ==","IHNwZWNpZmljaXR5","X0lEUw==","IFJvZG5leQ==","X0lSUUhhbmRsZXI=","T25FcnJvcg==","IHByZXZTdGF0ZQ==","aWVnZWw=","IExFU1M=","IGF3YWtlRnJvbU5pYg==","IExV","dW1hYmx5","b3J0YWxpdHk=","IG1hbmRhdGVz","CXZlcnNpb24=","IHBhcmVudE5vZGU=","IHBlc3Rz","IGNhc2M=","Y2VwdGFy","IFdvb2R5","ZXJlZQ==","X3Bm","LlBPUw==","aXN0cmE=","bGV3","WWFuZw==","IHN5c3RlbWQ=","IHJvYW0=","LkdyYXk=","IGNvbmR1","4oCUaW5jbHVkaW5n","VmlvbGF0aW9u",
"TWFob24=","IE1VU0lD","IFNpcmk=","IEVudGVyZWQ=","IGNlcnRhaW5z","ZWxhaA==","CU1haW4=","LkRhdGVGaWVsZA==","LkhlYWx0aA==","IEthc2ljaA==","IGNhbmluZQ==","PXJvb3Q=","dWRkbGU=","XGNvbW1vbg==","IFN1bHRhbg==","ZmluYW5jaWFs","IFFTcWw=","IGFzY2VudA==","IHBydWViYQ==","emllaHVuZw==","LmdldEVycm9y","IEdsb3JpYQ==","RWNobw==","X0NIT0lDRVM=","X2Vwcw==","L3Byb3ZpZGVy","UEhPTkU=","5YWz6Zet","IGNvbXByb21pc2luZw==","X0FQUFJP","UHJvY2Vzc0V2ZW50","IGJ5dGVBcnJheQ==","IENydWM=","wqg=","IGljaW5n","IFBDTQ==","dmVjdA==","QW15","IFZhY3V1bQ==","aW5jaWRlbnQ=","IHVzZXJu","emJlaw==","XSspLw==","IH19Ij48","IEdldERhdGE=","Y250bA==","IHNhZ3Q=","X1BSSU1BUlk=","IGxlcg==","IEZVQ0s=","IFN0YXJy","SUg=","w7ZycGVy","eW1z","XSldCg==","L3Rvb2w=","Y29tYmluYXRpb24=","IHRhbXA=","IEJlaXQ=","IE5JR0hU","IGFubsOpZQ==","KGFt","XFRyYWl0cw==","Olwi","IGNhcmdh","LmlkZQ==","IGRpa2tl","Q29tcGV0","IHNjb290ZXI=","IHhQb3M=","KGludGVycA==","IGhhc2ls","Y2xpZA==","IGhldXJlcw==","Z2xvbWVy","c2hhcmVz","77yMCgo=","cG9uZGU=","4bqjaQ==","X2R1cGxpY2F0ZXM=","c29uZ3M=","fV07Cg==","IFNuaXBlcg==","IFRodXI=","cm9wcA==","IGdydWVz","IG9yZXM=","dXNoaW1h","IHVzYWJpbGl0eQ==","6ZKf","L21lbWJlcg==","b2xkZW1vcnQ=","SXNBY3RpdmU=","R2V0RW51bWVyYXRvcg==","bXV4","V0lORE9XUw==","TmVnYXRpdmVCdXR0b24=","4Liz","LW1ha2Vycw==","44Kk44Oz","IEJlcm0=","QnlFeGFtcGxl","IFLDvGNr","U2hvd3M=","Z2hp","IElocmVy","IENydWQ=","Y2hlZg==","X2F1Yw==","IGFww7Nz","YW5rYW4=","IEtERQ==","SUxMUw==","IGFuZ2xhaXM=","LXJlZnJlc2g=","CXJhbmdl","eG1t","KGVkZ2Vz","IGFwcGVs","Ijt9","IGVkaQ==","IHN3b2xsZW4=","IGJ1dGNoZXI=","aWNpZGVz","aG91bmQ=","IF4o","IEV2YWx1","IGtleWJvYXJkVHlwZQ==","U1NJRA==","cm9iYXQ=","IG5paw==","IHN0cmF3YmVycmllcw==","XCJd","bm9zaXM=","TUVE","54g=","5LqU","aW1heA==","XEFubm90YXRpb24=","IG51cnU=","IE1pbmltYWw=","IHdvcmRwcmVzcw==","IGNvbGRlcg==","CXBhcnNl","L3N0cmV0Y2g=","5omn6KGM","cm9tb3NvbWU=","RElN","IHRlbnRhdGl2ZQ==","Ok5TVVRG","LGltZw==","IE1BVEVSSUFM","IEpldEJyYWlucw==","TGVnZW5kYXJ5","CXN0cm5jcHk=","IGRlZnM=","TnVtYmVyRm9ybWF0RXhjZXB0aW9u","IGJ5dGVjb2Rl","I
Hdpc3Nlbg==","X01PUkU=","oO2DnQ==","IENvZmY=","LkNvbmRpdGlvbg==","IGTDqXBhcnQ=","ZHNu","IHBhcmFtZXRybw==","XEw=","Lm5hbm9UaW1l","Qk9UVE9N","LldoYXQ=","64Q=","IERpeA==","X0RB","KENvbnRhaW5lcg==","YXlhcg==","RmxleGlibGU=","LlJheWNhc3Q=","IEVkd2lu","W3VybA==","wpI=","LnN0cm9rZVN0eWxl","IFBvbHlub21pYWw=","aWxpdGF0aW5n","IFFWQm94TGF5b3V0","KHJlcA==","LnZu","LWFzc2V0cw==","Q0hBU0U=","IEVzc2VudGlhbHM=","anlsbGFuZA==","IGF4cw==","IFRyZW0=","Lm1haW5sb29w","IFdJTkRPV1M=","LlJFUVVFU1Q=","IHJlaW50","IExpYnJl","Y2hlb24=","IGd1ZXJy","CU5kckZjU2hvcnQ=","LnNvZnRtYXg=","IEFzdXM=","LXNjb3Jl","IEpPSE4=","PlN0YXR1cw==","PkVkaXQ=","IENhbWU=","IEFzaGU=","X3VzaW5n","IExvbmU=","IGxlc2Vu","IHJldmVyc2luZw==","bmdyeA==","LnNpZ25hdHVyZQ==","LUFzc2Fk","L25hdGl2ZQ==","X3JhdGluZ3M=","IG55YQ==","IGFkaWRhcw==","KG9wdGlvbmFs","Il0o","IHJlY3VycmVuY2U=","IEJNUA==","z4w=","X2dw","Ij5c","X3dyb25n","eXBz","LlByb3h5","X1VEUA==","UXRDb3Jl","TGlua2VkSW4=","IGNhdmVybg==","IHNww6ljaWFs","X3dpcmU=","IG5hbm9w","LmJhbGw=","IHJlZHVjZXJz","IG1haWxlZA==","ZG9uZw==","IG9wcG9zZXM=","IEhhbnNvbg==","IFNhdHVyZGF5cw==","YWNvbW1lbnQ=","X01ldGFEYXRh","IEdhbGFjdGlj","KCIvIik=","IENsZWFuZXI=","X1RFUk0=","IGNsYXJv","Lk9VVA==","5a6h","IHNsaWs=","IGplZG5haw==","SGFuZGxlckNvbnRleHQ=","IGlycmFkaQ==","ICAgICAgICAgICAgICAgICAgICAgICAgIAo=","LnRpZ2h0","QnJlYWRjcnVtYg==","ZnJleQ==","IOqwneyytA==","bGJyYWNl","TEVHQUw=","LWd1bg==","IEJsb2dz","IFNoaXJsZXk=","IFB1bmU=","dXJzaW9ucw==","IHN1YnRyYWN0aW9u","ICoqKgo=","YXJtYWN5","IHNhbXQ=","PSIpLg==","IHBlcm1pc3NpYmxl","KHJk","IFdBVEVS","IHByb2Zlc2lvbmFs","IGhhbmRib29r","IG1vdXJuaW5n","YXJlZmE=","IGFzbg==","aXNleA==","IGNvbnRlbnU=","IFVOQw==","LmdldFByaWNl","IFB1bXBraW4=","LwoKCg==","IGNvc2luZQ==","IG5pZWQ=","IEJyYWtl","RGF0YVVSTA==","IERhdGFHcmlkVmlld0NlbGxTdHlsZQ==","IFJldHVybmVk","ZXdvb2Q=","aXF1w6k=","IGJsZWFr","IHdlYmhvb2s=","LlRoZXk=","YXJi","TEFOR0FETQ==","X29yZGVyZWQ=","IHByYW5r","Lk5ld1JlcXVlc3Q=","IGxpdGVyYWxz","J30+Cg==","c2VyaWFsaXplZA==","a3Rvcg==","KHJ4","IGdldFk=","CVN0cmluZ0J1
ZmZlcg==","KHNsaWNl","cmJyYWNl","ZW1lbnRv","IGxhbmM=","RGVwbG95bWVudA==","IGNvbmNlbnRyYXRpbmc=","U2tldGNo","IGJyaWdodGx5","QmVnaW5uaW5n","IERhaA==","VGs=","SW5zZW5zaXRpdmU=","IHNhYmU=","KE1vZHVsZQ==","IGNlZGFy","X2NvbnRpbnVl","IHdpdGhPYmplY3Q=","IGNvbHVtbmE=","IENhbGRlcg==","INC/0L7QvA==","X3NvZnRj","c2hhbGVk","ZXJ0YXRpb24=","CSAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","OkAiIg==","IGZhw6dvbg==","dXN0dW0=","c3Rr","X0NSQw==","b2R6aQ==","IGFzY2VuZA==","Zmdhbmc=","IHByZWZhYg==","IGZpbmRldA==","Oicr","5Y2V5L2N","dW1ibGVkb3Jl","LmludmFsaWRhdGU=","IHRvaQ==","YW5nZXBpY2tlcg==","X0FJ","aGls","U2VhdA==","IHBpc3Rvbg==","Zmli","X2JsdWVwcmludA==","44K4","X1JlY29yZA==","cmV0cw==","RnJhbg==","IENhaXQ=","IHBlbGlj","IGRuYQ==","IHVwZGF0ZVRpbWU=","IC9eWw==","IHJhbGxpZWQ=","IEhpbWFs","U1NJ","X3BsYW5lcw==","IE91dHN0YW5kaW5n","QXBwbGljYXRpb25CdWlsZGVy","c3R1ZA==","X2xvY2F0b3I=","IGFib2xpdGlvbg==","ICgkKQ==","amVybmU=","IEFBQw==","L3dpbmRvd3M=","LUNhbA==","X1NFQ09ORFM=","ICcnfQo=","w6FueQ==","IHl1bW15","5omL5py65Y+3","IFZHQQ==","aWxhdGU=","IFN1cnZlaWxsYW5jZQ==","CUd0aw==","8J+Y","IHNoaW1tZXI=","YWx0ZXJuYXRl","Rm9yU2VndWU=","dWVzdHJh","LWNvdmVy","YXNs","IEluc2V0cw==","bGlqYWg=","OlM=","CWNhdGVnb3J5","IGZq","w61saWE=","IE1BRA==","QGpz","5p8=","IHBvb2xlZA==","IHRyZWF0aWVz","IEJpaw==","IEhhemVs","QWxsb2NhdGU=","IGFpcnBsYW5lcw==","IHNlcm1vbg==","IFBvc2l0aW9ucw==","IE1BSUw=","U3RvcHBpbmc=","YXZvcmVk","KFRlbXA=","IGNoZWF0cw==","LnVzZXJJRA==","IHB1dGE=","LXl5eXk=","VWlUaHJlYWQ=","IG9mc3RyZWFt","XFNlZWRlcg==","IENvdHRhZ2U=","IF4K","IEFMVEVS","IHF1YW50aWZ5","cmVpYnVuZw==","IG5lY2Vzc2l0aWVz","LkxvY2FsRGF0ZQ==","IOaXpQ==","cGljdHVyZXM=","IGNydWQ=","5pyo","IGRvd250dXJu","YWN0b3Jpbmc=","IERlcm0=","IGVzdHJ1Y3Q=","IE11c2lr","IG1seA==","Lm1ham9y","Lkh0dHBTZXNzaW9u","Pzw=","eWVhaA==","IG1vam8=","IFVuaXR5RWRpdG9y","IHJha2U=","X3R3ZWV0","IHJhZGlvQnV0dG9u","IERvbWluaW9u","YXNTdHJpbmc=","b3p5","IHZvZGth","b2dsb2I=","IEFsdW1uaQ==","YmFsYW5jZXM=","X21hbnVhbA==","LmxvYWR0eHQ=","X2ZyaWVuZHM=","IFhtbERvY3VtZW50","W2Z
pcnN0","S2V5Q29kZQ==","IHBvZXRpYw==","bWluYQ==","IG9wY2lvbmVz","5omT","X3N1cHBsaWVy","LkZyb21SZXN1bHQ=","X2Rpc3RyaWN0","IEdhbGE=","LnF0","IGNvbnRyYWN0dWFs","YWNvbnM=","LWFuY2hvcg==","IHl1cA==","IHVuYW5zd2VyZWQ=","IG1heGxlbg==","RXJyTXNn","LXNu","IGh5cG5vdA==","X1dN","KCldWw==","IGRlc2VydmluZw==","b3dtZW50","KFJhbmRvbQ==","IHZldG9y","IElTVA==","0LDQvdC0","LWxhbmc=","IHNpaw==","Y3JlYXNpbmc=","IHBvcnRhbHM=","IEJ1bGxkb2dz","cHJvbW8=","IHByb3Zva2Vk","XX07Cg==","IEliaWQ=","ZXJnbGFzcw==","X1dJRkk=","YXBwcm9wcmk=","IHJlZGVzaWduZWQ=","IC8vLS0tLS0tLS0tLS0tLS0tLQ==","emlr","JG8=","dWx0b24=","IFJlbGF0aXZlcw==","IG1ldHJvcw==","IG1lbnRvcmluZw==","YXTEgw==","dXNobWFu","IGluaGVyaXRz","IFJ0","L3ByZWZlcmVuY2Vz","aW1lZA==","Sk9JTg==","KGludGVyZmFjZQ==","IGFkZXB0","IE9mZmVuc2l2ZQ==","IEFHUkU=","b25pYW4=","LnBhcnNlcnM=","IHBhc3NwaHJhc2U=","IHVuc2VyaWFsaXpl","VmlzaXRlZA==","IGdldFByb3BlcnR5","IG5vYw==","ZWRhZA==","ICMtfQoK","dmlkYQ==","c29sdmVy","IE1vcmFsZXM=","IGt2aW5uZQ==","IEFjY2lkZW50","IHZldXQ=","IG1pc2d1aWRlZA==","IFJldmVsYXRpb24=","IHJhcGlkZQ==","cHVuaw==","Iy0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0=","T2JqZWN0SWQ=","YWJpbmV0","ZXh0cmFjb21tZW50","IGJ1bm55","IERlZmVycmVk","dXR0YQ==","dWFl","YnVzdGVycw==","IFNvaWw=","R1NU","LkN1cnJlbnRSb3c=","44GR","IGdyYXR1aXRz","IGNydWlzZXI=","15E=","IFRlbm4=","anNj","IO2VhA==","ZGlzcG9zZWQ=","QUJPVVQ=","fQ0NCg==","ZXhwaXJlZA==","IFhtbE5vZGU=","IFRhdHRvbw==","Vm90ZXM=","Rm9sZA==","RWxpemFiZXRo","X0ZJTEVOTw==","IGNvbmNv","IEdkaw==","b3BpZXM=","fX19","UVVPVEU=","LUlJ","c3BhbQ==","LWxp","IGNhcnRh","LmxheW91dHM=","IGJlc3Bva2U=","IGFtYXRldXJz","IGNvdWxldXI=","aXRhbWlu","IGlycmVzcGVjdGl2ZQ==","IGJsYWNrQ29sb3I=","LnlhaG9v","IHdlYXJ5","IHN3ZWV0cw==","PyI7Cg==","PVwiJQ==","X3dvcmtzcGFjZQ==","IERpYW1ldGVy","IGFtZA==","IE5ldWU=","IGRiTmFtZQ==","SmVyZW15","bG9nZmlsZQ==","YXRyaWI=","IEh0dHBTZXNzaW9u","CUNyZWF0ZQ==","aWRkeQ==","LlBBUkFN","IGZpYW4=","IHN6Y3o=","IHFyZWFs","X0VTQ0FQRQ==","dXNhaGFhbg==","LmRpZ2VzdA==","I
GdldFBhcmVudA==","LkRyb3BEb3duTGlzdA==","IHRow6k=","IG1vbnN0cm91cw==","IGJlcmhhc2ls","IiIiDQoNCg==","U3VwcG9ydGVkQ29udGVudA==","IEdhdGhlcmluZw==","aW5jeQ==","LktleUNvZGU=","IGZldHVz","LmNlbnQ=","IGJlc29uZGVycw==","bmlsYWk=","TFRSQg==","IGhpbmdl","UFJPUA==","LmZvdW5kYXRpb24=","bnVtZXI=","LXJhbmtlZA==","6I0=","IHBhaW5mdWxseQ==","ICg7Oyk=","Zm9ybWU=","TGFkeQ==","L2FwcGxl","IENvbnN0aXQ=","IHN0b2NraW5ncw==","5rS7","IG1lbnRvcnM=","PkNyZWF0ZQ==","IEludGVybmFsRW51bWVyYXRvcg==","IHRlbGV2aXNlZA==","VG9rZW5UeXBl","IGJyaWI=","Y3JlYXRlVmlldw==","L0RURA==","R2l0SHVi","KGJpZw==","IG3DoXhpbW8=","5b6u6L2v6ZuF6buR","LmNm","IMKgIMKgIMKgIMKg","PHR5cGVvZg==","IHByb2dyZXNzaW5n","LnNldFdpZHRo","KHR2","IHVuZmFpcmx5","IEFuaXRh","YXJ5YXdhbg==","RGFs","VVJZ","b2dlbmVpdHk=","ZWZh","LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq","IGRlamE=","T1NF","cmFpbA==","cm9vZg==","X3F1b3Rlcw==","PGo=","44Ko","KHNldHRpbmc=","bGV2ZWxuYW1l","X2hhbmRsaW5n","w6lyYQ==","JGo=","IGRhcmxpbmc=","LlBhdGhWYXJpYWJsZQ==","W3NvdXJjZQ==","TWV0aG9kTmFtZQ==","IE91dGxldA==","5pKt","IENvY29h","VWJ1bnR1","IG1vb2ll","IGZsb3JpZGE=","IHJldGhpbms=","IGdldFg=","Z2V0RWxlbWVudA==","IHJhZGl4","IEdhbWVy","ZGVhbGxvYw==","bGVmdEpvaW4=","X1NZTg==","R3JpZExheW91dA==","Imdv","KGVhY2g=","CXNjZW5l","IFB5RXJy","SG93YXJk","LlNpZ25hbA==","IFRFTQ==","IOen","VkVOVE9SWQ==","IHNpbXVs","IDw8LQ==","IHR1cmJpbmVz","IHN1cnRvdXQ=","YWx0bw==","IHVuYXJ5","YA0K","IFNjcmk=","IE1vbms=","IHVuZm9sZGVk","Q29tcG9zaXRpb24=","UFBFUg==","IHNpZGluZw==","Jyx7Jw==","IHRyZWZm","X1VOSUNPREU=","IGRlcmVjaG8=","IHBvbGFyaXR5","IG9yYw==","PERvY3VtZW50","KHRvZGF5","LikKCgoK","IHNlZW1pbmc=","XFY=","PklE","IGZpYm9uYWNjaQ==","KG1hdGVyaWFs","RkxBU0g=","ZGlyZWN0b3JpZXM=","ZXN0ZXJz","VEVDVElPTg==","d3JhcHBlZA==","LXNlbGVjdGlvbg==","LXJlbGF0aXZl","KGNocg==","IHBvcnRmb2xpb3M=","IHNob3dEaWFsb2c=","aW5nbGV0b24=","IFRJQ0s=","IEludmVzdG9y","IGJyYXY=","IFNWTg==","IGhhdGVmdWw=","cmlwcw==","ZXhwaXJ5","X2NvaW4=","PgoKCgoK","IG1hcmd
pbmFsaXplZA==","IGV4Y2VlZGluZ2x5","bmF2YmFyU3VwcG9ydGVkQ29udGVudA==","KGV4dGVuc2lvbg==","IGFkdmFudGFnZW91cw==","Lk1pY3Jvc29mdA==","IGVuc3VpdGU=","LXZpb2w=","X2R1ZQ==","S0g=","IFJvbWFudGlj","aW5hbmQ=","ZWNp","cmVwb3J0ZWQ=","IENvcnB1cw==","IHNwYW5raW5n","IENyb3NieQ==","LkZvdW5kYXRpb24=","XF8=","IGFubm9uY2Vz","QXR0YWNobWVudHM=","4Liy4Lij","IFdheA==","77yB77yBCgo=","IHNhaWxlZA==","LkV1bGVy","CXNjcm9sbA==","IHBlYXNhbnRz","IEJ1aWxkZXJz","LkdlbmVyYWw=","QVJFQQ==","IG1lc3Npbmc=","dmVybg==","IGRpYXBlcg==","IG9jY3VwaWVz","CWxvZ2lu","LkxPQw==","aWdhbnM=","77yB4oCd","X2Zvb3Q=","X3RhdQ==","LXBhY2thZ2Vz","cmVjdXI=","QWx0ZXJuYXRpdmU=","77yB44CN","YXJvbw==","IHRydXN0ZWU=","LDpd","5pa55byP","Pz4+","Lk1pbnV0ZQ==","IGFsY2Fu","IENvbmNlcHRz","Y2hpbGROb2Rlcw==","Q291cnQ=","IGNlbGxhcg==","bGVr","YWtpcw==","QnViYmxl","IG9iamVjdGVk","IO+7vw==","Ol06Cg==","LnBhcnNlRmxvYXQ=","IHNwYXJrcw==","LWZpbmQ=","dmFyaWF0aW9u","SGFjaw==","RmFucw==","X3BhcnNlZA==","RW50aXR5VHlwZQ==","YXVjZQ==","X3RyZWVz","IEVnZ3M=","VUlCYXJCdXR0b25JdGVt","X3RheG9ub215","IFNIT1A=","VHdlbnR5","X2NoZWNrcw==","IExY","dXRzY2hlaW4=","KHBsYXRmb3Jt","IGF1dG9wc3k=","UmVxdWlyZW1lbnQ=","IFJFQ1Q=","dG9Db250YWlu","JywnJQ==","L2VkaXRvcg==","IHFi","IEVFRw==","aHRh","X1RJTEU=","LXN1bQ==","IEFsYnVxdWVycXVl","IHNob3J0Y29kZQ==","IHNpbnVz","IGRlc2tz","IHBvb3A=","Lm9wZW5zb3VyY2U=","IENvbGxhcHNl","LmRlcg==","IGhhd2s=","IFZhbmd1YXJk","IE1hcnJpb3R0","X1RhcmdldA==","IEJhbmFuYQ==","X2F0dGVudGlvbg==","IEFyaWVs","X3Rlbg==","IGJha2Vy","4oCUaGU=","xIXFvA==","dmVsb3BtZW50","RWxm","X2djaGFuZGxl","UmVwdWJsaWNhbnM=","IGl0ZW1CdWlsZGVy","V29u","X2FjY3Vt","IG5ld1Bhc3N3b3Jk","IGRldm9pZA==","IE1hcmt1cw==","ZGFlbW9u","Lkh0dHBDb250ZXh0","S3Jpc3Q=","IGFhbGJvcmc=","X3RyaWFscw==","KGFzc2VydA==","44Gj44Gm","YmVsdA==","IG1pbGRseQ==","ZXJ2b2ly","IGRlc2NlbmRhbnQ=","IEdpb3Zhbm5p","IGRlY2x0eXBl","LVNoaXJ0","IGFwcm8=","QXBwbGllZA==","LmdldFBhcmFt","aG9m","dXJhcg==","IE9CUw==","X3Nlcg==","KHNlY3JldA==","W2xheWVy","IHVzZWZ1bG5lc3M=","IEtvdQ==","X3N1Ym1pc3Npb24=","X0hPUklaT0
5UQUw=","LHRtcA==","Ly4K","IGxlc3Nlbg==","X3dj","X0ZJTkFM","0L3QvtC/","LnRvZG9z","LlhQYXRo","IElEYXRh","IGRvb3JzdGVw","IGNvbXBvc2luZw==","IGh1dA==","IFZMQU4=","IG91dGY=","6K+l","KGJldGE=","KioqLwoK","IEluZG8=","IGtsYQ==","X2NvbmZpZ3VyZQ==","Lk1hcms=","b3NlY29uZHM=","KFZlcnRleA==","b3JnYW5pc21z","IGZmbQ==","IGRlbW9saXNoZWQ=","ICItLS0=","bGVzaQ==","IFNpZG5leQ==","LmdldEluZGV4","Lk1vbmFk","U2VsZWN0ZWRJdGVt","IE5hdlBhcmFtcw==","YXpvbGU=","QUJDREVGR0hJSktMTU5PUFFSU1RVVldYWVo=","X3NlbnRlbmNlcw==","IGluY2xpbmF0aW9u","IEZhdGhlcnM=","YWNjb3VudElk","aGFyaQ==","KT4K","L3Jhdw==","ICcnKTsKCg==","K2w=","KGNk","IHVuemlw","IGdsYW1vcm91cw==","IyIs","IG5hdw==","IG1pbmli","IEJyYW4=","TmFjaA==","X3R3ZWV0cw==","IENDUA==","JSI+PA==","IFN0ZXBoZW5z","bWFzxLE=","J2Vz","IHJlcGFy","X2RvY3VtZW50cw==","LmNsb3NlZA==","LXJpbmc=","L2NhdGVnb3JpZXM=","IERlZXBDb3B5","U1VQ","Lm5ld2F4aXM=","IGdkeQ==","aG9l","IFJlZWY=","IHBvbGl0aWM=","IFJlcXVpcmVtZW50","IHNoZWRz","c2VhbGVk","IHBhdGhvbG9neQ==","Ii8+PA==","bW9kbw==","IHN0ZW1taW5n","IHRhYm9v","IFNhdmlvcg==","IH0NCg0KDQoNCg==","LmN2","IGpvdWV1cg==","IENvcm53YWxs","IFJlY2VwdGlvbg==","IGlsbHVtaW5hdGlvbg==","IGdkYg==","VkVD","b2R1","Q29udGVudEFsaWdubWVudA==","c3RhbnRpYWw=","YmFzZWxpbmU=","X2J1c3k=","LwoKCgo=","IHBsYXllcklk","5qM=","X3BldA==","IE1pcmFjbGU=","dXJlbnQ=","IE1lcmxpbg==","dWJlbg==","IHNldENvbG9y","IGRhcmtlc3Q=","c3Rlcnk=","IGNhcmlj","IHJldGFyZA==","IEhvdXNlaG9sZA==","IGphbA==","IHlw","IiwiIik7Cg==","IEFjZXI=","W1c=","b2xraWVu","YXlv","UHJpdmF0ZUtleQ==","IFNUQVRT","INC90YPQtg==","OicuJA==","IHRoYW5rZnVsbHk=","IGRpc3RydXN0","Z2V0RGVmYXVsdA==","L2ZhY2Vib29r","IENvbnJhZA==","IHV0aWxpemFuZG8=","IEthZw==","L25hbWU=","IGJhbWI=","LkZyb21TZWNvbmRz","IG11dGls","IExhZ29z","IEJsZXNzZWQ=","aWxsZWdhbA==","aWVp","X1RQ","IG1hdGxhYg==","IGN5Y2xpYw==","IHdpdGhoZWxk","IGhvcnJpYmx5","LWhvdXJz","LUhlYWRlcnM=","IG92ZXJsYXBz","IGN1YXRybw==","IGVxdWl0YWJsZQ==","IGNvbG9ybWFw","IHNoaW4=","IFN1aXRlcw==","X2x1YQ==","KHZv","X1JFU1VMVFM=","IFZpa3Rvcg==","RG93bmxvYWRpbmc=","bm9ja
A==","TW9vbg==","IGRlY2lkZWRseQ==","44GU44GW","X1JQQw==","SW50ZXJwb2xhdG9y","IHZhbnM=","e1Q=","X3NwYXdu","IEV4eG9u","X0NhbGw=","IENsYXNzcm9vbQ==","IHNlcm90b25pbg==","IERpcGxvbWE=","YmVkdGxz","IFByb3RvdHlwZQ==","LmV4ZWN1dGlvbg==","IGRhdGluZ3NpZGU=","IEdva3U=","X3Jvb21z","4oCZYW0=","Z3JhZg==","YWNlb3Vz","IGFjY29tbW9kYXRpbmc=","fSwn","LmRpbWVuc2lvbg==","ZXJyb3JNc2c=","CW1lc2g=","RmlsbGVk","LnByZWZlcmVuY2U=","IHNtYXJ0eQ==","X2NvdXBvbg==","IMO2dmVy","IGNvbmNlaXZl","b2Rvbg==","ZGljZQ==","VG9EYXRl","YWRhbWVudGU=","LW1hc2s=","IGVzY2FsYXRpbmc=","4oCmKQoK","SW5SYW5nZQ==","X0Vt","IHV0aWxpemE=","IGxldnk=","PCFb","IEplbm5lcg==","IFJFU09VUkNF","X1NUQVJURUQ=","IHZvbGxleWJhbGw=","IG1nYQ==","IFJvc3Np","Q2hhbmNl","IEVuZGVk","LnVudGls","IGtub2Nrb3V0","X2V4ZQ==","IFByZXNjcmlwdGlvbg==","IENPVU5UWQ==","Lmhy","aWVyc2hpcA==","RVJWRQ==","6ak=","44Gn44Gv","IHBlcsOt","IGltZ1VybA==","ZWN4","IFd5bg==","CVJldHVybnM=","X2V5ZQ==","IEFnaW5n","cXVldWVz","IOWIneWni+WMlg==","LlNlcmlhbGl6ZWROYW1l","LmhvdXJz","IGlzZQ==","LkFjdG9y","5p2h5Lu2","YXBwbA==","VGFu","L2NhdGFsb2c=","L1Jlc291cmNlcw==","ZWxhbg==","KCd7ew==","IGluc24=","IG5vZGVOYW1l","IGNvb2tib29r","JywnPScsJw==","Uk9NRQ==","LnRlbXBsYXRlcw==","ZWN1cmU=","LWtleXM=","IGdsVW5pZm9ybQ==","IGdlw6c=","IFJlY292ZXI=","SURY","IEtyaXN0ZW4=","IHBvbnRvcw==","YD0nJA==","YXJnZW50","IGFycmFuZ2luZw==","6KiY5LqL","IGVybGU=","ZW5lZG9y","KCkpKTs=","w6Zra2U=","IEdpbGxlcw==","In0+Cg==","Lm1vdmllcw==","LXNlbGVjdG9y","LmxlYXJu","IHBvdGVuY3k=","IGZpbm8=","CWJn","IGxlaGV0","IGzDtg==","IGVybQ==","IGFzYmVzdG9z","IGRlc3Rl","IGJsb2NrYWRl","IFJPVU5E","IGxuYW1l","IFNlcGFyYXRl","w6RuZ2U=","IGZ1eno=","CVVO","X25vbWU=","X2xpbmtlZA==","IFNoYXJlUG9pbnQ=","aGF1c2Vu","IGxvYWY=","LWVjb25vbWlj","IGRpZEZpbmlzaA==","eWVu","IGJsYXN0aW5n","IFdlaXJk","SUNMRVM=","IEdGWA==","IHN1ZmZpY2U=","ZWJpbg==","IGFwcHJvdmluZw==","IFJleWVz","IFJUQUw=","aWdsaQ==","X3Rvaw==","b3Jkb3Zh","Q2FybA==","IFBsYXlz","bG9zc2Vu","cGFpcmVk","QUdNQQ==","d2nEhXo=","bGlua2VkaW4=","IGVnYWw=","KHByZWRpY2F0ZQ==","IFJFU1BPTlNF
","IG1pblg=","IGNoYW5jZWxsb3I=","IFJFQ0VJVkVS","IGFzY2VydGFpbg==","IHplcg==","IFdvcmtzaGVldHM=","Tks=","IHZvd2Vs","dmFudA==","VVBT","4oCcLg==","IEhheWRlbg==","IFNwYXJ0YW4=","cmlnaHRz","LmdldElu","IGlubGFuZA==","IE5pbGU=","IFRyYW5zbGF0b3I=","IHJlY3RhbmdsZXM=","QnV0dG9uVHlwZQ==","IFNvbGlj","IHJhZ2F6emE=","L3RhZw==","IGlycmVzaXN0","I0VuZA==","KioqKioqKg0K","IHJlc3RyYWluZWQ=","IGNoaXJvcHI=","L1No","LWZsaWdodA==","Y29udmVydGVk","IHNraXJ0cw==","KGNoYXJz","JHZpZXc=","IGlucHV0RmlsZQ==","Z21haWw=","X0RJQUc=","IG51bWVs","IEdpbmE=","ZWxsdW5nZW4=","IHRheGE=","IGRyaXBwaW5n","PSIiLz4K","IGJvcmRlcmVk","IHRvdWdobmVzcw==","bGVuZXNz","IEJpZWJlcg==","X1dBS0U=","KGV0","IHNhbnTDqQ==","IFRFWA==","X0RJU0NPTk5FQ1Q=","IHBpZW4=","IEZvbnRTdHlsZQ==","X1VM","LXRvdGFs","d29sZg==","IE1hcml0aW1l","IE9QVElPTkFM","LXJlc3Q=","IG1lbWJ1YXQ=","IEJTT04=","X3NpbWlsYXJpdHk=","Lm92ZXJsYXk=","IHBhbGF0ZQ==","IEJyaWRnZXM=","QW5kUGFzc3dvcmQ=","IENoYXZleg==","aGV0dG8=","Lm9mZnNldEhlaWdodA==","IHVuZGVzaXJhYmxl","IGFwbGlr","IC8+XA==","LHRv","IHJlbW92ZXI=","IE1vZGVsaW5n","IHB1cmNoYXNlcg==","IENob29zaW5n","b3BsZWZ0","IG11dGFibGVMaXN0T2Y=","IFNpc3RlbWE=","IElQTA==","aWNrZXJWaWV3","SGFzQ29sdW1uVHlwZQ==","IHNvYmll","dWJlcm4=","IGFsdW5v","IGltYWdpbmF0aXZl","IEludGVyZXN0ZWQ=","KCl9PC8=","IGRpdmVyc2lvbg==","X3Rvb2x0aXA=","LlNhbXBsZQ==","IEZ1dHVyZXM=","Y29udGVuaWRv","IEVJTlZBTA==","KGVuY29kZWQ=","IFNoYXVu","CXBheWxvYWQ=","ZGVr","PllvdXI=","SXNv","VHJhdmVyc2Fs","aWNpZQ==","LmNyb3A=","IEpC","SU5HRVI=","IGV4ZW1wbGFyeQ==","X3JlbHU=","YW5uaXM=","0LXQt9GD0LvRjNGC0LDRgg==","Y2x1YnM=","4oaR","IHNjcmFtYmxl","IFVuYmxvY2s=","IGRvcnM=","IHNoYWNr","IG1pbmltaXppbmc=","IFBhc3Npbmc=","YWRkRWxlbWVudA==","4bud","IHJvb2Zz","IGpjbGFzcw==","Y29yZG92YQ==","UG9zWQ==","KENhbnZhcw==","KGZpbg==","LWxvc3M=","LmJ0bkNsb3Nl","ZG9jdW1lbnRhdGlvbg==","IFJK","YW1vbmc=","TW9z","bGluZ2Vu","IEFndQ==","b2x5bm9taWFs","XTw9","IGRpZmZpY2lsZQ==","IFdpbm5lcnM=","5bGV","U3RyYQ==","IGNvbmdyZWc=","IEVuYWJsZXM=","IFN5bXB0b21z","X3Nn","IFJpZGluZw==","X2hlYWRz","IENvc21l
dGlj","w650","LlNpbmdsZXRvbg==","IE5pY2FyYWd1YQ==","IAoKCgoK","IG3DrQ==","J30sDQo=","IEJvc25pYQ==","Plg=","Ly8qWw==","IHBpbGVk","Y2FzdGluZw==","IGdyw6JjZQ==","IEhlbHNpbmtp","R3Jv","I2Fm","7Iud","IHNvdWhh","IEluZGll","X25lYXI=","IGltbW9iaWw=","LkV4Y2Vs","IHJhZGlhbnQ=","X01C","IEtldG8=","dmVudGFyaW8=","X2FnZW50cw==","VGFibGVWaWV3Q2VsbA==","IFRoZW9kb3Jl","PT09PT09PT0K","LGxpc3Q=","KHNp","aWNpcGF0aW9u","QVJUSA==","c2V0RGlzcGxheQ==","LkZ1dHVyZQ==","IFNUQU5EQVJE","IE9JRA==","IGZyb3duZWQ=","IE1hcmlseW4=","b2xhcmU=","UHU=","IHPDqWN1cml0w6k=","UmVkdXg=","U0NP","CQkJCQkgICAgICA=","cml2","cGVydA==","IHNvZnRtYXg=","IHNlbmF0ZQ==","PWVtYWls","IGVzdGltYXRpbmc=","CXRk","RnVjaw==","IFdhdGVybG9v","IG1leGljbw==","TmV3dG9u","U2Fi","LOKApgoK","IGNlbGVzdGlhbA==","IFFOYW1l","IGdldEFwcA==","Tmll","X3BjaQ==","IFFQb2ludEY=","X2xpc3Rh","Lk5WYXJDaGFy","IENvYw==","S2Fy","IGJ1c3RlZA==","aXphdGlvbmFs","b3VyZA==","X2Nvbm5lY3Rvcg==","IFNla3M=","0L3Rg9GO","0II=","L0xpc3Q=","L2lj","XEZyYW1ld29ya0J1bmRsZQ==","dXh0","IGhlYWRwaG9uZQ==","RVhURVJO","LXJlc2V0","IEdlaWxl","IHRyaWFuZw==","IEFOTg==","IHTDrQ==","IFNQQQ==","IE1hY2Vkb25pYQ==","IGNyaWFy","IGNsaW1icw==","IFNPTg==","IENyaXRpY3M=","IGTDsw==","X1NQTElU","IEJvdW5kYXJ5","X0luc2VydA==","Q29sZA==","LmNyZWF0ZUNlbGw=","X3NhaWRh","LkJMVUU=","QmlnRGVjaW1hbA==","KEJ5dGVz","CVN0YXRl","LS0tQA==","Vmlld1NldA==","YWthaA==","X1JlcG9ydA==","LWNyb3Nz","LmdldEN1cnJlbnRVc2Vy","dWx0dXI=","KEZs","IEltYWc=","Q1Rlc3Q=","7IOd","IHN0YWc=","IG96b25l","IGvDqQ==","cmVwYWly","KSIpOw0K","IHZvd3M=","LkFsdGVy","IEFsZ2VicmE=","IEFoZWFk","Z2V0dA==","LklubmVyVGV4dA==","IFpoZW5n","LnJlYWxwYXRo","IGRpc3RyYWN0aW9ucw==","LGV2ZW50","IElOQ0xVREVE","Lk1hdGNoZXI=","LnNwb3RpZnk=","IGNvbnNpZA==","Lk1hcHBpbmc=","IEZvYW0=","IE5BTkQ=","IGRldmFudA==","XSIpXQo=","TGF1cmE=","IHNhY2tlZA==","X3hvcg==","IHJlYWxtcw==","IFJvYm90aWNz","LlNlZWs=","LiQk","IFJpYmJvbg==","CUhSRVNVTFQ=","IENyZXNjZW50","RUZS","IE1lZGl0YXRpb24=","LmdldFo=","INC60L7QvNC/","anNvbndlYnRva2Vu","Oj8=","ZmFm","VklPVVM=","YWxsYWg=","IH
BpcGluZw==","IG1vZGVybmU=","cG9zdGFsY29kZQ==","IGxldmVyYWdpbmc=","IENISVA=","cGNt","bWFp","IGlQ","QUtFUg==","ZGF0YUdyaWRWaWV3","X2RlcHM=","LWRyaXZlcg==","TGll","ZGlzY2FyZA==","eW50YXhFeGNlcHRpb24=","IGVjdA==","IEV4aGliaXQ=","ICgqKg==","IOuU","Q2hhbmdlRXZlbnQ=","IHN1cGVybWFya2V0cw==","IHNobQ==","cHJvZml0cw==","cGlsbGFy","cmFpc29u","V2F0","IHBoYXJtYWNpZXM=","IG5ydw==","Ly89PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0=","CXdvcmxk","U3RyZWFtaW5n","RGlhbW9uZA==","IEVudW1lcmF0b3I=","IGVucXVpcnk=","LmxhbWJkYQ==","YmVr","Uk9UTw==","IFBkZlA=","IGhpc3Rv","IGdldENoaWxk","L3N0cmV0Y2hy","IEFNQVo=","IEFyZ3VtZW50T3V0T2ZSYW5nZUV4Y2VwdGlvbg==","InVzZXI=","IHNhbml0YXRpb24=","IENsb3RoZXM=","Lm51bXB5","ZmVj","ICMjIyMjIyMjIyMjIw==","0LXQudGB0YLQsg==","X2xw","IGF6dXJl","WFBhdGg=","VmVudA==","TGFib3I=","IG1pc3Rha2VubHk=","IGNvbmR1aXQ=","IEZhaXJmYXg=","Z2V0U3RhdHVzQ29kZQ==","IE1veQ==","TGlzdEFkYXB0ZXI=","ICg/KQ==","R2VuZXJhbGx5","LmlzQ29ubmVjdGVk","dmlkbw==","TW91c2VCdXR0b24=","R2VuZXJhdGlvblN0cmF0ZWd5","X2Rlcml2","IGxla2tlcg==","TWVhc3VyZW1lbnQ=","X0NPT0tJRQ==","ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq","IGNvbXBldGl0aXZlbmVzcw==","IGdhbWxl","IHJldHJvc3BlY3Q=","IEVkdWFyZG8=","IERhdGFTZXJ2aWNl","IGVzY29ydGVk","IFF0eQ==","SG9saWRheQ==","CXJhdw==","bGV1cnM=","QmlydGhkYXk=","IGhlYXRz","LmludmVyc2U=","IF8NCg==","aWxsdW0=","b2thYmxlQ2FsbA==","X21s","TGlrZWQ=","ZW51bWVyYXRl","RmluaXRl","LXByb3A=","QXJlYVZpZXc=","IG1lZGlhdGlvbg==","IGNoYW50aW5n","X05U","X3VuYw==","c21vdXRo","IHBpZ21lbnQ=","UGFzc3dvcmRFbmNvZGVy","IHbDqXI=","IHdhc3Rld2F0ZXI=","LVBhY2s=","IGpvdmVu","YWVz","S1k=","UGludGVyZXN0","IG11c2ljYQ==","bGFjZXM=","IFdpY2g=","KHJvdA==","KGly","IOyCreygnA==","44Gd44KM","X1RIRQ==","Z2V0RmlsZQ==","W3Byb3BlcnR5","IGVuZGluZ3M=","aXp6YXJl","PXRyYWlu","LWxvdmluZw==","IG5vdXZl","IGNvbW1hcw==","IGNhbWJp","IFp1c2FtbWVu","CUV4dA==","KG9ic2VydmVy","Zm9ybWlr","IHF1aW5kaQ==","IEl2b3J5","IEJvbGl2aWE=","YXNhZA==","X2xlZ2VuZA=
=","Q2l0aWVz","X0ZJUkU=","YXNkZg==","LkRlcHRo","VmFsdWVHZW5lcmF0aW9uU3RyYXRlZ3k=","dXBk","LkdldFJlc3BvbnNl","IHVyZ2VudGx5","SW52YXJpYW50","R2V0WA==","IHN0YXR1cmU=","IGltYWdpbmluZw==","YXRlYXU=","TU9WRUQ=","KFRyYW5zYWN0aW9u","X3Bvcg==","UmVmUHRy","Lmdsb2JhbERhdGE=","Z3JhdmU=","aW1lc3RlcHM=","Zm91bmRsYW5k","U2FsaXI=","YXJ0aXN0cw==","IGNyZWF0ZUFjdGlvbg==","IFNhbnRv","INC90LXRgg==","CQkJICAgICAgICAgICAgICAg","LXNvbmc=","IG51aXNhbmNl","IGltcG92ZXI=","XykNCg==","IGNyb3dkZnVuZGluZw==","IHRpbXA=","UGljdHVyZXM=","IGxvZGdpbmc=","6ZKu","YXRhc2V0cw==","44Ot44Kw","cGVyc29ucw==","Y29uZHVjdA==","IGV2YWRl","IGhhdW50aW5n","ICEhfQ==","IExBUkdF","IGtpdHRlbg==","IHVwaGlsbA==","KG1pbnV0ZXM=","IEVtYW51ZWw=","J0M=","IFNreXdhbGtlcg==","cHVycG9zZQ==","X21hcHBlcg==","IGFkYXB0YXRpb25z","LmZpbGxUZXh0","cnVr","IHJlcGVydG9pcmU=","KHByaW9yaXR5","KG1hcHBlZA==","Um9iaW4=","IGVycm9uZW91cw==","IGluaGFs","Qk9WRQ==","KCIsIikK","dWVsbGVtZW50","IGZpbmdlcnByaW50cw==","IFBZVEhPTg==","LWRlbQ==","bGVhbm9y","esSFZA==","IlBlb3BsZQ==","YXNpZXI=","IHBhdHJpb3RpYw==","LmZyZWV6ZQ==","SUo=","IEJhbmNv","IGlzU3VjY2Vzcw==","KHZlaGljbGU=","KExheW91dA==","IGNhcnZpbmc=","X2NpcGhlcg==","IHZlemVz","KCdfJyw=","IEZpcnN0bHk=","IGZ1bGxlc3Q=","IExpc3RlbmluZw==","X3NpZ25hbHM=","ZXdvbGY=","IFNDUg==","IE1lcnJ5","L3Rlc3RpZnk=","X1NBTklUSVpF","aW9jdGw=","SUVFRQ==","PU1hdGg=","IGVucXU=","CWF1eA==","4pml","IGRpc3BlcnNlZA==","aGFyZQ==","YmVybg==","IEFtZW5k","IGluc2lkZXJz","IEFsdmFyZXo=","IFp1Zw==","L2NhbGVuZGFy","IGhldXJl","LXBhcGVy","IHNvZm9ydA==","IHNtaXRo","IHBvYg==","KHJhdGU=","IHNvY2nDqXTDqQ==","IHdvZXM=","IGJydXNoaW5n","cWQ=","b2xvZ3Vl","c29ja2V0cw==","X1lFUw==","LmFkZENvbHVtbg==","IGV2YXNpb24=","U09GVFdBUkU=","YWJveA==","LnlsaW0=","IGVuZ3VsZg==","Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLwo=","IG5nT25EZXN0cm95","IG5vc3Nh","LmxzdA==","KCl9Pgo=","Lmt3YXJncw==","IGNvbnRleHRv","IFBVQg==","RnU=","IGJpZ290cnk=","IGJyaWQ=","IHN0ZXJvaWQ=","IHZpZ29yb3VzbHk=","IGJ1cnN0aW5n","IH
ZlbmU=","IHNhbGFkcw==","IFZBUklBQkxFUw==","IE9uYw==","IGZpcmVFdmVudA==","c2FuZGJveA==","IHRvdWNoc2NyZWVu","c2Fucw==","L0luc3RydWN0aW9u","IGVvZg==","bGVjdHVyZQ==","Py0=","LmxvY2FsaXphdGlvbg==","VkVT","X3ZvaWNl","aXR1cmE=","LnJlcG9ydGluZw==","IF0pOw==","Tm92YQ==","X0NPTVBBVA==","IG91dGJyZWFrcw==","LmNsaWVudFdpZHRo","aWZsb3dlcg==","X0dSQQ==","SW5pdGlhbGl6aW5n","X3BlcmY=","KCl9LA==","PVA=","X0lNRVRIT0Q=","IHRpZ2h0ZW5pbmc=","IHRhYkJhcg==","IEJL","CURvdWJsZQ==","L2hhc2g=","IG1leg==","VG9VcHBlcg==","VEc=","KGluZGVudA==","IHNpbGljYQ==","IC8vLy8vLw==","w7Zr","IGVsdmVz","ZW1wbGF0ZXM=","LkNvbXBhcmVUbw==","IGd1bmZpcmU=","YW5pbWFscw==","IGtlcGFkYQ==","IENQUg==","X0xTQg==","CXZlcnRleA==","INC/0LXRgNCy","LCE=","IGR1bHk=","X1BBVENI","RU5B","CUND","Y29tcG9zaXRpb24=","X3N2","TGJs","amVq","0YHRgtGA0L7QuQ==","LkVkaXRWYWx1ZQ==","5YW3","YW50YXM=","IGJyZWFkY3J1bWI=","IFRlc3Rlcg==","IE1lYXN1cmVtZW50cw==","L0lucHV0","IFJheg==","X1BPTEw=","SW5kZXBlbmRlbnQ=","Lmx1Y2VuZQ==","IE1lY2hhbmljcw==","Y29sb24=","LnN1cmZhY2U=","IHVuYXM=","cmFkbw==","UExJQ0FURQ==","Q1JU","LnNldERlZmF1bHQ=","JUg=","IHJlc3BvbnNhYmxl","IHBlcnBlbmRpY3VsYXI=","IFJlc3Bpcg==","IFR1bmlzaWE=","XEFycmF5","6Lev5b6E","IHBhdw==","IGRlYm91bmNl","KE1QSQ==","INiv2LE=","IGVsaw==","IFJlbGF5Q29tbWFuZA==","L2xpZ2h0","LnNlcmlhbGl6YXRpb24=","QlNJVEU=","KSgoKCg=","IEJpb3M=","X3N2Zw==","KHN1cmZhY2U=","RHVwbGljYXRlcw==","ICg+","X0FTVA==","Lm5pY2s=","IldoeQ==","IEludGVsbGVjdHVhbA==","YWJicmV2aWF0aW9u","ZWFyYWJsZQ==","IGNvbnNlZ3Vpcg==","KEJl","X1BvZHM=","PEFuaW1hdG9y","X1VOREVGSU5FRA==","QVJSWQ==","IC8vfg==","cGVyYXRvcg==","LndyaXRlRmlsZVN5bmM=","QWxz","bGRlcg==","IG1pZWpz","IGZ1bmNz","aW5jaWJsZQ==","IGR1c3R5","IERyaWxs","IGNvbnRpbnVhbA==","IEVsZWN0cm9u","LmVuZW15","KHBi","IHJldW5pdGVk","U21va2U=","LWZhY2Vk","SW50ZW5zaXR5","IFRyZWVNYXA=","IEFyZ3VtZW50RXJyb3I=","LndyaXRlSGVhZA==","IFRSRQ==","U3BsaXRPcHRpb25z","LyoqKioqKi8K","IFw8Xg==","IEludmVzdG1lbnRz","U1VNRVI=","IGRhYw==","QU5J","Llllc05v","KG9mU2l6ZQ==","eXRo","ZWxvYWQ=","IGltcHJlcw==","IGJsb2
Jz","LnJldHJpZXZl","IHR5cmFubnk=","IGNhbmNlbEJ1dHRvblRpdGxl","IGhhY2k=","IENhc2lub3M=","IGRoZQ==","UmV0YWls","IFBvcm5odWI=","IENyaW1lcw==","T2ls","KElTZXJ2aWNl","UmVzaXphYmxl","CVNv","T2Z0ZW4=","IGNvbW1vbnBsYWNl","X0dD","YWxkaQ==","YXRobG9u","KFZpZXdHcm91cA==","KEVtcGxveWVl","IHNhZmVndWFyZHM=","6YCA5Ye6","X0FVUkE=","IHVubm90aWNlZA==","IFRob3Ju","bW9kZWxl","IGFjb3Jkbw==","IFdlbmdlcg==","aW11cw==","ZW5zYnVyZw==","b21iYQ==","Y2nDs24=","Imh0dHA=","X01hdHJpeA==","fHx8fA==","b3JuZWNlZG9y","CUJ1ZmZlcmVkUmVhZGVy","cmVnaXN0ZXJz","cmVsZWFzZWQ=","IGFkZE9ic2VydmVy","IFZhbGVudA==","KEN1bHR1cmVJbmZv","IG1hbm5lbg==","IGJ1cmdsYXJ5","X21pbnV0ZQ==","IGludGVyY2VwdG9y","b2NyYXRlcw==","YXR0cm8=","IFlF","ZXNzbGVy","bGlzdGVuZXJz","L3Byb20=","IOek","dG91Y2hlcw==","RXNw","IEFib3J0","IGZmaQ==","IGNsdW1z","TklM","X1ZJUlRVQUw=","IGxvaW4=","eW5vbWlhbHM=","INec","IGd6","IE5lb24=","SVNJUw==","YW1lcmF0ZQ==","X2F2YWls","IG1heGk=","IGlzQXJyYXk=","Q29sdW1uSW5mbw==","aXppbg==","IHBlcnNv","IG91ZA==","aWFsaXplZA==","eW1p","IGNvbmZpZGVudGx5","PSIvIj4K","LmRhdGFzb3VyY2U=","IHBheWNoZWNr","IEJhdg==","L0JyYW5jaA==","IFRlYXI=","IG1lcnVwYWthbg==","IEJyYWg=","INC60L7QvdGC","74I=","LHBhdGg=","IGRhenpsaW5n","IFVDSEFS","IHByb3Zpc2lvbmFs","0L/Qvw==","IGxlZ2FsaXplZA==","X2FsZ28=","X1JTQQ==","YWx0ZXJuYXRpdmU=","IERFVEFJTFM=","VG9Ebw==","cmVmbGVjdGlvbg==","X1dFRUs=","IENMRUFO","IHNsb2dhbnM=","IOuTsQ==","IFZldGVyaW5hcnk=","aWRm","LmRhdGVUaW1lUGlja2Vy","aWNvbnRyb2w=","KHBsYXk=","IHVsbGFt","ICcpDQo=","IGNoZXF1ZQ==","5a6L5L2T","IHVuc2VyZW0=","IEFyY2hpdGVjdHM=","YW1lbnRhbHM=","IHZtYXg=","IGplbWFuZA==","Q0VFRA==","IE9saXZpZXI=","c2V2ZXJpdHk=","Uks=","RGlzY29ubmVjdGVk","IHdlYXBvbnJ5","dWnDp8Ojbw==","IGJpbmdv","ZG9udA==","X0NIQU5ORUxT","IERhZw==","IGTDpHI=","w6lyaXF1ZQ==","Z3JhZGFibGU=","IENPTVBMRVRF","IHNwYW5pc2g=","IGluc3RydW1lbnRhdGlvbg==","dmFzaXZl","RFJBVw==","IGZwdXRz","IFNwZW5k","IFJlc3BlY3Q=","Q291cnRlc3k=","IHNjaG8=","IHBvc3RhZ2U=","IE1lYWRvd3M=","IHR1dG9yaW5n","ZXJ2bw==","QWJzb2x1dGVseQ==","w6FuZGV6","vZTrk5w=","IFN
IUg==","cGhvb24=","IERlcG9z","PScnCg==","IHBoeXNpb2xvZ3k=","KnRpbWU=","IFRvdWdo","ZG9jaw==","L2hl","KEhhdmU=","IE1vaW5lcw==","U1RZUEU=","IEJyaWRl","IHN0cm9u","IHdvcmxkdmlldw==","IGdyYXR1aXRv","IGFlcm9zcGFjZQ==","IElocmVt","IHFj","IG1hbmlmZXN0YXRpb25z","c2xhdWdodA==","PEFjY291bnQ=","IEluZm9z","YW1iaWw=","X0ZpbmFs","IGFkbWluaXN0cmF0aW9ucw==","IGNvbGxhYm9yYXRlZA==","LmpkZXNrdG9w","b2x1Y2nDs24=","YXNjdGltZQ==","X2FsbG9jYXRl","YXJyaXZhbA==","Sk9S","IHNoYWR5","IHBpbmVhcHBsZQ==","44KP","IHNhdGlu","YnJlcm8=","IExpZXM=","IHRlbnNvcnM=","IEludGVsbGlnZW50","LlNlbGVjdGVkSW5kZXhDaGFuZ2Vk","IHJhZGlhdG9y","YXNzaXN0YW50","JGZpZWxkcw==","CXN0ZXA=","IE1pdGdsaQ==","IEV2ZXJldHQ=","IFNjaGVkdWxlZA==","SG9yYQ==","Il0tPg==","IG1vdHM=","IERTVA==","Zm9udE5hbWU=","IFdhcndpY2s=","X1Rhc2s=","KkM=","44On","b2JlbA==","X0RFVA==","IHNvY2lvbG9neQ==","IEthdHo=","aWNpb25z","b3RsYW5k","YWRvbw==","X3BhcnM=","IHJpcHBpbmc=","aWNobw==","IG51dHJpdGlvdXM=","CWRhbWFnZQ==","S3k=","IGFuY2hvcmVk","IGFydGlmaWNpYWxseQ==","IEp1dmVudHVz","L3Blcmw=","IGV4cHJlc3NpdmU=","eEVF","IEVudW1lcmF0aW9u","Lk1FU1NBR0U=","KGRlZw==","5b+X","IyMjIyMj","ICIiKSw=","a2zDpHI=","XE1haWw=","RGVzaWduZWQ=","IHN0YWZmZXI=","IHNhbHRz","KioqKioNCg==","IOKB","IHNldFRpdGxlQ29sb3I=","RFZE","LldyaXRlQWxs","ZWxsYW50","IGNvZXJjaW9u","IFNvcnRpbmc=","6KiA","IHN0YXJ2YXRpb24=","Ly97ew==","LmhlYXA=","IE1lZGlldmFs","ICotLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t","77yR77yQ","IHdhcmRz","IEhlcmM=","IEhvZ3dhcnRz","LWNvbW1lbnRz","IExhdWRlcmRhbGU=","5rw=","IHJpZnQ=","IHplaXQ=","IHByb29mcw==","LnZpZXdwb3J0","JHN0YXJ0","IEJvdWdodA==","LnJpY2hUZXh0Qm94","IGNsaW5n","ICcqKg==","T3duZXJzaGlw","IEJvZWhuZXI=","KGR5bmFtaWM=","IG1lZGljYWxseQ==","IFdURg==","IE1haW5NZW51","6LSt","IGRpZmVyZW50ZQ==","L3Jlc3VsdHM=","ZW50aGFs","IFdpZGdldHM=","cnVzaA==","IFJNUw==","IFZvbGxleQ==","IHJlbW92ZUZyb21TdXBlcnZpZXc=","IExhZmF5ZXR0ZQ==","IEZldGNoVHlwZQ==","YWNhcw==","IHBhdGhvZ2Vucw==","IE1NTw==","LkN1cnJlbmN5","b2Npb3Vz","IHNwcml0ZUJhdGNo","ZG9s
bA==","IHZhbXBpcmVz","bGF1bmNoZXI=","IHBlYWtlZA==","IGRlYnVuaw==","IEFTRA==","IHVuZXF1YWw=","IHNxdWFkcw==","fS4kew==","bWFuaQ==","IkU=","IEZhaHI=","IElTSQ==","IHVuYXZvaWQ=","b3Bob25l","WzpdCg==","IERpcmVjdGVk","IGJ1c2hlcw==","LmZhaWx1cmU=","IGltbWVyc2Vk","ZXhv","SGlzdG9ncmFt","IEthbm4=","IHBpcmFjeQ==","IENydW5jaA==","IGzDpg==","Ly8i","IG1vbm90","IFNhdW5kZXJz","IFNldmVudA==","KEFic3RyYWN0","IHNtb2tlcg==","cm9uZQ==","LmNsaWVudFk=","ICItIiw=","IEZvdW50YWlu","IGlubmU=","7IOJ","Q3Ry","JGlucHV0","UFJPRklMRQ==","IERvbmF0aW9u","V2l0aEVtYWls","IGZyYWN0dXJlcw==","S2VlcGVy","IG1laXNqZXM=","IGFyY2hpdGVjdHVyZXM=","IEx1bmc=","J2ltYWdl","aGFybWE=","IGFiYW5kb25pbmc=","QUxMRUQ=","c3VidHlwZQ==","cmVpcmE=","IG1vc3M=","IFBhcnNvbnM=","YWtlZG93bg==","PW9iag==","IHN1Y2Vzcw==","IHdlYXJhYmxl","44Kn","IGFkdWx0aQ==","LnVt","IHZpYnJhdGlvbnM=","IHN3ZWxs","IERpc2Nsb3N1cmU=","IFJERA==","cGFpcnM=","YW5nZ2Fu","IG1haW5CdW5kbGU=","IERJTg==","IHJvY2tlZA==","c2hvdWxkQmU=","Lmdi","IElNRA==","IFdO","LGFyZw==","4oCm4oCm4oCm4oCm4oCm4oCm4oCm4oCm","W109JA==","LlNN","IGFsZ3Vucw==","YWRkb25z","X0NvbW1vbg==","X1JFRlJFU0g=","INmB2Yo=","IFRZUE8=","IEVjb2xvZ3k=","IGdsdQ==","LkRhdGFUeXBl","IFByb2Jl","THV4","b3dlZ28=","IHJlaw==","IFBsYWludGlmZg==","YWNoYWJsZQ==","Lm5hbWE=","Km91dA==","fX17ew==","IENBUElUQUw=","5L2G","SW1wb3J0ZXI=","LmNyZWF0ZVNlcnZlcg==","X3Jlc29sdmU=","X0VQUw==","c3RlbGxhcg==","X1Byb2ZpbGU=","CXN3","LW1vbg==","dWRldg==","XFBsdWdpbg==","X01JWA==","IERpc2NyaW0=","LmZyb21MVFJC","IFN0cmFuZA==","QW55dGhpbmc=","cG93ZXJz","XV0NCg==","LlRJTQ==","IGFkZHNsYXNoZXM=","IGVzaQ==","QEJlZm9yZQ==","IHNhaw==","ICcvJzsK","Y29j","xZ/EsQ==","ICkpOw0K","X2Fib3Zl","IEVDQw==","L2NwdQ==","IGNhZGU=","LlN0ZGVycg==","IHBlbGxldHM=","IFBhbGlu","IGfDqW4=","X2phdmE=","IHNhbGFo","IGJlcmdlbg==","X1NXQVA=","IGdpYg==","acOjbw==","X2Rpc3RhbmNlcw==","IENpbmRlcg==","IGFuYXJjaGlzdA==","aW1hdA==","CW1vY2s=","44GX44G+44GZ","T21lZ2E=","IGJhaHdh","X1BhcnNl","LnBhcGVy","CUludGVudA==","cmVucw==","L2dyaWQ=","IGZpbHRoeQ==","LmV2","IyMjIyMK","IHNh
cmU=","IHNvYWtpbmc=","IFJlZ2lvbnM=","X1VTRUQ=","IFNpaw==","aWZpa2FzaQ==","CUVkaXRvcg==","THVjaw==","IOyXsA==","xINt","LiI7","IFppZWw=","IGdyYXlzY2FsZQ==","KEZ1bmM=","44OB","LkRlbnNl","LWxlYW5pbmc=","IGdyYWNlZnVs","R3JhcGhOb2Rl","X0NPTU1JVA==","IENWUw==","IHBsYWlucw==","IHJlag==","cGNpb25lcw==","IHVuZGVybWluaW5n","X2NhdHM=","ZmVi","Q29sbGVjdGlvblZpZXc=","U0VNQg==","IHRodQ==","dGV4dGJveA==","KEFuZHJvaWQ=","IHJpZ29y","IFlpZWxk","LmlzUGxheWluZw==","OnZpZXc=","cmVtYWluZGVy","IFBpcA==","KWluZGV4","IEJlY2tlcg==","dG9Mb2NhbGU=","YXV0b3JlbGVhc2U=","IFJvbWVybw==","LkhhbmRsZWQ=","IENhYmluZXRz","KVY=","IHJ0ZQ==","IEh1bHU=","aWNpZWw=","L2FuaW1hdGlvbnM=","IHByZXN1bWU=","LnRyYW5zcGFyZW50","IHN1Ym1lbnU=","cW0=","aWVydGVu","IHRleHRTaXpl","IHN0YXJ2aW5n","L2pvYg==","QXBhY2hl","IHlpZWxkaW5n","LWFydGljbGU=","Jz0+JF8=","IOih","PFNwcml0ZVJlbmRlcmVy","IFNoaWE=","KToo","IHB1Ymxp","emllag==","IHRlbGVzYw==","IHRlaWw=","TGVnYWN5","IFBsYWNlbWVudA==","KCkpew==","IHRyb3VibGVzb21l","5pif","IHBlcnPDtm4=","X0FzcE5ldA==","PX0=","KHVzZXJJRA==","U3Vz","44K6","LWF2ZXJhZ2U=","IFFJbWFnZQ==","LlN0cmljdA==","dGVib3Jn","LWZ1bmN0aW9ucw==","UkVHSU9O","Pk5ldw==","X2Nob29zZQ==","KGNp","IHVubGVhc2g=","IFJJR0hUUw==","IFNwZWFy","CW1ha2U=","IHR5cw==","YW5lbGE=","IFdY","X01BS0U=","L3NldHVw","IG9uU2F2ZQ==","IGNsaW5pY2lhbnM=","CWJhY2s=","LkxpbmtlZA==","IGNvbnNlcnZl","IGJpdHRlbg==","X3ZhcmlhbmNl","IGxpcmU=","IGluZXJ0aWE=","dWZmbGVz","X01QSQ==","aWRkbGVz","W2Fycg==","LnZvY2Fi","IHNoaXR0eQ==","IG5lc3Rl","c3NpemU=","IEtU","Ymxlcg==","X2xpbnV4","IG1vbmdvZGI=","IElURU1T","S29u","IEJ1cnN0","X3Bob3Rvcw==","Q29sb3JhZG8=","IGFja25vd2xlZGdtZW50","IG9pbHk=","IG5mcw==","IFppb25pc3Q=","IGFkZGljdHM=","IGFkZFVzZXI=","IE1pc2g=","IGtX","IFdhbnRz","KHJlY29yZHM=","b2N1cnJlbmN5","SlNHbG9iYWw=","LmVsYXBzZWQ=","IE5i","IHBwdA==","XERlcGVuZGVuY3k=","Um9s","IMOnYWzEscWf","IGV4cGFuc2lvbnM=","YnViYmxl","IG1pZHRlcm0=","ICcjew==","Y3R4dA==","SVN5bnRheEV4Y2VwdGlvbg==","IFZhbGxl","IENhZGlsbGFj","ICIifSwK","IHNlbXVh","cmljaFRleHQ=","c29mdG1heA==","b2J
qUEhQRXhjZWw=","LmhzdGFjaw==","X2NyaXRpY2Fs","KDw/","ZGo=","IGNvbnNvbg==","IHJvb21JZA==","RE9NQ29udGVudExvYWRlZA==","cGFybXM=","IHplaWd0","VFBM","LW5vdGNo","IG9wcHJlc3NpdmU=","Q29kaW5n","IExlYXZlcw==","KERpc3BsYXk=","LnNpZ25Jbg==","Ly8tLQ==","IE9wcg==","Y3Rh","IG1ldGF2","U2VyaWFsaXplZA==","IHVuYWZmZWN0ZWQ=","IEFUTA==","IEtQ","QXRsYW50aWM=","LHVybA==","LHN0YXRl","IGJpc3Q=","ZW5lZw==","IHNpbXBsaXN0aWM=","IGJpZGRlcg==","IHBlcmNlcHQ=","IGNlbGli","IFRIUk9X","KC9b","VGNw","IGZ1cnRoZXJtb3Jl","LkFjYw==","b3BwYWJsZQ==","5Lik","IFRhcnQ=","IEJlbno=","IGVtYm9kaWVk","KENvbnN0","ICst","UGFydGljaXBhbnRz","IGh0dHBSZXF1ZXN0","YWNjZW50","IFPDvA==","IGhvcnJpZnlpbmc=","IC8+LA==","IGVuYWN0bWVudA==","IFVOSU9O","L2xvZ3M=","IHNjcmVlbkhlaWdodA==","IGV0d2E=","5L6L5aaC","IGHDum4=","5bem","X3RpbWVsaW5l","ICIiKSkK","JzonJw==","Qlc=","IHJlbm92YXRpb25z","IDwK","UGFsZQ==","Pjo8Lw==","U2tlbGV0b24=","IGdldFVzZXJz","X2RhdGFmcmFtZQ==","YWJy","bWF0ZXJpYWxz","JmVhY3V0ZQ==","LkRpc3BsYXlOYW1l","IGh2aXM=","X2xhbmd1YWdlcw==","LnN5","dG93ZXI=","SUZJQ0FUSU9OUw==","IGJhcnJpYw==","IFBsdXRv","YDs=","44OL","Y2VudGU=","I2Fi","IGxleGljYWw=","IEJSTw==","IHJ1bGluZ3M=","SEVZ","LmlPUw==","cmV0dXJuZWQ=","LmJvb2tz","IEh1YmI=","ZW9m","Pj46Og==","IOyG","IGdvVG8=","6ICD","44Go44GG","PEZvcm0=","Y29waWVz","LnF1YW50","IFBvdGF0bw==","IENvdXNpbnM=","IHPDuw==","R292ZXJu","IGdhbGVy","IEZJUg==","X1dpZHRo","IFNoZWxkb24=","LkRldg==","IFJlc3BvbnNpYmlsaXR5","c29uaWFu","IHN1cGVyY2xhc3M=","Yml0c2V0","ZWRkYXI=","IExhYm9yYXRvcmllcw==","IGNvaW5lZA==","IFRlY2huaXF1ZQ==","KENvcmU=","IHNwcmF5ZWQ=","IHBvbmc=","KE5ldHdvcms=","IHJvYXI=","IEVBU1Q=","c3RyYWlu","IG1lbnN0cnVhbA==","b21iYXQ=","IGNhbG1pbmc=","CURpbQ==","X21vdmllcw==","IFJBSUQ=","LWRpc21pc3NpYmxl","IGZyZXVuZA==","LWNoYW4=","IHJlc2lzdG9y","X0NvcHk=","b2NyaW5l","IGVzcGlvbmFnZQ==","Z2Fkbw==","TkRBUg==","IHBvcmNlbGFpbg==","dGhhbG0=","IGBb","IGdyYWRv","0LjRgA==","RE9VQkxF","IGFjY2Vzc2Vz","LkZsb29y","IOKGlA==","IHRva2VuaXpl","YW5hbHl0aWNz","LkNyZWF0ZUluc3RhbmNl","IHN1Y2hl","CWVudA==","aWduZXI
=","INC/0LXRgNC10LQ=","IGNvbmRpY2lvbmVz","LmxpYnM=","Iic7","UERPRXhjZXB0aW9u","IG9uRGF0YQ==","IEF1dGlzbQ==","LWhlbHBlcg==","IHJld2luZA==","IGNvZmZpbg==","44O844K4","IHRyYW5zbWl0dGluZw==","LnNldEFsaWdubWVudA==","IGRlYWxsb2M=","IGFuY2VzdHJhbA==","b2dpZQ==","LkNPTVA=","OmZyYW1l","bW1v","Jzoi","IFJlZ2VudHM=","IGNoZWF0ZWQ=","Lmdn","IHBhY2Vk","IGVzdGFk","b2NlbmU=","bHNh","KGZj","L2dyb3Vwcw==","L21pc2M=","IFNodXR0bGU=","VVBJ","w6Fv","LWN5Y2xl","CXByb3Bz","IHJvdHRlbg==","UmVqZWN0ZWQ=","I2Fj","LnVh","IEFtbmVzdHk=","IHBlbm5lZA==","SU5DUkVNRU5U","PGRpbQ==","LnNldFVw","IFR3ZWV0cw==","IE1hZHVybw==","INmC","IENBY3RpdmU=","CUJZVEU=","KHNlcGFyYXRvcg==","LlJlc2l6ZQ==","dWZmbWFu","c3VwcG9ydHM=","IHVyYg==","IEZvdW5kZWQ=","X2hhcmQ=","IGVjbGVjdGlj","LkZpbHRlcnM=","IFJvdW5kZWRSZWN0YW5nbGU=","X3NhbXBsaW5n","IEpldHp0","YW1lcmljYW4=","Lmludm9rZUxhdGVy","IEJ1dHRlcmZseQ==","KGNvbm5lY3Rpb25TdHJpbmc=","IE5hb21p","IEphaW1l","cnRz","IG1hZ2ljYWxseQ==","Lm1hY2hpbmU=","IEFwcGFsYWNo","Iisi","dmFsZQ==","LW1vdW50ZWQ=","IGFjaGU=","TUo=","IFVJSW1hZ2VQaWNrZXJDb250cm9sbGVy","LUp1bg==","TWFuYQ==","a3JhaW5l","RENG","L1Byb2R1Y3Q=","IFJFU0VSVkVE","IEZIQQ==","OkAiJUAiLA==","IFByb2pla3Q=","IE5pcg==","IENhcm5pdmFs","ICom","IFFT","V0hP","IHdlbHQ=","IG1hcnJ5aW5n","QWxleGFuZGVy","IFJldmlld2Vk","YWN0ZXJpYQ==","IHdhbg==","KHJvYm90","IFdpbmRvd01hbmFnZXI=","IG1vbnVtZW50YWw=","IERvbWluZw==","L3dlYXRoZXI=","X3NlY29uZGFyeQ==","T3BlcmF0b3Jz","X1NJREU=","S2F0","LXpvbmU=","IHNpZ25pZmllcw==","IEh0dHBNZXRob2Q=","L2NvbnRleHQ=","Ig0KDQoNCg==","IFJvZHJpZ28=","IGJ1Yg==","L211c2lj","IHNlcm9udA==","IG1STkE=","X2VtYWlscw==","ICc+Jw==","IEdlbWU=","INGA0LDRgQ==","IH5+","IGR1Y2tz","IEZyZXVuZA==","RXhwZXJpbWVudA==","IHJlb3BlbmVk","IFwiew==","IGVsbGlwdA==","IGNvbmNhdGVuYXRl","IHBvbG8=","VGltZVpvbmU=","ICAKICAgIAo=","IGNhcHRpb25z","cmlja3M=","LmZyZXE=","Lm1lbW8=","IHNtYg==","RHJ1Zw==","XVsv","X0JBQ0tFTkQ=","IEVsbGE=","IFBvcnRpb25z","IGZldGNoRGF0YQ==","IGNvcm91dGluZQ==","IGVzdGF2YQ==","IEdlbml1cw==","OmB+","IFN3YW5zZWE=","KHBheW1lbnQ=","Vm90
cmU=","IFBydWl0dA==","Lm9mZnNldFdpZHRo","YXJ5bA==","IHVuaWZvcm1seQ==","IFdhcnA=","IFNFQQ==","IGRlZHVjdGlibGU=","IGJ1bGxpZWQ=","IEJlc2No","IFByb3NwZWN0","T1NQ","IlllYWg=","IEFuZ3J5","LlZhbA==","IGdpZ3M=","IGJ1bGt5","ZXRlcmlh","LmdldFN0YXJ0","IE1FVEg=","IGNvaGVyZW5jZQ==","IG1lZGlhdGVk","0LXQs9C40YHRgg==","Li4uLgo=","IHN0cm9rZUxpbmU=","bWo=","IFVuc3VyZQ==","YXRocm9vbQ==","KEJpbmFyeQ==","X0tleVByZXNz","5p6E","aW5oZXJpdHM=","IHJlcHJlaA==","CVNjaGVtYQ==","IHVucmVzdHJpY3RlZA==","LmRlZmluaXRpb24=","XT8u","IGl0aA==","5aCx","IHNsaW1l","bXNncw==","X0pT","CVZlcnNpb24=","X1NFQ1VSRQ==","IGNvc3Rv","LlJlc3Ry","Y3Ny","X1RPT0xUSVA=","cGNs","IOKGkw==","U2VsZlBlcm1pc3Npb24=","LnJhdmVs","IG1lbWJyZXM=","QXNzZW1ibGVy","cm9taXVt","c3VyZg==","IFVQREFURUQ=","KGJyYW5jaA==","KGluY2x1ZGU=","IElkb2w=","XE9iamVjdA==","IGNsb25pbmc=","IGlzTmFO","IGFueg==","xrDhu51uZw==","IG9uYw==","X0NMVVNURVI=","IHt9KSwK","aW1pbmFyeQ==","CWNvbnRlbnRQYW5l","dHJhaWw=","IG5pbmV0eQ==","IE5pYWdhcmE=","IEFuZHI=","w6lzeg==","IGRpZmlj","dXRyYQ==","J319Pg==","44Kk44OI","c3Bhcg==","ICJcIiw=","IG15ZmlsZQ==","ZmZj","IG5vdGljZWFibHk=","ZXlh","IFB1dHRpbmc=","SlY=","LmRpbWVuc2lvbnM=","ZXJjYQ==","Z2VuZXNpcw==","ZWZmZWN0aXZl","IHBlcmRlcg==","Lk9S","X0NPTVBBUkU=","Omxlbg==","L3JlZA==","IEFyaXN0b3RsZQ==","IHF1ZXJpZWQ=","IGZvcmVzZWVhYmxl","IFVJQ29udHJvbA==","cmVtaW5kZXI=","IGNlbmE=","IGhpYw==","ICIiOw0KDQo=","L2Jhc2lj","IGFmZm9yZGFiaWxpdHk=","LGVycg==","INGB0LjQvNCy","IElTUg==","bGljZW5zZXM=","Vk9JQ0U=","Lkxhbmc=","LnJlbGF0aW9uc2hpcA==","IGxlbmRz","IG51dHplbg==","IGVzcGVjw61m","aWVuZGE=","PFBhaXI=","VHY=","X1JFVFJZ","IGhvbm9yaW5n","X2RlY2xhcmF0aW9u","KE5P","IEhpY2s=","IG1pbmxlbmd0aA==","IEdlc2NoaWNodGU=","YXBlc2g=","QVRPTQ==","JykiKTsK","ZW50ZXJwcmlzZQ==","Pn08Lw==","IHBvbGl0aXF1ZQ==","ZWRpdGlvbg==","X0RlYnVn","QW5uZQ==","LlNjb3Bl","Y3Rw","Y2Fub25pY2Fs","Pj47Cg==","TWVudXM=","IGZpZXJjZWx5","Lk9uY2U=","IEJvcnJvdw==","IHNvc3Q=","IHNlcnZpbmdz","LWZsYWc=","IHZlc3RlZA==","IGZyb24=","7ZWo","IGZhbWluZQ==","Il0pKXsK","ZXJlw6dv","IGtpamtlbg==
","IEZsb29yaW5n","55CD","b2JzZXJ2YXRpb24=","IHVzZXJEYW8=","PSIiPg0K","Q09WSUQ=","YmFieQ==","IHRyb3VnaA==","IFNlYW0=","IEZpZ2h0ZXJz","b21pdA==","IENoYXJnZXM=","UnVzcw==","IHF1ZWxxdWU=","R2V0UG9zaXRpb24=","IE1pbmlzdGVycw==","X3JlY2VpcHQ=","IHJvb3ROb2Rl","bXVsdGlw","JHNlYXJjaA==","IikpKSkK","dGFrZXM=","ICghIQ==","IEJBVA==","Y2hhbmc=","xJM=","Lm9j","IHNraWxsZXQ=","IFNLVQ==","IEdhbGxhZ2hlcg==","IGNyZXNj","d2Vla2RheQ==","ZXJ2aXNlZA==","Q2FyZENvbnRlbnQ=","LmFjY2Vs","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAK","VGFp","IENvbXBhdGliaWxpdHk=","eENG","X3Jld2FyZHM=","cmRm","QVBQTEU=","LWZlZA==","IGRlcGVuZGVk","LWdlbmVyYXRvcg==","KFByb2Nlc3M=","0LzQvtC2","IGRpc2NyZXBhbmN5","IHBob3NwaGF0ZQ==","TmV0d29ya2luZw==","6K6+6K6h5Zmo","KHJv","IGNvbmN1cnJlbmN5","CWF1dGg=","UGx1Zw==","QVRBTE9H","c3Viag==","L3RlYW0=","KGF2Zw==","b2tpbg==","IHBsZWRnZXM=","IGNvbGxhYm9yYXRvcnM=","IGVtYmFya2Vk","IERvY2g=","IERhaXJ5","Y29tcGV0aXRpb24=","IE11dGFibGVMaXN0","LXNldmVu","IGNvbmN1cnJlbnRseQ==","IFZpag==","IHJlc2V0dGluZw==","ZHBp","IHNsaXQ=","IFBPSU5URVI=","IENBUlQ=","LmRleA==","Y3Vsb3M=","X3BlcnNvbmFs","IGFuYWx5dGlj","I2NyZWF0ZQ==","X21lbWNweQ==","KExpc3ROb2Rl","X1RhZw==","IElycg==","Ij4nOw0K","U2hvcnRseQ==","LnRpcA==","XFs=","IFJlcHJlc2VudGF0aW9u","X0xJVEVSQUw=","LmNibw==","IEthcm5hdGFrYQ==","IENvbXBldGl0aXZl","IFJ1ZQ==","IHJ1bm9mZg==","IFNwZWxscw==","ZmNsb3Nl","Y2lz","RnJh","IHJlbW9yc2U=","IENvbG9nbmU=","IHJhbmdlcg==","IE1vcmc=","ZmlnaHRlcnM=","LlJlcXVlc3RQYXJhbQ==","Q29ycw==","IGRlbm90ZQ==","IGNob3Nlcw==","w6JuZA==","LnJlY3ljbGU=","IExvZ2lzdGlj","IERFQUQ=","LWxvYWRlZA==","IENsZWFycw==","IGtlbGw=","cmFwaGlj","IE1hbmU=","RU1CRVI=","IG1hc2tpbmc=","CWVkaXRvcg==","SGFsbG8=","Omxpc3Q=","IGV0aG4=","LXNlYXQ=","ICopWw==","IEdseQ==","IEFDUw==","CXN0YXQ=","L0NvbW1vbg==","IGRpc2d1aXNlZA==","RmluYW5jZQ==","IEVsZXBoYW50","dGVtcG9yYXJ5","IENhcmx5","IGNvY29z","IEp1ZGl0aA==","IHdyYXBwZXJz","IEx1bmFy","IHLDqWN1cA==","LXNldHVw","IHNpemFibGU=","ICAJIA==","Y2xhc3NpZmllcg==","IGZpZ3NpemU=","IG1hc3R1cg=="
,"IOabtOaWsA==","IFJ3YW5kYQ==","KXQ=","IEN1cHM=","QXp1cmU=","KCl9LAo=","U1BBUkVOVA==","KGRpYw==","IFRleHRGb3JtRmllbGQ=","IGRlZm9ybQ==","IGRpcmVjY2nDs24=","IHlheg==","IGdsdWVk","IGF0cmF2w6lz","Y29mZmVl","IFVwZGF0aW5n","IENvbGxlZ2Vz","w6RsbHQ=","YW5kZWxpZXI=","IHNhbGly","IFNDQUxF","cWU=","6rO1","KHJlY2VpdmVy","bWRi","Im1hdGg=","aXNuYW4=","dGVsZWZvbmU=","UkVQT1JU","LmFkZE1vdXNlTGlzdGVuZXI=","ZHVlZA==","e31d","KCkpOg==","IHdvcmtpbmdz","fSk7CgoKCg==","IGNvbXBvbmVudFdpbGxNb3VudA==","U2VydmVycw==","X0NMT1NFRA==","SVpFUg==","IGJvb2I=","IENPTkNBVA==","IEhhcHBpbmVzcw==","IGNvbW11bmU=","eEFC","b3duZXJzaGlw","X05FQVI=","X0hBUkQ=","IFlB","bGlvbg==","IHNwaWVs","IHRhZ2dpbmc=","IGltbW9yYWw=","LWdyb3VuZA==","IHRodW5r","IGxvY3Vz","IExhdHZpYQ==","aXppb25p","Y2xhcnNpbXA=","IHBhdGllbnRseQ==","XEhhcw==","IHN1Ym9yZGluYXRl","IFdISUNI","ZW50aW9uUG9saWN5","IGRlcGxldGVk","RlNJWkU=","IFss","IEJpb2dyYXBoeQ==","IFNhbmRz","U0hBUkU=","Q2hhcnNldA==","LndyaXQ=","X1NVUw==","IE1vcmVubw==","IGJyb2Njb2xp","IFZY","YW1pY3M=","LkdldFVzZXI=","IENvbW1vZA==","LnNjaGVtZQ==","KHZz","IGFuYWxvZ291cw==","UHN5","PWxpbmU=","LnB1Ymxpc2hlcg==","IG9ud2FyZA==","0LXQutGB","IERlYWxlcnM=","IHRvQXJyYXk=","IENob2ljZXM=","0JTQvtCx0LDQsg==","IGRlZmF1bHRNZXNzYWdl","IGFncmVn","IENvbmNhdA==","SFY=","IENpcmN1bGFyUHJvZ3Jlc3M=","X3N2Yw==","VEFC","X2ZpbA==","Lk1hcFBhdGg=","emJ1cmc=","IGdldFByb2R1Y3Q=","IFZFUklGWQ==","Lk1vbmdv","IHB1bmRpdHM=","cHVsc2U=","bGljdGluZw==","Z2lhdGFu","IC4uLiI=","IGZpeg==","IGFudGlt","IENoYXR0","X1RZUEVERUY=","R3V5","CXRlc3Rz","IFNsb3Zlbmlh","IENvbW1hbmRMaW5l","IGJlbmVmaWNpYXRpb24=","IGJpbmRBY3Rpb25DcmVhdG9ycw==","TlRBWA==","LUNz","IGNoYXJpc21hdGlj","LmFsbG9j","X25m","IGFzc2F1bHRpbmc=","INGC0LDQsdC70LjRhg==","IGPDoWM=","IFNjcm9sbHM=","SEFT","eXl5eU1NZGQ=","IEdhbGU=","IFByb3plbnQ=","IFRob3JudG9u","ZGVhbGVy","IGV2aWN0aW9u","IGFuYWxl","4oCO","PSIo","IGVhZw==","KCcnKTsKCg==","IGNvbnRlbXBsYXRpbmc=","aHlw","YmVsdW0=","IEZpdHM=","IEV4YW1pbmVy","IEJ1Y2M=","IG1lbWJyYW5lcw==","IGJyaWxsaWFudGx5","IENlcmFtaWM=","w6h2ZQ
==","IFBvdW5k","IHRyZWFzdXJ5","LicpOw0K","CXRj","ZWNha2U=","Q3VycmVudFVzZXI=","LmhhYmJv","IHRyZWFzb24=","IEZUQw==","TVVY","IG51bWJlcmluZw==","UklB","LS0pDQo=","IGJlaWdl","IEFydGVt","YmFzZXM=","X0JBTkQ=","IFBhdmVs","0YHRgtGA0YPQug==","dGhlZA==","X25icg==","INCx0LDQtw==","c2xpZGVVcA==","IFRheGk=","IGFxdWVs","IE1pc2NlbGxhbmVvdXM=","ZWx1","IGluc3VsYXRlZA==","IGFzc2V6","LkNvbmZpZ3VyZQ==","IHF1ZWxsYQ==","IHBhcmFzaXRlcw==","QXdheQ==","ZHVjaWJsZQ==","KCc9Jw==","IHZlcm8=","IFdhdGtpbnM=","IFNlcGFyYXRvcg==","YXBzZXM=","ZW52aXJvbm1lbnRz","IGFwcHJhaXNhbA==","cGF1c2Vk","X2RlYXRo","IHNpdHVhY2nDs24=","IGZyYXRlcm5pdHk=","IGluc2lzdGVuY2U=","X2NyeXB0bw==","QXR0cmliUG9pbnRlcg==","Il1dLAo=","IG94aWRhdGl2ZQ==","IG5ldXJvbmFs","IFFHcmFwaGljcw==","Ij4nLA==","IFNtaWxl","T2JqZWN0aXZl","IFNha3VyYQ==","Wk8=","YW1pZW50b3M=","LkxvY2FsRGF0ZVRpbWU=","L3VuaXQ=","LWZyZXF1ZW5jeQ==","LUNT","In07Cgo=","IHJlbGV2","QWxsb2NhdGlvbg==","JU0=","IER1c3Rpbg==","IHN3aXBlcg==","IE5hcmM=","dGF0dXM=","IGxvbmdpbmc=","IHRodWlzb250dmFuZ3N0","IGNvbW1vZG8=","IEFEQQ==","aW11","X2ZvcnVt","YW5naQ==","CUFwcGxpY2F0aW9u","W2Zyb20=","IEJldGhlc2Rh","b3Ryb3BpYw==","IE1VQ0g=","IHByZWRpYw==","ZmlsbWU=","KGdyYW1tYXI=","KEFQUA==","IEN1cmw=","IHNob3J0aGFuZA==","YWZmaWxpYXRl","XSoq","X250aA==","aWFiaWxpdHk=","Ym9tYg==","WVQ=","KCItLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ==","IEJpY3ljbGU=","aW1hdGluZw==","Lm5paQ==","IEthcmE=","YXNrYW4=","cmVhY3RzdHJhcA==","IHdsYW4=","b2dyYXBoZXJz","CSANCg==","cGFnaW5hdG9y","aWhhbm5h","IG1hdGNodXBz","X1BBRERJTkc=","X3JlZ2lzdGVycw==","eXRl","IHByaWNleQ==","IGZvb3Ro","IEh1Y2s=","UEFSVE1FTlQ=","IHByb2hpYml0aW5n","LmlzRGVidWdFbmFibGVk","4KS4","bGVpbg==","PXJlcw==","LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg==","ZGRs","bXBy","IOqwmQ==","IFdBTEw=","IHJldm9sdmVz","IFBFUkY=","KTt9","IFRvYnk=","Ly4uLw==","IGthbw==","IGZvcmVjYXN0aW5n","X0NvbnRlbnQ=","IH0pKSwK","cG9ybm8=","bGVhZGVycw==","LWhvb2tz","aXN0cmlidXRvcg==","L3N0b3J5","CWxpbmVz","LXJlcGx5","IGFkcmVuYWxpbmU=","Rmxvd0xheW91dA==","L
nJvdXRpbmc=","CXRpbWVvdXQ=","IHJhaWRlZA==","CURE","IGRpc2RhaW4=","Y29uc2lzdGVudA==","Z2Vpc3Q=","KCI6Lw==","KHN0YXRlcw==","IEhJVA==","LVJheQ==","LWhlYWx0aA==","IC8vLQ==","dGVtZW50","Lm5hdmlnYXRlVG8=","IGJlbmNoZXM=","ZXdpbmc=","ZW56aGVu","LXNwbGl0","UmVqZWN0","IHB5bGFi","IGZsYXNobGlnaHQ=","IGluaXRpYXRpbmc=","IE9FQ0Q=","IGVudHJlZ2E=","TmF0dXJl","Lm9yYW5nZQ==","IMO6bHRpbW9z","IGVjcw==","LmhvdmVy","IGRlbHV4ZQ==","Um9nZXI=","IFRpYw==","IixfXw==","IHBsYWNlaG9sZGVycw==","IHNwYXduaW5n","IG51cnR1cmU=","IGV4Y2hhbmdpbmc=","Q3JlYXRlRGF0ZQ==","IGxhbWlu","IFNlbWljb25kdWN0b3I=","ICovCgoKCg==","IGbDuHJzdGU=","IGluaXRpYWxz","IHByb3ZlcmI=","IEFjdHJlc3M=","Q29uY2F0","IE5pY29sYQ==","LXNob3BwaW5n","aXZpdMOg","aXRpYW4=","IFdlcnQ=","LkFkZFNjb3BlZA==","IHNhbGVzbWFu","Ym9z","IEZlcnJ5","Q0VOVEVS","bW9kZWxv","IFJvZQ==","IElzbGFuZGVycw==","dXBlcnRpbm8=","RGVjbGFyZQ==","IHZvd2Vscw==","IGJveGVy","KHRvb2xiYXI=","IGhhbGZ0aW1l","bmlu","IEJyb29rZQ==","IFZlcw==","0LvQsNGC","IG1vdGl2bw==","cHJvdGVpbg==","a3Vz","YnVzeQ==","IHN0cmluZ1ZhbHVl","CU15","TnV0","dXp6aQ==","IHNleg==","IG9sZHM=","IG1ldGh5bA==","IGLDvA==","aGliYQ==","IEluc3BpcmF0aW9u","IGF3YWl0ZWQ=","QnJ1Y2U=","QkFMTA==","IFRSWQ==","LWxpdGU=","IHVuZGVyZXN0aW1hdGU=","CXJ2","Lm1vdg==","IGhpc3TDsw==","IEVyaWU=","Y25hbWU=","L2Nvbm5lY3Q=","Y29uZmVyZW5jZQ==","X3RyYWl0","IGt2aW5kZQ==","IEludm9jYXRpb24=","IERhdGVUaW1lT2Zmc2V0","d2VjaGF0","Q0VP","IExpYnlhbg==","LmNhcGl0YWxpemU=","IGdyYWNlZnVsbHk=","IHJlZWxz","aW5jcmVhc2U=","Lm1heGNkbg==","ZmF2b3JpdGVz","SVRFRA==","PFNjYWxhcg==","LkZldGNo","IHN1c3BpY2lvbnM=","W01BWE4=","X1RSQU5TQUNUSU9O","IGN5bGluZHJpY2Fs","Lm5leHRFbGVtZW50","IG1vcnBob2xvZ3k=","IENlZA==","IGNuYW1l","KHJhd1ZhbHVl","V2Fsa2luZw==","TG9hZHM=","X0FMSUdOTUVOVA==","X1JPVU5E","IFJPQ0s=","Y2x1c3RlcnM=","Img=","dWV1cg==","cGxhbnM=","IGF0aGVpc3Rz","IHZhdA==","PSJfXw==","YXdhaA==","ZXJ2YXRpdmVz","IGZpbmRPbmU=","IG5vdGVib29rcw==","IFRUTA==","LkdldEFzeW5j","IG3DvG5jaGVu","bUFo","YnJ0Yw==","X1BZ","QnVpbGRlckludGVyZmFjZQ==","CWdiYw==","IGJsYW5rcw==","IGTD
qW0=","UmVjdXJzaXZl","Lk1hbnlUb01hbnlGaWVsZA==","X1BBUlNFUg==","IGVuZGVhdm9ycw==","IGRyaWI=","X3BocA==","IGF1dG9tb2JpbGVz","bG9pdA==","IE9ydGl6","IFVE","KGRBdEE=","IE1pdHN1YmlzaGk=","QXR0cmlidXRlVmFsdWU=","IHBvYXRl","55u45YWz","IGNhdmFscnk=","Lk1hdGNoZXJz","IGluZ3Jlc3M=","IEplaG92YWg=","CXNlcQ==","X3N0cmVldA==","IFNvZmlh","IHNjcm9sbHM=","dmluY2Vz","ZWxlY3Ryb25pY3M=","XHBhcmFt","IHplbmQ=","IHNraW0=","LnBpeA==","ZW5r","X2FyZWFz","IEJvaXNl","LXZhbGlkYXRvcg==","IHVuZWFydGg=","b2ZpbG0=","IEJDRQ==","b3Zza3k=","IExldmVy","IHBvbGljZW1hbg==","IG1pZXM=","IFBvcnRyYWl0","IHBvdGlvbnM=","X21vdA==","bWFzc2FnZQ==","0LXQvdGL","IGN1ZA==","IG1hbnVzY3JpcHRz","Y29udGludW91cw==","LnRj","w7x6","IEZyZWV6ZQ==","Xzoq","Lmht","IENTUkY=","IE3DpGRjaGVu","LXBlZXI=","IHB1dFN0ckxu","IGltc2hvdw==","IEB7JA==","IEJhdWVy","KHRvbHVh","IHdyb3VnaHQ=","IEdpYW4=","IMO2bg==","ZnVuZw==","QnV0dG9uVGl0bGVz","fSkiLA==","IE11cmRvY2g=","S1c=","IFJlcG9ydGVk","c2ll","IG1laWxsZXVycw==","IEthZXBlcm5pY2s=","IGRzcA==","IEV2ZXJ5ZGF5","cmVuZHM=","IENvbmNl","IGluY29udHI=","LnJlbW92ZUF0dHJpYnV0ZQ==","44G+44GX44Gf","IHJldw==","IFByZXNlbmNl","L2dpbg==","LkNsYWltcw==","CXNs","RHJhZ2dpbmc=","IHNwcmVl","IGFjdHVhbGl6YXI=","IG5vc3M=","IGxpZmVzdHlsZXM=","O2M=","VURHRQ==","SW5NaWxsaXM=","IGl0aw==","YWJieQ==","KHBh","aXNzZW50","IFByZXNpZGVudHM=","IEhleGF0cmlnZXNpbWFs","ZWNpZGVk","KHRleA==","IGNyb3duZWQ=","UGhpbGlw","IFNhcms=","IEFkZGl0aW9u","IENvbGJlcnQ=","IEdMRVM=","IFFMaW5lRWRpdA==","IGRyYWlucw==","IHNvcnRPcmRlcg==","ZXNjb3J0","VGVk","IG1hbmlmZXN0ZWQ=","LnZhcmlhbnQ=","IFJFRkVSRU5DRVM=","KGdj","L3sk","b2N5dGU=","IG9ybmFtZW50","IGJvb2tzdG9yZQ==","SG9s","IFZhbGw=","Lycp","YWNhaw==","IE5hdkJhcg==","IG55ZQ==","X0RlYw==","b2x2aW1lbnRv","TVJJ","IGhvb3A=","ICAgCiAgICAK","IFBvc3Rpbmc=","IG91dGxpbmluZw==","YWdhc2Nhcg==","LmJyZWFrcG9pbnRz","Y2F0aWQ=","X3RyaWdnZXJlZA==","IHJ1bm5hYmxl","L3RydW5r","LWNoYWly","IGJhaXNlcg==","ZmFjaWxpdHk=","IHBvbGxlbg==","6Z+z","IFtbIg==","IENHU2l6ZU1ha2U=","IGFzc2FpbA==","IEF0aGVuYQ==","IEFkZGljdGlvbg==","aWxhbm
Q=","O2Jy","LktleWJvYXJk","X2Zt","QWNl","IFJFUQ==","IE5ld2VzdA==","Oy4=","IE1BREU=","c2V0VGltZW91dA==","U2VydmxldENvbnRleHQ=","CQkJCQkgICAgICAg","IEx1cA==","LXJldmlld2Vk","IEFuYWx5emVy","Lk5hTg==","dXR1cmE=","R2VvbQ==","eW1lcw==","X3Npbg==","IHRydXN0ZWVz","Ly89PT0=","IGFkbWl0dGVkbHk=","IGFrbw==","IFVFRkE=","X2hlcm8=","R2l0aHVi","X2VzdGltYXRl","IGNvcnJvYm9y","ZW50aWZ1bA==","IFN0ZWVyaW5n","IE1pdGFy","IFBpcGVz","IGvDpQ==","X3NlYXNvbg==","IEJDSFA=","L3NvZnR3YXJl","bmV0dGU=","KiIs","dW5kcmE=","IGdldFJlcXVlc3Q=","LkJ1ZmZlcmVk","ZmVybg==","TWFyaW8=","IGRpc3BlcnM=","X2NhdGVnb3JpYQ==","IGVuZGxlc3NseQ==","Z3VhcmRz","CWF0b21pYw==","c2NvcGVk","IHVuZG9uZQ==","U0hPUA==","IFRvcmNo","IEhhc3Rpbmdz","IEZJTEVT","X1NhdmU=","V2l0aE1hbnk=","V2lz","IGludGVuc2lmaWVk","LmFyZ3VtZW50","IEFwaVNlcnZpY2U=","IEpTSW1wb3J0","ZWtp","SW5zdXJhbmNl","c3R5","LmRzbA==","IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQo=","bHRyZQ==","U0VH","RFJBTQ==","LWJsb2NraW5n","0L3QtQ==","cGlyaW5n","IFBSRVM=","IEZhY2g=","IHNhcmM=","IFNNRQ==","IEVsZW0=","IENhbGlmb3Ju","VW5zYWZl","IENvbXBvc2Vy","KGRlcA==","IEF0dGVuZA==","ICopKCg=","IHRlYXNlZA==","IEFUSQ==","KHBt","ICIoXDw=","J10r","IHNlY3Rhcmlhbg==","IFBoYXJtYQ==","RUk=","CVRva2VuTmFtZUlkZW50aWZpZXI=","w6d1","IGF1Z21lbnRhdGlvbg==","IHNhamE=","IGNvbG9yZQ==","ZGVhZGxpbmU=","LklURU0=","IFJpeQ==","bWFhbA==","CWNsaWNr","UGVybWFuZW50","SG91c3Rvbg==","UmVzcG9uc2l2ZQ==","IEVyZ2Vibg==","ICIlIg==","LnRvT2JqZWN0","CXBpZA==","LlN1Ykl0ZW1z","IFsr","IGZ1bmd1cw==","IGJyb2NodXJl","IEFwcHJveGltYXRlbHk=","IG1paw==","dmVsb3Blcg==","IHBhZ2FtZW50bw==","5Yqo55Sf5oiQ","IGN5dA==","IFRlbXBs","ZW5pYWJsZQ==","IENvbmFu","IHNldGJhY2s=","b2JsaW5z","IE5UTg==","b3NzYWw=","VkVSQk9TRQ==","LmJpbw==","IMWe","4buf","IEdyaXA=","PCo=","VFJJRVM=","LmNob29zZQ==","UGhvZW5peA==","IHByb3ZpbmNpYQ==","TUZMT0FU","Q2Fycw==","IHJldHJvc3BlY3RpdmU=","IGFnb255","IGxsZW4=","IGJ1bXBlZA==","eWxhdGlvbg==","IHdhcnRv","IHRvZGRsZXJz","bGF2","KHBhdGllbnQ=","ICgpLT4=","Y2xj",
"IG9uQWN0aXZpdHlSZXN1bHQ=","IGVtdWxhdGlvbg==","IGJ1bGxk","X0FVVEhPUg==","Pk8=","L3F1","IMK2","CWhy","c3RkQ2xhc3M=","IHNwYWNlcg==","VHJhbnNsYXRlZg==","LmFkag==","Oml0ZW0=","IGV4aGF1c3Rpbmc=","cGx4","IHJldml0YWw=","xZtuaWU=","IGNhbGlmb3JuaWE=","c2V0U3RhdGU=","L3RhYg==","aW5kc2lnaHQ=","X0xldmVs","aW1pbGFy","Lm5hdmlnYXRvcg==","IHRlbXBlcmFtZW50","IGRpZsOtYw==","IGluZXhwZXJpZW5jZWQ=","IGltcHJpbnQ=","IFJlc2lzdA==","X0ZPTExPVw==","IFJldHJ5","IGVuZ2FnZW1lbnRz","Q2FuQmVDb252ZXJ0ZWQ=","IHNpbmdsZWQ=","Lmljb25z","IGNvbmRvbXM=","IEZlYXRoZXI=","bGVybmVu","KWI=","IE5wZ3NxbA==","IENvbnNvbGlk","cGVrdA==","56uv","c3RyaW5nVmFsdWU=","R2Ft","IFNpbmFp","IE9iamVjdFR5cGU=","X2lucA==","IHBhcnRp","IFdhdGVycHJvb2Y=","IGNvbGxpZGVk","IGFpcnM=","L3dvcmxk","L1NlYXJjaA==","X3N5bnRheA==","xZ9p","X2Fubm90YXRpb25z","IFRhY28=","TEFU","IE9wY29kZQ==","44CC4oCdCgo=","IGxlYXNo","IEFsaWNpYQ==","77yM6buY6K6k","IFRTQQ==","IGhvdHRlcg==","X0hhbmRsZVR5cGVEZWY=","Z2luYXM=","IGluZGlmZmVyZW50","Q3VzdG9tTGFiZWw=","kZA=","b2R5bmFtaWNz","T25VaVRocmVhZA==","IENhcmE=","LmRldmljZXM=","IEZvcmVpZ25LZXk=","PicpOw0K","LmJ1dA==","LnRpZg==","IOaWsA==","IE9rSHR0cENsaWVudA==","KFRleHR1cmU=","LlNPQ0s=","KGluc3Ry","bWlzdA==","VW5uYW1lZA==","U3I=","Km51bQ==","KE5VTQ==","KioqKioKCg==","L2hlbHA=","YmVlbGQ=","LmFkanVzdA==","X1Bhcm1z","X0FOR0xF","VFJFRQ==","IGVzdHVkaW8=","d29ya3NoZWV0","Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tCg==","QWR2aWNl","w7bDn2U=","bkVudGVy","YcSH","IGFnZWluZw==","IEt1cmRpc3Rhbg==","X1JUQw==","YmFua3M=","LlVS","IGluY2FybmF0aW9u","IGdsYW1vdXI=","IOOCuQ==","IGltcGVyaWFsaXNt","7J6F64uI64uk","IHNpZGVsaW5l","LkFycmF5QWRhcHRlcg==","IyMjIyMjCg==","IFN5cmlhbnM=","IEF0dGVuZGFuY2U=","LWVzcXVl","IGdyZW5hZGVz","X3Fvcw==","T1ND","X2Rvb3I=","LkNhcA==","REFM","IGFtYnVzaA==","CWVz","VG9Kc29u","TWFudWZhY3Q=","RW1lcmdlbmN5","IFFGaWxl","IOWV","CUxQ","5pCc57Si","IEdhcmxhbmQ=","LmNvbm5lY3Rpb25z","LlJlYWRGaWxl","IEh3eQ==","4oCUZXZlbg==","eERF","IG5vdXZlbGxlcw==","IEh1c3M=
","RGVwb3NpdA==","X2ZvcmVpZ24=","YWJhag==","IFBveg==","ZGJ1cw==","IGlvZA==","w5cKCg==","IENoZWVycw==","SmVzc2ljYQ==","IHNhaXNvbg==","IFB0eQ==","Ij48IS0t","aW5vYQ==","ZXhjbHVkaW5n","IGJpdHRlcm5lc3M=","dWVsaW5n","UHJvdGVjdGlvbg==","IEJlcmdlbg==","CQkJIAo=","QkVM","IFRvYmlhcw==","IHVwZA==","67KE","IGZvbGlhZ2U=","X1BVUg==","IEFkdm9jYXRl","IG9uUmVxdWVzdA==","LnBhcnRpdGlvbg==","IERldmVsb3BlZA==","IGNyaWI=","0YHQutC4","dm91Y2hlcg==","IEludGVyc2VjdGlvbg==","IG5pZWNl","IGxr","IENhdWN1cw==","KFsNCg==","IERldGVjdG9y","L2xn","IEhlZGdl","IHNsdWdn","YW5nc3Ryb20=","IENvbnRyb2xsZXJCYXNl","CXl5","LnBw","IEtsaW5n","IExUUw==","4oaT","YXJyYQ==","Z2V0SlNPTg==","X3dlYnNpdGU=","IGlkaW90cw==","IE1lZ2hhbg==","QnV0dG9uTW9kdWxl","ICU+","IHByb2plY3RpbGVz","c3dvcmQ=","ICAgIAkJCQkJ","IGFzc2Vz","IFN1Y2hl","IGtlZA==","csOhZg==","IHNhcsOg","TEVuY29kZXI=","UkFORA==","IFNvbWVob3c=","IFNhbGE=","IG11bHRpbQ==","IG51bVJvd3M=","IFJvY2tpZXM=","IHhk","IGRpc3Byb3BvcnRpb25hdGU=","CVJUTEk=","CVVSTA==","YWdsaQ==","IFN1YkxPYmplY3Q=","IEdyYXZlcw==","X3JlZ3VsYXJpemVy","X2NoYXJhY3RlcnM=","LmFuYWx5dGljcw==","Lm1vZHM=","IGltcHJvdmlz","IEJsb2NrUG9z","X2luc3RhbGxlZA==","X0NPTlRJTlVF","L2Rvd24=","U09D","LmFwaVVybA==","LlVzZXJTZXJ2aWNl","VHJlZXM=","5oqV","X292ZXJmbG93","YXVzYWw=","Ym94ZWQ=","Jgo=","IEphY3F1","X3Vzcg==","SU5UUg==","IHNpZ25hZ2U=","IGNvY2g=","Tm9ybWFsaXplZA==","CgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgo=","IHN1c3RhaW5pbmc=","IFNjcmFw","cHJhYWs=","LWF2YXRhcg==","LndlYnNpdGU=","KGd1aQ==","PXJlc3BvbnNl","KG9wZXJhdG9y","IGVmZm9ydGxlc3M=","IEFjdGlvbkJhcg==","RkZF","56uL","CVJlZ2lzdGVy","QVJTRQ==","KW4=","IE1PU1Q=","X1NQUg==","X0NISVA=","YXNk","IHRvcExlZnQ=","IFR4dA==","0LDQttC0","LlZvbHVtZQ==","IGlubGV0","IGZyYWN0dXJlZA==","IExvbmdpdHVkZQ==","IERyYW0=","LkNvbm5lY3Rpb25TdHJpbmdz","YWJlZQ==","cGVyYXRl","am5p","YHQ=","ZmluZ2Vy","IEplc3NpZQ==","LGxs","IFJ1ZHk=","IGdlbmVyb3VzbHk=","X0NPTlZFUlQ=","IGVpdXNtb2Q=","IERhaQ==","aW1hZ2lu","IEdPYmplY3Q=","IMSRw6M=","aWRpb3Vz","cmlkZ2Vk","IHNvcHI=","0LvQsNC0","IHN0aXRjaGluZw
==","IGtyYg==","CiAgICAgICAgCiAgICAgICAgCg==","IGxhdmlzaA==","IENpdg==","U3RhcnRFbGVtZW50","IExvbA==","CXV0aWw=","J11dLg==","IE1hbGF5","IC4NCg==","548=","X0ludm9rZQ==","aXZpc3Q=","RGVwZW5kaW5n","KSI7DQo=","IHRvZnU=","IE1DUA==","IHN0b2NraW5n","IGNhdGhlZHJhbA==","IHF1YWRyYXRpYw==","YWxlemE=","Lm1vdmVUb0ZpcnN0","Q29sb3JCcnVzaA==","IEVyZWN0","IFJDUw==","OmJlZm9yZQ==","PW5vZGU=","IHByb2Jsw6htZQ==","X3Jobw==","IHN2ZW5zaw==","Um95","YmFzZVBhdGg=","IGtvbmQ=","INC10YHRgtGM","Z2V0U2luZ2xldG9u","IERTTQ==","SWFu","IGh1bnRlZA==","IFRlcnJhY2U=","IGNoaWxkY2FyZQ==","IGNvZWZmcw==","IGdyYWRlZA==","IEx1Y2lh","IGpzb25PYmo=","YWJsZU9iamVjdA==","VmF1bHQ=","w61zdGljYQ==","X3BhZ28=","X1BG","YW5kcmU=","IEFuYXRvbXk=","LkpDb21ib0JveA==","b3VyZQ==","IGdlbm90eXBl","YmVuY2htYXJr","IGJhaWs=","IFF1w6liZWM=","KCkpDQoNCg==","IGt1bm5l","IFBvc3NpYmx5","IEJlaXNwaWVs","IGNvbmRvbGVuY2Vz","PXF1ZXJ5","IHbDtQ==","IG51ZXZhcw==","IEFwb2NhbHlwc2U=","dmVjdGlvbg==","CXNwcml0ZQ==","bGV2YXRvcg==","LiJdCg==","Z2V0TmV4dA==","KFJlZ2lzdGVy","IHVuc3Vi","dHJlZXZpZXc=","Tm9kZUlk","IOyK","JikK","Zmx0","IGhvdHNwb3Q=","IGdhc3Ryb2ludGVzdGluYWw=","ZmlnY2FwdGlvbg==","b3dlcmVk","IENzcw==","X3Jvcw==","X3NjYWxpbmc=","IGVkaXRhcg==","J11dKTsK","Lm5lZw==","IGZ1dHVyaXN0aWM=","IHN0YXRh","dWN0b3I=","VUxBVEU=","IHfFgg==","LWNoYXJhY3Rlcg==","ICAKCgo=","IEJlYXU=","IHBlcm1hbGluaw==","Qnl0ZUJ1ZmZlcg==","IGRpY3RhdGVz","IE1MQQ==","X0xvZ2lu","Q29uZGl0aW9uYWw=","U1lN","QXJyYW5nZQ==","IFN0b2Nrcw==","IG1lYXNsZXM=","4KSk","RW5jcnlwdGlvbg==","IEVudGlyZQ==","IG1pbk9jY3Vycw==","IGh1Z3M=","L3dpbmRvdw==","CXByb3A=","PSQoKA==","IFVDUw==","IEZpcg==","LkNsb2Nr","LWRlc2t0b3A=","IG1hbGZvcm1lZA==","IEFiZXJkZWVu","IMOF","IFJvYWRz","IEJlaGF2aW91cg==","KCkn","5bGe5oCn","LkNvbXBhcmF0b3I=","X21v","X0lPUw==","IE9yaW9sZXM=","Lkxvb2t1cA==","IGZzZWVr","X0lC","L3N0YXI=","Kzwv","X0Rlc3Ryb3k=","LXRyYQ==","KCcuJyk=","IEZvckNhbkJlQ29udmVydGVk","IEZvckNhbkJlQ29udmVydGVkVG9G","IEZvckNhbkJlQ29udmVydGVkVG9Gb3JlYWNo","IEFhZA==","IGFpcnN0cmlrZXM=","aXNPaw==","IGZlZGVyYXRpb24=",
"IExhYnJhZG9y","X2xhdW5jaGVy","YWxvZ3k=","Pj4oKTsKCg==","IEp1Yg==","dXRy","aXN0aW5ndWlzaGVk","YWJhbnQ=","UmVnaW9ucw==","L2hlbHBlcg==","X2xpc3Rlbg==","CVRvYXN0","IEZpbGVNYW5hZ2Vy","aXRvcmlz","IGVsZWN0cm9kZXM=","R1JBREU=","IGJlZ2dlZA==","IFBsYXRlcw==","YWZvbmU=","ISEhCg==","IGVieA==","IGRlZmF1bHRQcm9wcw==","IGNvbXBhcmVUbw==","IFNDQw==","LmV4dGVudA==","YXV0b3M=","IOyW","IFRvbGtpZW4=","OjoqOwoK","Kics","LmRvY3VtZW50cw==","c2luZw==","PUJpdENvbnZlcnRlcg==","IEtyaXNobmE=","IHBsYWlzaXI=","IGJ1Z2d5","IHJlZ3VsYXRlcw==","IGZyaWRheQ==","IGNvbXBsZXRlbmVzcw==","IGF1ZGlibGU=","IFJlY29nbml0aW9uRXhjZXB0aW9u","IHNoZWRkaW5n","W10pewo=","KGJhbGw=","IENoYXRDb2xvcg==","KENvZGU=","KCksCgo=","IHRlcnRpYXJ5","IFNJREU=","KEpTT05PYmplY3Q=","pOaWrQ==","UmVtYXJrcw==","IGxpc3RCb3g=","LmltYWdlVXJs","IGRlbGF5aW5n","IHNvY2lvZWNvbm9taWM=","Lmxw","PE15","Lm9uU3RhcnQ=","IFNjb3I=","Ynl0ZXJpYW4=","LXJvY2s=","X21ldGVy","IHJlcG1hdA==","IHByZWd1bnRh","IE1FVEE=","KGd0","IEZSSUVORA==","IHNvcnRl","IGhlcA==","b25vbWllcw==","IGF1dG9tw6F0","IEZvcm1hdHM=","c3RhdGVQcm92aWRlcg==","LWZsb29y","X01VWA==","KENvbnRlbnQ=","IElOU1RBTEw=","IFRpdGFuaXVt","cnVj","LkRhdGFzZXQ=","YXNjbw==","Lk1BVENI","IGZlc3Rpdml0aWVz","TVNO","Lm90","IEdldExhc3RFcnJvcg==","aWVucw==","IF9fX19fX19fX19fX19fX19fXwoK","X0dG","X3BsYXRl","IEZvcm1hbA==","LWxldHRlcg==","S2F0ZQ==","YXBpYQ==","ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKi8K","L2dlbmVyYXRlZA==","IERpbmc=","IEZyaWVkcmljaA==","ICcpJw==","VUJMSVNI","IEFiaWxpdGllcw==","IHVubG9ja2luZw==","Lnl5","IEludGVycg==","bm90aHJvdw==","aXBvcA==","IENPUlBPUg==","W2FycmF5","PFdlYkVsZW1lbnQ=","X1NJRA==","LnF1YWw=","RGlhZ25vc3RpYw==","OiIiLAo=","KG1vbWVudA==","anVyZWQ=","IHRlcnJlc3RyaWFs","ZXJ1bGU=","ICYpOwo=","IGJ1cmVhdWNyYXRpYw==","b3BwaW5z","IGphcG9u","bGVvbg==","X3JlbmFtZQ==","X0RFU1RST1k=","LkVuZHNXaXRo","IGVydXB0aW9u","KioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKi8K","UEVU","X3JlbG9h
ZA==","IHN1cHBsZW1lbnRhcnk=","IHppZW4=","Q0xMb2NhdGlvbg==","IGtsZWlu","X2Vm","Ont9","IGNvbWVudGFyaW9z","KHZhbGlkYXRpb24=","Lnh0ZXh0","X0lNQUdFUw==","LnNldElucHV0","IERlY29tcGlsZWQ=","X1RCTA==","Y29tcGxleFR5cGU=","X2ZlYXR1cmVk","ID8+PD8=","LnZvdGU=","IEZyaWRheXM=","LmNvbnN1bWU=","Lk1FRElB","IHN5bmVyZw==","jpjsnbTsp4A=","X0hFQURFUlM=","eEFD","X252","zq0=","IFNpbW9uZQ==","Q2VycmFy","YWRkb2Nr","LnNlcmlhbGl6ZXI=","IENsYXNzaWZpZWQ=","Lkl0ZW1zU291cmNl","IHByZWNvbmRpdGlvbg==","44Gd44GX44Gm","RElTVA==","SW1hZ2VVcmw=","L3JhbmRvbQ==","IGVyw7N0","W3Jvb3Q=","QUxMRVJZ","Y2o=","eEFE","IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIwo=","IGl0YWxpYW5p","fCM=","IHJlZ2VuZXJhdGU=","IHN0cnI=","KHx8","IEVtZXJzb24=","IFBJRQ==","Y2xpZmZl","CWFu","PlBhc3N3b3Jk","dG9EYXRl","Q2lwaGVy","IGNvbnZveQ==","IFhDVEFzc2VydFRydWU=","L19f","LWZvY3Vz","IFJoaW5v","IGdvbw==","IGJvdG9u","Lk5vU3VjaA==","IFJlZHVjZWQ=","TUlTUw==","IFdpbmNoZXN0ZXI=","dXJsZW5jb2Rl","IG11ZGR5","aXlh","IE1icHM=","IHN0YWw=","b2RhZm9uZQ==","5Lus","IHBo4bqpbQ==","ICIvIjsK","IEFtbW8=","TmV3UHJvcA==","ID0KCg==","INCf0YA=","IHBheg==","IGxpYmVybw==","CVJlc291cmNl","bmVpZ2hib3Jz","LHJlc3BvbnNl","X2F0dGVtcHRz","IG5r","IG1pbGl0aWFz","X1BBWUxPQUQ=","LkJ5dGVTdHJpbmc=","INGB0L7QtNC10YDQtg==","YXJ0b24=","PkhlbGxv","bGlnaHRseQ==","b3dlbGw=","IGd1YXJkaW5n","IFRPSw==","IHdoZXJlYWJvdXRz","X2R3","IFJvdWxldHRl","IGd5cg==","IEZlZG9yYQ==","LkJ1dHRvbnM=","IGV4Y2xhaW1lZA==","IFNvbW1lcg==","QXV0aEd1YXJk","LXJhdGluZw==","TWV0aG9kQmVhdA==","LnBvc2l0aW9ucw==","TWVkaWFu","LuKApgoK","IGdsYWM=","IHVuZGVybWluZWQ=","JSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJSUlJQ==","X3RoaXJk","LmtlZXA=","IGhheWE=","IHRvSlNPTg==","IExhdXJpZQ==","IAkgICA=","IEFjY3Vt","IHBydW5l","dXJ2ZWQ=","IE5TRg==","IEdyYXBl","RkxJQ1Q=","6LI=","IHByZWRpcw==","X3B0cnM=","IG11bHRpY2FzdA==","KEdyb3Vw","IGhlacOf","IGZlZGVyYWxseQ==","X1BBVVNF","IG1hbGF5c2lh","IFJlY2FsbA==","IHJvZHo=","IFNlbnRlbmNl","aW50ZW
w=","X2RydmRhdGE=","LXNjZW5lcw==","PHk=","IGZvb2xlZA==","IExvdWQ=","IGFudGl2aXJ1cw==","LnBsaXN0","IHZlcndlbmRlbg==","IFdvbGZl","KWl0ZW0=","IHR3aXN0aW5n","IGVzcGFu","YXRlcm5v","IEFjY29yZA==","KCldLA==","UkVNT1ZF","ZGVoeQ==","X1ByZQ==","IG1pc2Nhcg==","dmxh","IHNlbWJs","IHRldGhlcg==","IEJpag==","LycKCg==","IENvcGllcw==","LXBhdHRlcm4=","Lm9uVmlldw==","LXRha2luZw==","X3NpbXBz","44GX44GL44GX","IERBQ0E=","b3JuaW5n","IFBlc3NvYQ==","b3JueQ==","X3Bhcw==","IGVpZ2h0eQ==","VGFj","X1NUT0NL","LmxvY2F0aW9ucw==","Iil9LAo=","IHTDoQ==","LWZpZWxkcw==","b2thbmU=","L2t1YmVybmV0ZXM=","IGNoaWNh","IGFydMOtY3Vsbw==","7II=","Q1JFQVNF","QVNB","IExvbmQ=","IGV4ZW1wbG8=","QWxsb3dz","aHRtbHNwZWNpYWxjaGFycw==","KHZpcw==","IGpy","54Gr","IEVDTQ==","IGVtYmFy","X0FEQVBURVI=","IGRpbHV0ZWQ=","X29mZmljZQ==","IHNraW5jYXJl","QUdJTkc=","IMO+","IFNNQVJU","L1RhYmxl","IGJhc2Fs","Q29uY3VycmVuY3k=","IFZveA==","IFVJQ29sbGVjdGlvblZpZXdDZWxs","IHdvbA==","IFNPVVRI","IGZyb21EYXRl","IGNvcmRz","RU1T","LndlaXhpbg==","J2VsbGU=","IOWx","IGdvYWx0","dWli","IE5lcHR1bmU=","KG9yZA==","xLFuxLFu","IG1pY3JvYmVz","V2VhcG9ucw==","LURlYw==","IFJvb25leQ==","IFN3YWdnZXI=","66qF","X2xh","IGdlbmVyYWRv","IEhpcg==","Q29taWM=","IGNhcnZl","X3Jx","aWN0ZXI=","IGNhcnRlbA==","YW5jaWFz","IFBhbmFzb25pYw==","IHJvYWRzaWRl","IGZyZXNod2F0ZXI=","IGRiYw==","X3RleHRz","X3NrdQ==","IFN1bW1lcnM=","IFBpY3R1cmVCb3g=","Lmdyb3VwQ29udHJvbA==","VkFSQ0hBUg==","UmVMVQ==","IHNhYm90YWdl","DQogICAgICAgICAgICANCg==","IHNjcm9sbGJhcg==","IGJhdHRlcmVk","Y2lw","LXBpY3R1cmU=","CXN0YXRz","LmNyZWF0b3I=","X0NMRUFO","Lk1PRA==","IGJpZ2ludA==","IFRlcnJvcmlzbQ==","X1Nob3c=","IFNwaWNlcg==","X0VUSA==","IMSR4buD","IHN1bW1lcnM=","IFVyYW4=","L21lbW9yeQ==","UmV2aWV3ZWQ=","IGR1ZXM=","c2V0U2NhbGU=","IFJheXM=","IENTQw==","aW5jb21pbmc=","LWJ1eQ==","IHByb2N1cmU=","ZW50YXI=","IGJ1bGxz","IAkJCQkJCQ==","IEZpYm9uYWNjaQ==","LXNjaGVtYQ==","bWFrZXM=","RWY=","X0Rlc2NyaXB0aW9u","L2FsZXJ0","IGpzb25TdHJpbmc=","dWZmbGluZw==","IEtFUk5FTA==","IEhveQ==","IGdyYW50UmVzdWx0cw==","b25hbGQ=","IFByb3ZpbmNpYWw=",
"c2VuZGluZw==","cHRvbQ==","INCe0LE=","IGNvbnN0cmFpbg==","IMWhdG8=","IFJhaXNlZEJ1dHRvbg==","VVRET1dO","IEdMc2l6ZWk=","IOekug==","44OR","IEdvbg==","UExJRVI=","J119PC8=","Y2xhc3NpYw==","IGVuZ3JhdmVk","IG1hc2N1bGluaXR5","TWFyc2g=","c3NxbA==","KEdyYXZpdHk=","IGxvYnN0ZXI=","67aE","X0ludGVy","XGJhc2U=","JzpbJw==","IGRldGFsbGU=","dHdlZXRz","IGplYWxvdXN5","YWdlbmRh","LGl0","c3dpcmU=","K0I=","IHRyb3V0","X2FsdGVybg==","OiIj","IER3YXJm","IFNoYXBpcm8=","ZXJvb24=","IG5vaw==","X2xvbmdpdHVkZQ==","IFdlcm5lcg==","IHZpb2xldA==","dXJzaXZlbHk=","LWF3YWl0","IH0KCgoKCgo=","IExlbm5vbg==","IEFudGFyY3RpYw==","IGLDpWRl","X3Nsb3Bl","bWFuZG8=","b3VuY2Vy","LWlvbg==","IERlc3RydWN0aW9u","aXNzZW5zY2hhZnQ=","UGl6emE=","IEdlb2xvZ2ljYWw=","Qk9VTkQ=","IGNpbmU=","RGVtb24=","LnBlb3BsZQ==","X1RPR0dMRQ==","CW5vZGVz","YnVzY2Fy","LnByb2Nlc3Nvcg==","Tmg=","L3Nkaw==","IG15Y2tldA==","YXVjdGlvbg==","TWVn","R01FTQ==","IGlyb25pY2FsbHk=","5riF","IGNvbnZlcmdl","IFVJVGFibGVWaWV3RGF0YVNvdXJjZQ==","QXJkdWlubw==","PmU=","Sm95","IFNob3VsZGVy","IER1Yw==","UFJJTUFSWQ==","Lioo","LXByZXM=","IGRpYWxvZ1JlZg==","aW1hZ2VOYW1l","X2ludm9rZQ==","XFRlbXBsYXRl","T0k=","IHZyaWVuZA==","IEd1ZXJy","IHByZXJlcXVpc2l0ZQ==","IFBHQQ==","IFJlc3A=","KSIsIg==","bGxlbg==","IHNuYXBwaW5n","X0ZpcnN0","S0lU","LnNldEZvY3Vz","IEN5cHJlc3M=","Y3JhZnRlZA==","LzsK","d2VpZ2h0ZWQ=","dm95","X3RG","X2luc24=","IEluc3RhbGxpbmc=","IEdhbGx1cA==","QURPUg==","IEFMT0c=","Q29udGV4dEhvbGRlcg==","IFRvdXQ=","IEZvbGV5","IGNvbnRlbXBsYXRl","IENvaW5iYXNl","WMOj","d2FuZA==","LkNyZWF0ZUNvbW1hbmQ=","U29jaw==","IHVud3JhcA==","Y2xhc3NwYXRo","PFJlc291cmNl","X0VTVA==","PXJhbmRvbQ==","IFNoYWRl","IGRpY2k=","2K/Zig==","IGtpdHR5","0LDRgtC10LM=","4buNbg==","LkNvbXBsZXRlZA==","cGxvcmVy","IGJhYmVs","Lk9uSXRlbUNsaWNrTGlzdGVuZXI=","IE1jTWFob24=","IHJlc3RUZW1wbGF0ZQ==","IHRlc3M=","U2V0VXA=","L29jdGV0","IGNhbGFt","IGhpbmdlcw==","IGFydGVyaWFs","IFRydW1hbg==","IENoZXJ5bA==","X0REUg==","IHRtcGw=","IExlcg==","W2hhc2g=","S0VS","IHByb3BvcmNpb24=","IGNvYXN0bGluZQ==","YWNpb3M=","Ij4tLX19Cg==","IGRpc2
FkdmFudGFnZWQ=","VG91Y2hMaXN0ZW5lcg==","IFNlZ2E=","Y29lcw==","SWxsZWdhbEFjY2Vzc0V4Y2VwdGlvbg==","PEJveA==","IEluY3JlZGlibGU=","VXBkYXRlcg==","RkxU","aW5hbWU=","IEludGVyZmFjZXM=","Kylc","ZW5kaW1lbnRv","IHBhbmNha2Vz","IGluY29uc2lzdA==","LnBldA==","IGtleW9m","SW5uZXJUZXh0","Picp","RGVhbg==","IFDDqQ==","KENvbnRyb2w=","IHNwYXI=","bGluaWs=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIA==","IERhbmU=","X1BBR0VT","IHNldEJhY2tncm91bmRDb2xvcg==","c3ViY2F0ZWdvcnk=","IFN0cmluZ1NwbGl0T3B0aW9ucw==","QWxsZW4=","ISgie30iLA==","hOyerA==","IGJhYw==","X1BST0RVQ1RT","dXBwZXJjYXNl","PSQoIiM=","xJlr","IFVJVGFwR2VzdHVyZVJlY29nbml6ZXI=","TUVUQQ==","IHNjYXJjZWx5","6aA=","X21hbmFnZWQ=","IGNvbnN1bW8=","TW91c2VNb3Zl","IFNwZWNz","IFNlYXJjaGluZw==","SGVhZGVyVmlldw==","Oicp","IG1pY3Jvc29mdA==","IEtvc292bw==","ZW1hbm4=","LmZmdA==","IEh1YmJhcmQ=","IGRleA==","X1RFUk1JTg==","X0ZD","IHBoaWxpcHBpbmVz","XENvbGxlY3Rpb25z","IHRlaA==","IHF1YWxpZmllcw==","IGlucHV0VmFsdWU=","IEdPVA==","KHNh","SUxMRUQ=","IHNsYW5n","IGtlaW5lbg==","IGZlbG9u","IEVyaWNr","YWJpbGlkYWRl","LnNlcg==","IHJ1bmVz","IFVucmVhbA==","KG9y","IOusuOyekA==","IGJpZGk=","IGlyYw==","CWl0ZXI=","Im5pbA==","L3VidW50dQ==","IG11cmRlcmluZw==","ID8u","dW5rZXI=","UmVjdFRyYW5zZm9ybQ==","JykpCgoK","IGFyaXR5","IEZyZWVs","Lm1vdW50","Q09NTUVOVA==","ICIqIiw=","ZW5jcnlwdGlvbg==","W21vZGVs","In19Pgo=","LlRvdWNo","L3RodW1i","IHByZXo=","L2NvbXBhbnk=","IHLDs8W8","IHNvZnRlbg==","IHBvc3NpYmlsZQ==","IEVDQg==","X0Jvb2w=","IC0tLS0tCg==","IGludGVydHc=","X3N0YQ==","X0JBTA==","Lm5hdmlnYXRpb25CYXI=","IFJHQkE=","Z3JpbHk=","c3RvZmY=","YWNreQ==","UUI=","QEFwaQ==","cGVjaWE=","IFJwYw==","IGFtcHM=","IEZlbmNl","IGdlbm9taWM=","KGFsaWFz","Vmllbg==","U3BpbkJveA==","LmdldFNlY29uZHM=","IGdsb2JhbGl6YXRpb24=","IGN1cw==","a3ViZWN0bA==","IHRocm90dA==","IGluZXJ0","IFNjcmF0Y2g=","w5c8Lw==","Lmlzc3Vl","ZXNzYXk=","LUlzbA==","IG3DoXI=","CWJpdA==","IGFib2xpc2hlZA==","LmluZmluaXR5","bGluZW5v","LmFsZ29yaXRobQ==","b3JzY2g=
","RW1haWxBZGRyZXNz","IERBRw==","YnJpbmdpbmc=","Lm15YXBwbGljYXRpb24=","LlN1cHBvcnQ=","X2xlYWRlcg==","IERldmlu","IFtdDQoNCg==","IHJtcw==","IGJ1Y2tsZQ==","aWdsaWE=","L3Byb2JsZW0=","IGhhdXRl","IGluc3RpdHV0ZWQ=","SVU=","bGFtYQ==","RVhQRUNURUQ=","IEJlY2toYW0=","IEh5ZHJhdWxpYw==","U3RhdGljcw==","X25vcm1hbGl6ZWQ=","LmAsCg==","IG1pbWV0eXBl","IHNoYXZpbmc=","T3ZlcnJpZGVz","IE1lcmNlcg==","dHJmcw==","LXN0YXRz","b3NwYWNl","IGFudGlveGlkYW50cw==","aW5maW5pdHk=","Um9ja2V0","IEV1bGVy","LXZhbHU=","IGzDuA==","LUlO","SG1t","LXJldHVybg==","IFBBTkVM","IHRlcm1pbmF0b3I=","IHRla24=","IHByZWRpY2F0ZXM=","U3RhbXBlZA==","IHN2ZQ==","YW50ZXI=","IGN5Y2xpc3Q=","IEVwc3RlaW4=","IGhpdHRlcnM=","ZG9ncw==","LkFkZExpc3RlbmVy","X2V4Y2VwdGlvbnM=","IEZPT1Q=","aWNhcmU=","W3RhZw==","LWZldGNo","VVBMT0FE","LmRyb3Bkb3du","IGNlbnRyb2lkcw==","IGFyYmU=","IGhpam8=","IERhdGFiYXNlUmVmZXJlbmNl","UG9saXRpY2Fs","IEJBU0lD","LWZvcmNl","fCQ=","IFJFVklFVw==","LmRlY29yYXRl","IEFzcGVjdA==","IGNvbW1lbW9y","IGNsZWFuc2U=","IENsYXVkaWE=","Z2VuZXJhdGlvbg==","SExU","dHlwZW9ybQ==","cHJlZmVy","b3ZlcmxhcA==","YmlvbG9neQ==","U3RyZWFtZXI=","Y29tbWlzc2lvbg==","IHRodW1ibmFpbHM=","LkN1cnJlbnRDdWx0dXJl","IHVybHBhcnNl","IGdpb3Jubw==","IGRldnM=","X2FzcGVjdA==","IGNoZXJpc2hlZA==","IE5hY2hyaWNodA==","IHJpZ2dlZA==","L2xvZ2dpbmc=","aHVudA==","VHlwZUVycm9y","PFNlbGVjdA==","KHByb2c=","IEdyaWRMYXlvdXQ=","6JA=","IEVYUEVS","CUtFWQ==","LmRt","CWNhcmQ=","IFRhdQ==","IG5vdGFtbWVudA==","IGhlcm9pbmU=","IGJhdGh0dWI=","YXRyb24=","IOaU","77yS77yQ","Y29ub21pY3M=","IHJldmVyc2libGU=","6YeR6aKd","IGpzeA==","IFNwZWFrZXJz","RGVzZXJpYWxpemVy","LnRvRmxvYXQ=","INC/0LXRgNC10LzQtdC9","IFByb3ZpZGluZw==","6LSm","W2VsZW1lbnQ=","Kjo=","PlJldHVybnM=","IHRpdHVsYXI=","IGhlYXJ0YnJlYWtpbmc=","X05C","LkFyZ3VtZW50cw==","IG9wdGlj","YXR0YWNrcw==","IFZ1bG5lcg==","CWtleXM=","IGNvbnRyb2xl","LlJHQg==","IHN1Ymdyb3Vw","bWFuZGF0b3J5","IENBQg==","CWVuZ2luZQ==","44Gw","TUVESUE=","L3RyYW5z","IGRhbms=","IHNlcnZpY2Vk","IGluY2FyY2VyYXRlZA==","IEZyZWFr","IHVwdG8=","ZHJhd2Vy","WyIr","IGVudHdpY2s=","Z
0w=","TW9kZWxFcnJvcg==","IHJlYWRkaXI=","aXN0cmlidXRl","IGdsYXJl","aXF1ZW1lbnQ=","Y2hpbmE=","IEthcGxhbg==","IFN0YWJpbGl0eQ==","cG9zaXRlcw==","IEpBWEJFbGVtZW50","IHRvdGFsbWVudGU=","KGNvbW0=","X3Byb2Nlc3Nlcw==","VGhvdXNhbmRz","IElscw==","ZXJ0YWludHk=","IFNoYWRlcw==","YWN0YWw=","bG9nZ2VkSW4=","IE5pY2hvbHM=","IE1pZGxhbmRz","ZGV2aWw=","IHN0clNRTA==","In0p","IEpvcmQ=","KGZm","IEp1bmk=","5bCx","YXJ0aXNhbmxpYg==","IG1vb25z","IHVucmVzb2x2ZWQ=","IHdpdGNoZXM=","IEfDvA==","IEdvYmxpbg==","YW5zc29u","fCU=","IGJ6","IGR1cGxleA==","ICIpKQ==","Lmxpa2Vz","KHZlcnRpY2Fs","IGNvd2JveQ==","U2VsZWNjaW9uZQ==","ICcqJyw=","IFNhcA==","IFNhYmJhdGg=","U09SVA==","4Ka/4KY=","X2NlbnRlcnM=","XFBvc3Q=","KFRyZWU=","IHBhcnRlcw==","X3lhdw==","YXJlbW9z","c2V2ZW4=","IGhpYXR1cw==","X2ludGVuc2l0eQ==","LW1hbnk=","IERvbGxhcnM=","LXVuc3R5bGVk","IGdyaXBwaW5n","IG1hcnZlbG91cw==","IHJlY2VwdGlvbnM=","IG92ZXJjbG9jaw==","YmVybWFu","IGhlYWRxdWFydGVyZWQ=","eEJC","Y2xhc3NDYWxsQ2hlY2s=","IG9ic2VydmVz","U3VibWl0dGluZw==","0LjRh9C10YE=","IEh0dHBTdGF0dXNDb2RlUmVzdWx0","IGhpZXJvbnRh","cm9wcGluZw==","Rk9SQ0U=","CXV0aWxz","IHZlbnRz","YWRkZXJz","IE1JWA==","IEVsZWdhbnQ=","IGFjb3M=","KG1hY2hpbmU=","IG1lZGRsaW5n","IHZpbGU=","LWNvbXBhdGlibGU=","IGNyZWFtcw==","IFRhYmxlUm93","IFJlaGFiaWxpdGF0aW9u","QWJi","KHVzZXJJbmZv","X2V4cGlyZWQ=","Lk9iamVjdE1ldGE=","IGdvZHQ=","dXN1YWw=","LmJpbmRpbmdOYXZpZ2F0b3JNb3Zl","IFJlZ2lzdHJhcg==","bWlncmF0aW9u","YXB0dXJlZA==","LHBhcmFtcw==","IGNlbnRlclk=","b3dhbg==","bG9jYWxlcw==","SW5wdXRNb2R1bGU=","IHZpZ2lsYW50","IG5jb2xz","IGluZ3I=","IGPDtHTDqQ==","dmVydGltZQ==","IHdpZGVzdA==","IEhERg==","IEFsZ2VyaWE=","IGNoYXR0","JHNlbGVjdA==","Il0pDQo=","IG11bHRlcg==","IENoZW5leQ==","ZnVzY2F0ZWQ=","PSciLiRf","IERlbmlzZQ==","IHJpZmY=","QWJzZW50","IHRhbWHDsW8=","IGplc3pjemU=","LlByb2dyYW0=","CWJy","ZXJhaXM=","IHNhbmRhbHM=","ICws","IGRpc3NvbHV0aW9u","IHVudGVyc2NoaWVk","UHJvdg==","LnRyYW5zYWN0aW9ucw==","IFRyb3VibGU=","Lm1pZGRsZQ==","LmdldERlY2xhcmVk","IHN3ZWF0aW5n","IEhhbmNvY2s=","6LS5","IHBvZw==","IEtpYQ==","IG1vZG5l","IEF
jY2Vzc2liaWxpdHk=","IGxlYWthZ2U=","IGRlY2VwdGl2ZQ==","IFdPTQ==","INC+0YE=","IGNzYWs=","YWNvY2s=","LlN5bnRheA==","ICxb","LicpLAo=","IGZvcmVjbG9zdXJl","IHVuZmF2b3I=","IGV4Y2w=","Q1VEQQ==","ZGVuc2U=","PFVuaXQ=","IHZhcGluZw==","IG1hamVzdGlj","aWF0b3Jz","IGF1dGlzdGlj","LmdhdGV3YXk=","VXJsUGFyc2Vy","SGVsbA==","IENvc3Rjbw==","IEhJUA==","T2JzZXJ2ZXJz","IFBlb3BsZXM=","IFNwb3RsaWdodA==","IFRhdmVybg==","IFRPVVI=","cGxpbmdz","LldSQVA=","IGFsZA==","TkFM","KCIqKio=","c2V0UHJvcGVydHk=","X1N0b3A=","YW5ub3VuY2VtZW50","IEltbWVkaWF0ZQ==","IEhTVg==","X1RFU1RT","IGNyYXZl","X1VD","LmRlY3J5cHQ=","KFJvbGVz","IHN1Ymo=","X0ludGVnZXI=","Lm5vdE51bGw=","IEdzdA==","IEJ5cm5l","IEFxdWFyaXVt","IENhbmM=","X0NIQU4=","IERUTw==","Lmhs","IG1lbmdndW5ha2Fu","RnJhbmM=","RGlhbG9nQ29udGVudA==","Li4uJwo=","IEt1bnN0","IEFsbG9jYXRvcg==","VVNBR0U=","S25vd2xlZGdl","CWNwdQ==","IG1vcmFscw==","cGF0aWVudHM=","IGlsaw==","IGNyaXRlcg==","IFZldA==","IE1lc3NpYWg=","X186","YXZlbm91cw==","X3ZpZXdlcg==","KERpY3Rpb25hcnk=","IEJvZGllcw==","aGFzT25l","0LjQvNC10YA=","IHppcGNvZGU=","U3Rlcg==","IGLDoXM=","X0Rpc3BsYXk=","IGZpcm1h","IFJhaWRlcg==","IEtI","V2l0aERhdGE=","KEFSRw==","IHByb3Ry","IG1zZWM=","IGxhdmVuZGVy","KFV0aWw=","INC/0YDQvtCz0YDQsNC8","X211eA==","X2xhdGl0dWRl","UG9ydHJhaXQ=","IHNpdGNvbQ==","IGFkaWNpb24=","KGNvbnN0YW50cw==","IEFueGlldHk=","IFJvc2Vz","IHN0aW11bGF0ZWQ=","IGNocm9ubw==","IGZvc3NpbHM=","IEFpcmJ1cw==","bGVmdHJpZ2h0","IE3DqXRvZG8=","Inc=","IGtsZWluZW4=","IGNsaXF1ZQ==","b21pbmF0aW9u","IG1vdGVs","L3ZlY3Rvcg==","ZGVjbGFyYXRpb24=","IG5ld1k=","W0g=","LnNjYWxhcg==","b21ibw==","aHVk","O3NldA==","ZnR5cGU=","KCcnKS4=","b3JkZXM=","eW5vcw==","J10sCgo=","X0ZMVVNI","aWRlbnRpZnk=","L2RldmljZXM=","IGRpY3RhdGVk","IGRlamFy","IEVtaW4=","IFBlbmRhbnQ=","IG9uVXBkYXRl","XSkpKQ==","IEJhcmtlcg==","T3Jt","6K+36YCJ5oup","X2d1aWRl","w6FiYWRv","b3BoZQ==","ICIuCg==","IEJyZXdlcnM=","IGJyaWRhbA==","IENFUw==","X0NhdGVnb3J5","IEJUTg==","IERhcnRo","I2Zvcg==","ZXRobmlj","YXJjaGl0ZWN0dXJl","IENvdXBl","aWRvcmVz","IGZhc2Npc20=","IGNvbnRyYWRpY3Rpb2
5z","ZWZmZWN0cw==","SW5pdGlhbFN0YXRl","IOekuuS+iw==","bWF0cGxvdGxpYg==","LmRlc2t0b3A=","INCt","IFFQaXhtYXA=","CWJlZ2lu","IHduZA==","IGNvbnRpZW5l","KGhlbHBlcg==","Lk5vdGlmeQ==","KEJvb2s=","IEd1YXJhbnRlZWQ=","cGxs","aW9sYQ==","IGZ1bmdp","aXZlbnQ=","IE9B","5rKh5pyJ","IHdpxJljZWo=","CQoJCgkKCQo=","77yaIis=","IFRhbGtz","LnN0YXJ0ZWQ=","b2NpdGllcw==","IGVzcG9ydHM=","PElucHV0","IEVYQ0VQVElPTg==","IGFjdHU=","LmltcA==","ICIvIgo=","T3RoZXJ3aXNl","IFBlbnNpb24=","IFdhdmVz","xrDGoQ==","aWFyZHM=","ICo8Lw==","dXJnZW9u","IFNDSQ==","IExhdXJlbA==","ZXRhZw==","TmV0ZmxpeA==","IFJlc3BvbnNlcw==","IG5lb2xpYmVyYWw=","aXNDb250YWluZWQ=","PW15","IHJlcHJpbnQ=","b25lc3RseQ==","IGRlcGFydGluZw==","UFdN","ZXdoYXQ=","PSI8PA==","Lnlhbmc=","IFRyYWRpdGlvbg==","KyI6","ZGVwZW5kaW5n","X1VuaXQ=","IENvZGFibGU=","IHdoaXNreQ==","IGNvcnJlbGF0ZQ==","IGRpcmV0","TGFzdGx5","CU91dHB1dA==","KGlub2Rl","XExvZw==","IERlcGVuZGVuY2llcw==","V2lsbERpc2FwcGVhcg==","IFBhbmVscw==","IOKUnOKUgOKUgA==","IG9zdGVuc2libHk=","fC0t","QW5udWFs","IGF1dG9sb2Fk","VmFsdWVIYW5kbGluZw==","LmNvaW4=","ZWR1Y3Q=","Wlk=","IENhbnVja3M=","IHNtZWFy","IHJlYWxpZGFk","IHt7Cg==","aXZvbA==","ZXRTb2NrZXRBZGRyZXNz","IEtlbXA=","L0ZyYW1ld29yaw==","IHF1aWNrZXN0","XyIuJA==","IHdpdGhob2xkaW5n","IGludHJpZ3Vl","IEFERFI=","RGllc2U=","V2Vla2x5","X19fX18=","IEludmFsaWRBcmd1bWVudEV4Y2VwdGlvbg==","b2xhdGVk","UnVuTG9vcA==","IHBhc3PDqQ==","LmZpcmViYXNlaW8=","LmV1bGVyQW5nbGVz","aXN0ZW5jZQ==","IGZlYXJpbmc=","IEVsZW1lbnRUeXBl","L1Rlc3Q=","IOafpeivog==","IGZvbmRv","IFBhcnI=","IHplc3Q=","IFRyYW5zZm9ybWVycw==","TGluZVN0eWxl","IGV0aGVybmV0","YWZmbGVz","IG5hbWVkdHVwbGU=","IFNjYWxhcnM=","TlNVUkxTZXNzaW9u","LWV4dGVuc2lvbg==","KE1lc3NhZ2Vz","IGF0ZW5jacOzbg==","IEplcnNleXM=","YmVkUGFuZQ==","IFN0dW5kZW4=","IHZvaXR1cmU=","IOm7mOiupA==","Lm9wZW5nbA==","ICJ9","IFJldmVuZ2U=","IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0K","SW5zdGFudGlhdGU=","IGVucg==","VmFsaWRhdGlvbkVycm9y","X0FMUkVBRFk=","TG90cw==","b2Nl","IHNjcmlt","IGVtYm9keQ=="
,"0YDQsNGC","IGNvbmNlZGU=","YXNzZWw=","IEJSRQ==","UExFQVNF","CWRpZmY=","57uT5p2f","LmZw","YmFt","TWVhbA==","IE1hZG9ubmE=","IHB1bmlzaGFibGU=","aWZmaWVz","X3VuaXg=","7JmA","IEdhZ2E=","InN0cnVjdA==","VG9TZW5k","IE9DUg==","IHByYWlzaW5n","Z2V0U3RvcmU=","IGV1dGg=","IGFycmVnbG8=","IGZlcm0=","ZmRm","Q29vbGRvd24=","IFJlY3ljbGluZw==","QW5h","aW5kcg==","X0hQ","IEdvdmVybmFuY2U=","IGJhcnJhZ2U=","L2Nh","ICwo","RsO8cg==","IElTUHM=","IG1lbmFjZQ==","VmlyZ2luaWE=","IGZhbmM=","IG5vbWJyZXM=","Lmluc3RydWN0aW9ucw==","IGVzY2FsYXRlZA==","YWdpbmE=","IExldmluZQ==","CWZpbmQ=","X2Vy","IGRlanRpbmdzYWo=","c3Zw","YWdvcw==","KHNvbA==","IExpZA==","UFJJVkFURQ==","IElNUExFTUVOVA==","ZWZlbGxlcg==","KFRhcmdldA==","4LmJ4Lit4Lih","aG91c2luZw==","LnNldEN1cnNvcg==","IG5laG1lbg==","LnJlY2VpdmVy","IFR1dG9y","IG1hdHRlcmVk","bWRhdA==","cmVndWxhdGVk","IGdldEFkZHJlc3M=","IE1pbnV0ZW4=","IElV","0LvQsNCy","IHR1cm5vdmVycw==","IHN1aXRhYmlsaXR5","CWVzYw==","Y2FsY3Vs","X1N0cmVhbQ==","X2ZpbGVuYW1lcw==","LXZhcnM=","Li4uLi4KCg==","RGlh","IHN3aW1z","T3B0aW1pemVy","PGJvb3N0","IFBlcm1pdA==","J10pKXs=","XE9wdGlvbnNSZXNvbHZlcg==","5qGI","IGhlY3RhcmVz","KHVz","IERldmVsb3Bpbmc=","X3hz","IG5vdmVsaXN0","IENvbnZlbmllbmNl","d2Fsa2luZw==","IGNoYXJtcw==","IExlYXNl","CUhBTA==","KFsm","IHJlc3RhcnRlZA==","TWFnZQ==","SXB2","INGN0Lo=","UkxG","IGFzc2VtYmxpbmc=","IEVjYw==","dmluZm9z","cGVkaWRv","IHN5bm9wc2lz","IFN0YW50b24=","c3RhcnR1cA==","LmdldHZhbHVl","IEtpdHQ=","cHJvcGVy","IHByZXRyYWluZWQ=","IFBFTg==","LlRlcm0=","IHBlcXU=","ZXBoaXI=","IEFsbGllcw==","IG1vZGVsQW5kVmlldw==","IGJ1dHRlcmZsaWVz","IEtpcnN0","IENoZWNrZXI=","IGN1bm5pbmc=","LnNldFk=","X01hc3Rlcg==","SW5jcmVhc2luZw==","IGh1cmRsZQ==","IGZpc3Rz","IFNsb3Zha2lh","IG5vbWJyZXV4","IDo6Cg==","dGFza0lk","IGZvbGx5","PFRyZWVOb2Rl","IFZvbGRlbW9ydA==","IGJsaXN0ZXI=","xYJl","LkVudGl0eU1hbmFnZXI=","LkRPV04=","IEdyZWdn","LWNvb3JkaW5hdGU=","KHZj","w6FiYg==","LlRvZ2dsZQ==","IExpc2Jvbg==","56I=","INC/0L7Rgg==","cGFyZW50Tm9kZQ==","LnNldFNjYWxl","X01JU1NJTkc=","IG91dHJh","IGt1cA==","YF0=","X3ZpYQ==","ZW
RpY3M=","IEJvcmRlcnM=","IGlwYWQ=","IGVkdA==","IENhcnRlc2lhbg==","L21hYw==","IGJhcmxleQ==","IFNjYXJsZXQ=","ICAgIAogICAgCiAgICAKICAgIAo=","cXVlcnlQYXJhbXM=","IHJoeXRobXM=","IGdlYXJpbmc=","Wlg=","aHlkcmF0aW9u","U1RT","IHBsZW50aWZ1bA==","Y29ycA==","fUA=","aW50ZWdy","L2F0","LmRlYg==","IHVuZGVuaWFibGU=","IG9wZW5zc2w=","LmRlYWQ=","IFBpbGxvdw==","IEJlYW5z","LmFudA==","X3Fz","LWluZm9ybWF0aW9u","IOuzgOyImA==","JSIpLAo=","INC00YDRg9Cz","IFNwb25nZQ==","IHNpZnQ=","dGVzdGltb25pYWw=","IHVubmF0dXJhbA==","VUlTY3JvbGxWaWV3","dmVyZ2VuY2U=","KHRleHRCb3g=","LXBhZ2luYXRpb24=","IERpc3F1cw==","X3Byb2R1aw==","YWduYXI=","S2V5VXA=","CQkJICAgICAgICA=","0LXQu9C1","PHNvdXJjZQ==","Lmls","LmF0b20=","X0NvbXBvbmVudA==","IHlu","WydfXw==","IHdlYWtlc3Q=","X2RlY3J5cHQ=","L21zZw==","Y2Jj","IHBvbGl0ZWx5","b21hdA==","IGVubGlnaHRlbm1lbnQ=","IGNyZWE=","IGJydWs=","X2FscmVhZHk=","IHNvY2tmZA==","dW5wYWNr","b3JnZXM=","IFVORVNDTw==","aW5hbGl0eQ==","IHNlbnRpbmVs","IGFmZmx1ZW50","IHRocm93RXJyb3I=","aWV0cw==","QU5KSQ==","IFN1ZmZvbGs=","YmVybw==","a2V0w7h5","RW5kcG9pbnRz","ZXhlY3V0b3I=","R2E=","LkxB","X3BvcnRmb2xpbw==","dW5zY2g=","ZWxhZ2U=","IGdvYmllcm5v","IEJpb2w=","TW9kaWZpY2F0aW9u","IERlY2ltYWxGb3JtYXQ=","IFZvY8Oq","IG1ldGhvZG9sb2dpZXM=","W10u","IEdW","IHJlcGxpY2Fz","4oCUd2l0aA==","KTspOwo=","cG9zaXg=","U3VjY2Vzc0xpc3RlbmVy","cGhl","X25vcm1hbGl6ZQ==","IExhcmdlcg==","IHJlcGVyY3Vzc2lvbnM=","X1ZlcnQ=","IGhvc3RlbA==","IGluY29tcGV0ZW50","aGV2","X0RFTFRB","IHB1ZWRv","aW5zdGFsbGF0aW9u","X2ZyYWc=","KHJy","IE1BVg==","IExvY2FsaXphdGlvbg==","KCIiKS4=","IC0tLS0tLS0tLQ==","DQoK","IFB5VHVwbGU=","IEp1bGlv","CUdMdWludA==","bWFya3Vw","X0ZBTUlMWQ==","UFJPR1JBTQ==","IEZpcm13YXJl","KnNpemU=","V2lmaQ==","IHZpc2l0YQ==","IEVybA==","RmluZE9iamVjdA==","LlVOUkVMQVRFRA==","cGh0aGFsbQ==","IHBlcnNvbmFsaXpl","IGNyw6lhdGlvbg==","ICAgIAkg","LnByZWNpc2lvbg==","IHNldHRlcnM=","IG5ld1NpemU=","IENhdGFsYW4=","CW9wdGlvbg==","IHBpZWw=","IGNhZ2Vz","IFN0ZW0=","ZHJhd2luZw==","ZXhwbGFpbmVk","IOaOpw==","IGRyZWFkZnVs","ZXJydXB0ZWQ=","LmdldFZhbHVlQXQ=","IGVsYX
BzZWRUaW1l","IGluZGVmaW5pdGU=","IFRIQU5L","X3N0YXJ0dXA=","U1VSRQ==","IGtpZG5leXM=","IEN1aXNpbmU=","fGFycmF5","U2VuZE1lc3NhZ2U=","ZmF2","IEFlcm9zcGFjZQ==","X21lYW5z","IG5lYg==","IE9UUA==","IGNodXJu","L2Zy","IFJlaWdu","X2NsYXNzaWZpY2F0aW9u","IE1hY0RvbmFsZA==","Ii4KCgoK","IGNoaWxseQ==","IOivt+axgg==","aWhhdA==","U1RB","J2F1dHJlcw==","IGxhc2M=","Lm1peA==","IGJsb3Q=","IElERA==","ZGF0YXRhYmxl","c3BpZWw=","IMOpeGl0bw==","YXJ0aWM=","LkF4aXM=","LmFkdmFuY2U=","IG1vdXNlWA==","J8Og","IHJlY2lldmVk","IHBvc2k=","IGZvdXJu","IE1hZmlh","IHBjYQ==","YmVsb25ncw==","YWJseXR5cGVk","QVVUSE9SSVpFRA==","LnNjYWxhYmx5dHlwZWQ=","7JyE","LWRvdA==","IGVtcGhhc2l6aW5n","TWVtYmVyc2hpcA==","KnBvdw==","LXNwaW4=","cnV0YQ==","aGV2aWs=","X0FTWU5D","X2NvbXBpbGVy","LkZsYWc=","IGVsYm93cw==","LkNSRUFURQ==","TWV0cm8=","LmxvZ3M=","em1hbg==","cG9uZQ==","xJnFvA==","IGludGVycw==","IHdlYnM=","X0hJRERFTg==","CW5vdw==","Q29tbXVuaWM=","JHRwbA==","c2NvcGVz","IFppa2E=","IHN0cmluZ3N0cmVhbQ==","IFVuY2F0ZWdvcml6ZWQ=","Rlk=","L3N3YWdnZXI=","UGVubg==","aW1lSW50ZXJ2YWw=","IGNvbnRlbmRz","eGllcw==","IFNhbGVzZm9yY2U=","IHV0ZW5z","IHVuZGlz","Q3J5c3RhbA==","Lm5kaW0=","IGZvcm11bA==","IEZhdg==","5bm/","cmlzaw==","bmFk","L3Rvcw==","IFBFUkZPUk1BTkNF","IHdyaXRlbG4=","IGNvbGxv","YW50aWNhbGx5","VURFTlQ=","Umdi","IG9mZXJl","IG1lcmdlcw==","ZmlkZg==","IGt6","VmljdG9yaWE=","IC9eXA==","IGt1YmU=","IEFwb3N0bGU=","IGRlZmVuZHM=","PD0o","IE1FTU9SWQ==","XElk","IEFjdGl2ZUZvcm0=","IE9uZVBsdXM=","SHR0cFNlcnZsZXRSZXF1ZXN0","IFRlbXBEYXRh","7KCB","LkFTQ0lJ","2YTYpw==","S0k=","IGZyYXQ=","X0NJUEhFUg==","LlN1cmZhY2U=","IHBpdGZhbGxz","LW1lZGlhdGVk","eXBp","LWFsaXN0","eEJD","dGVhY2hlcnM=","IEN5Yw==","IHBzeWNoZWRlbGlj","IER1bWJsZWRvcmU=","IikuCgo=","IFRoYXRjaGVy","IFByaW5jaXBsZQ==","VG9nZXRoZXI=","IGZsb3Jh","d2Vla3M=","X2NyaXRlcmlh","Ym9uZXM=","LmludGVybmV0","IGJsb2NrRGlt","LlNpbmdsZU9yRGVmYXVsdA==","RGljZQ==","IEV2ZWw=","IFRMYWJlbA==","IElnb3I=","IENvcHA=","IGluYXVndXI=","L3ByaXZhdGU=","IGFiZXJy","bmRz","O2lm","LXJhbmdpbmc=","YWNodHM=","X21hcnNoYWxs","IF9fX19
fX19fX19fX19fX19fX19fX19fX19fX19fX19fX18=","LmVuZFRpbWU=","IE1vZGVsUmVuZGVyZXI=","KGZvb2Q=","KCJ+","IHN1cHBs","KCJcKA==","U3E=","VHJhbnNsYXRlZA==","IENvbnRpbnVpbmc=","IHBvc3Nvbm8=","RklYTUU=","IEFuZ2Vib3Q=","aWV2ZXI=","IEt5b3Rv","Y2ls","TmV3VXJsUGFyc2Vy","LkRp","IGh1bWFuZQ==","RGVtYW5k","IE1hcnRpYW4=","d29vZHM=","IEhlYWw=","IFl1ZQ==","IGNvdXJ0aG91c2U=","IHZvbnQ=","IGJvbnM=","aW50ZWdyYWw=","ICQoJyMn","ZXRlcm1pbmF0aW9u","Lm1vZGlmaWVk","IHByaW5jaXBhbHM=","IGFsYXJtZWQ=","LmNyZWF0ZU9iamVjdA==","Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQo=","L2NvdW50","IGVudHJlbmNoZWQ=","XGE=","IGludHJ1c2lvbg==","IE54","CQkKCQkKCQkK","Y2hlbWF0aWM=","IHNsaWRlcnM=","IHNlbGVjdGFibGU=","X25s","aWVzZQ==","X2VzdGltYXRvcnM=","IFN2Zw==","IGRlbGV0ZVVzZXI=","KG1hcHBpbmc=","IOyymOumrA==","IGFudGFnb25pc3Q=","IGtpbmFzZQ==","IHdlbGRlZA==","IExlbmE=","ZWRpdGg=","aWFsaQ==","KHBpYw==","IGJyZWFjaGVk","UElD","IGNvYXN0ZXI=","RkRB","IGtyZQ==","cGVyZmls","IEdlbXM=","X2ZlbmNl","VVJMUmVxdWVzdA==","4oCZYXBw","UkVGRVJFTkNF","LkV4cG9ydA==","IG1pbmltaXplZA==","aXBlbA==","aWRhdGE=","KWRlYWxsb2M=","ZXNjYWw=","X2Z3ZA==","bWVtY3B5","IExvcmk=","X1JlZg==","IGJhcmE=","IFNlbGxlcnM=","IGRldGVyaW9yYXRpb24=","ZnJhY3Rpb24=","KV07","L3BsYXk=","wqU=","LXRlc3Rz","T2Zmc2V0cw==","T2k=","IEtsYXVz","IHF1ZXJ5aW5n","d2lzaA==","YXBlbA==","X3dvcmtpbmc=","bXlNb2RhbExhYmVs","IHRvRGF0ZQ==","cGVybWFsaW5r","IGZyZWM=","b2xlY3VsZXM=","IEdvb3Nl","LXdpZGdldHM=","dHVydGxl","SW1wcm92ZWQ=","IHJvYWR3YXk=","a2Vocg==","IGFzdHJvbm9teQ==","Q29tYmluZQ==","IGNpZ2Fycw==","X0dBVEU=","L21hbmFnZQ==","IEdlcmFyZA==","IFByb3RlY3Rvcg==","U3Vic3lzdGVt","L2ZpbmQ=","L1lZWVk=","IHRvdGFsaW5n","0LzQvtGC","IE9tYW4=","IGluZmluaXQ=","LW9mZmljZQ==","IGluc3RhbnRpYXRpb24=","LsKn","Y2V1","KGF0b20=","IERyb3BvdXQ=","7YGs","IGNvbmRlbW5pbmc=","X2Jhc2VuYW1l","XX08Lw==","RGF0YUNvbnRleHQ=","IFdhc2hpbmc=","Lk9O","IG1vbW15","KCl9Owo=","IDspCgo=","L2V4dA==","Zm9yZWdyb3VuZENvbG9y","dW5zdXBwb3J0ZWQ=","IHNvbGxlbg==","IGNvbWXDpw==","RElTQUJMRQ==","
IG9uUGF1c2U=","INGH0YLQvtCx0Ys=","IEFpbg==","R3M=","CVRhc2s=","aGF3aw==","Ik5vdA==","QUdS","LmdldFRhYmxl","IGRpdmVyZ2VuY2U=","IG5lZ29jaQ==","UmVwbGFjaW5n","XX0pCg==","aWxsdXNpb24=","IM6U","X0tFWUJPQVJE","S3I=","CW9y","56Gu6K6k","CXByaW50bG4=","IFNlYXJjaGVz","IEZyZXNubw==","IHZlcmRhZA==","XE1pZGRsZXdhcmU=","IOy1nA==","fSkoKTs=","dGV4dEFsaWdu","aW5rZWw=","LlR4dA==","IG9wdGltaXphdGlvbnM=","eW91bmc=","IGxlYXNlZA==","SlQ=","IElvbmljTW9kdWxl","ZXR0aW5ncw==","ZXNlaGVu","IGZhdm91cmFibGU=","YW5leQ==","IG90aGVyQnV0dG9uVGl0bGVz","IFRoYW1lcw==","CXVuaXQ=","Q09MVU1O","IGxvaQ==","LHByb3Rv","X1BSSQ==","IHdhbmRlcmVk","IHNhcGk=","YmFja3dhcmQ=","YXJhb2g=","IEZI","IEFsZw==","CWFj","YXJybw==","5Y6G","IFNPUw==","IERyZWFk","VmVjdG9yWGQ=","LnJtdHJlZQ==","X2V4ZWN1dG9y","IHByZWduYW5jaWVz","IHByYWN5","IFd3dw==","IEFyY2hiaXNob3A=","IG1laW5lbg==","RlU=","LkVudg==","IGVubGlnaHRlbmVk","IG9yaWdpbmF0ZQ==","5Y+K","IHpsaWI=","X1NB","IHdhc3Rlcw==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","cHJhcw==","IGhvcnJpZmllZA==","IENhbGR3ZWxs","dG95","X3Nob3Q=","IGxlc2Jp","IE1hZ25ldA==","b3hpYw==","U3VybmFtZQ==","IHNob3dUb2FzdA==","CURlc3Ryb3k=","LmdldEV4dGVybmFs","SUxJ","IE5ldmlsbGU=","dHNreQ==","IG1lbGFrdWthbg==","ICImIw==","IGZsb3dlcmluZw==","IHZldGVyaW5hcmlhbg==","IGhhcm1vbmlj","IENhc3NhbmRyYQ==","KENyZWF0ZQ==","cGVyc2U=","UGVybQ==","KU5TU3RyaW5n","IGlzSW4=","IEZsb2F0aW5nQWN0aW9uQnV0dG9u","L05ldw==","IPCd","Y2FwYWJpbGl0eQ==","IGN1Y2tvbGQ=","IEJhaW4=","KCl7DQoNCg==","UEVBUg==","IGphd3M=","IGdvZGU=","IGNhc3NldHRl","LmZyZXF1ZW5jeQ==","U0NPUkU=","LmludGVudA==","Olsi","IOWmguaenA==","77yf4oCd","L0ltYWdl","IHNpZW5kbw==","X2FsbG9jYXRpb24=","OkI=","L1JlZ2lzdGVy","X2thdGVnb3Jp","dW55YQ==","Lmluc3RhbmNlcw==","IFVOSVZFUlNJVFk=","IHBsZWFzYW50bHk=","IGdsYW5kcw==","IFlFTExPVw==","IFRoaWNr","QW10","IHByeQ==","IGx1aw==","KHByb2JsZW0=","IHByb2plY3Rpbmc=","W25vdw==","IGVzdG95","KCgpPT4=","IHdheXBvaW50cw==","IEJsaWNr","LlJlcXVpcmU=","TGFrZQ==","IElHTk9SRQ==","IF
FIQm94TGF5b3V0","X3Jlc3BvbnNlcw==","Lndy","JmFjdGlvbg==","LmNoYXJhY3RlcnM=","SVc=","cGFnZU51bQ==","IGRpc3RyYWN0aW5n","XS0n","cGVlcw==","b3VuY3k=","IHNlZ3U=","LmdldFNlbGVjdGlvbk1vZGVs","SW5saW5pbmc=","J2FmZg==","IFByZXNlcnZl","IGFjcXVhaW50YW5jZQ==","IGFudXM=","aW5zdGl0dXRpb24=","IC8vKg==","IFNpY2s=","IEtvZGk=","IEFWUg==","IGJldHI=","IEJlcm5zdGVpbg==","LGN2","Y2Ni","Q0FG","CXNpZ25hbA==","6KiI","UmVzdWx0c0NvbnRyb2xsZXI=","IHNhbG9wZXM=","IHBoZW5vdHlwZQ==","dWJhaA==","X2RhdGFzZXRz","IGdyYWNpb3Vz","IENsaXBib2FyZA==","IGdlbmRlcnM=","ZG93bmxvYWRz","RXhwZXJpbWVudGFs","IGJla2FubnQ=","IG5pdmU=","LkVk","ZGlzbWlzcw==","XFR3aWc=","LkF2","L3Rhc2tz","LnBpY2tsZQ==","KkI=","Y2VzdG9y","Y2FwaXRhbGl6ZQ==","LkdldFNlcnZpY2U=","S2V5SWQ=","LnBpdGNo","IENvbnRyb2xsZWQ=","LnNhdmVk","IHphag==","IENhdGh5","KENhbmNlbGxhdGlvblRva2Vu","LWFuaW1hdGU=","XFxc","IEphc21pbmU=","LkxJTkU=","IGJvdGhlcnM=","IGJ1ZmZhbG8=","IEZPUkVJR04=","IHRhY2tsZWQ=","X0hFQVA=","IHNlcnZpYw==","Pj4s","IEFjdG9ycw==","LlR4","ZWJ4","X3Zpc2l0b3I=","X21hcnNoYWxlZA==","LG1hcA==","IGhlYXRlcnM=","IHVMb2NhbA==","IEthcG9vcg==","IG1pbnV0","LnJlYWRBcw==","IC4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4u","X1ZPTFQ=","LmJ6","IGNvcnJlY3Rpbmc=","U0VQ","YnJpbmc=","SHU=","IEd1cw==","QUFE","aWVyYW4=","ZnJhcmVk","X3JvbQ==","IHNjYXJjaXR5","IGFwb2xvZ2lzZQ==","IHNvbGlkcw==","IEZvcm1hdHRlcg==","ICclJA==","LXZpcw==","IiwiIiw=","VU5ERVI=","ISEhIQoK","IEVsZXZlbg==","KSld","IHNhdGlyZQ==","XHVC","IHNldmVudGVlbg==","TEFOR1VBR0U=","IGFkdmVyc2FyeQ==","IHN0cmZ0aW1l","IG5leHVz","dWJpdHM=","ICclIg==","IFNLSVA=","S0hS","LmJhdA==","IEplYW5z","Lj8=","IGltcG9zdA==","LnF0eQ==","Q29tcHJlc3Npb24=","IHByaW5jaXBhbGVz","b25pbw==","IGJhcmNlbG9uYQ==","IENoaWxp","X21vc3Q=","LnVm","IGNvbnRlbnRWYWx1ZXM=","IEZpc3Q=","dWdhZG9y","VGV4dFdyaXRlcg==","QkFDS0dST1VORA==","IGxpdnJv","IERlc2lyZQ==","bWVhc3VyZW1lbnQ=","UHJvYmU=","IHB1ZGRpbmc=","LnNob3dFcnJvcg==","IHVudGVyc3TDvHQ=","44CB44CB","IMSHZQ==","IHB1bml0aXZl","5q2i","TGlzdEdyb3Vw","LkFyZWE=","IPCfmIkKCg==","b29yZA==","IHNjcmFwaW5n"
,"KHRpY2tldA==","IFdvY2hl","IGV4cGVjdGVkUmVzdWx0","IEtvc3Rlbmxvcw==","Y29uZmlndXJlZA==","X3N0cmVycm9y","LmFkZEhhbmRsZXI=","bW91c2VsZWF2ZQ==","IEZlbGlwZQ==","IENoaW0=","X0NTUg==","UENB","aWZpY2HDp8Ojbw==","KysKCg==","eWFz","IOaWueazlQ==","IElETQ==","IGFuaW1hdGVXaXRoRHVyYXRpb24=","IHNhbWVu","LnN1YnRpdGxl","X0tleURvd24=","IFRyZXk=","IHRlbXBvcmFkYQ==","IHNwZA==","IFJj","IE1hc3NpdmU=","IGJvd3M=","SG9zcGl0YWw=","IGdyb290","IHBhdmluZw==","IGNob3Jlcw==","IEFsbHk=","IGNlcnRpZmljYXRpb25z","IHhib3g=","c2VsZWN0QWxs","R2FtZU92ZXI=","IGNvcm5lcnN0b25l","UmVjb3ZlcmVk","IGRlZW0=","VWx0cmE=","IGdldExhc3Q=","IGFsbWE=","LnRleHRGaWVsZA==","IHdhaXZlZA==","Pih7Cg==","IEVzdHI=","aXNhYmxl","IHByb3Rvbg==","X2ZhY2Vib29r","X1RSQUlO","IGNvb3BlcmF0aW5n","dW5naQ==","QXJpem9uYQ==","I2VjaG8=","LWV4cHJlc3Npb24=","Lm1pbnV0ZXM=","IHByZWZpeGVk","IGZpc2hlcmllcw==","LmNvcnJlY3Q=","IG7Dpg==","KFNwcml0ZQ==","TW9kcw==","IFZpZGU=","IGdldEJ5SWQ=","IEtleW5lcw==","IEVneXB0aWFucw==","X0NPRA==","Qmllbg==","cmVvcGVu","aWdoZXQ=","UkVERU5USUFM","IHVud2luZA==","JA0K","IHJhY2tldA==","IGZsb2F0VmFsdWU=","IFNwZWNpYWx0eQ==","b2NhdGU=","bW91bnRlZA==","QXR0ZW1wdHM=","T2ZmaWNlcnM=","SGFzaFRhYmxl","IGTDqXZlbG9wcGVtZW50","IGRhcA==","IG10eA==","TmFycmF0ZWQ=","a0I=","X1NUQQ==","LUNsYXNz","IGR1bA==","IExlYWRz","IHRyw6pz","ZnJpZW5kbHk=","IEZpbHRlcmluZw==","LXByb3ZpZGVy","INGD0YHQvw==","IEtvbGthdGE=","bWFza2Vk","SURhdGE=","IFt8","wqQ=","IFJlZXNl","IEhvbm9sdWx1","VG9PYmplY3Q=","IHRocmlmdA==","YXNzaQ==","IGNvbmdyYXR1bGF0aW9ucw==","U0tJ","ZW50YXJpb3M=","IEZST05U","dWZpZw==","aG9u","CWdldGxpbmU=","IGhlYXJ0eQ==","Y2FsaW5n","IMOpY29ub20=","ICoqKi8K","X0hFUkU=","YCg=","TWljaGlnYW4=","QmVhbnM=","LXJvdXRl","IHByaW5j","IEd1aWRhbmNl","CWVtaXQ=","Lk9Q","dGhpYw==","ZWxvcGU=","IElSZXF1ZXN0","IGhhbmRsZUNsb3Nl","ZGF0YUFycmF5","LkV4ZWN1dGVTY2FsYXI=","RVBISVI=","IENvbnZlcnNlbHk=","KEZvbnQ=","IG1ldHJl","IFNwaWVsZXI=","RWxsaXBzZQ==","IFBWT0lE","IERhdGFDb250ZXh0","Y29uc3RydWN0ZWQ=","QU5ESU5H","LS0tLS0tLS0tLS0qLwo=","Qm9uam91cg==","X1BIUA==","cHJvZ3Jlc3
NiYXI=","Tm90U3VwcG9ydGVkRXhjZXB0aW9u","IHZlcmRhZGU=","L2NoYW5nZQ==","b3Jzaw==","IGFyb21hdGlj","cmVzcG9ucw==","cmVhbGxvYw==","YXRpc2No","LGV2","IFNpb3V4","dGVh","IFBvZQ==","5LmI","X2Ntb3M=","IGFsYg==","KGxy","IEFwcGFyZWw=","IGRlbGxv","INGC0L7Rhw==","IHN0cmVhbWxpbmU=","d2NoYXI=","QWRvYmU=","LG1vZHVsZQ==","IHVuaW5zdXJlZA==","fSIpDQo=","KCIvLypbQA==","LXBoYXNl","IGZldQ==","X3RB","em9law==","IGZvbGxpYw==","IHR1Zw==","IGJlZmluZA==","IHRhbGxlc3Q=","KG10","aWVkeQ==","X0xlbmd0aA==","IHN0YXVuY2g=","IHJlbW92ZU9iamVjdA==","IGZsYWtlcw==","Z3Jlc3Fs","IGlua2w=","IFNDU0k=","IEtlZXBlcg==","O2w=","IEhpbmR1cw==","X1BFRA==","X0NPTkQ=","IExhdW5kcnk=","KytdPQ==","X0FVWA==","IGJ5xYI=","IGF1bWVudG8=","bWFyZ2luTGVmdA==","ZXF1YWxpdHk=","IEx1eg==","IEVjaw==","X21hcw==","X2xlbnM=","IHN0ZXJpbGU=","Y2xpZW50ZXM=","J30pCgo=","IGdvb2R3aWxs","IEVsbGlzb24=","U3BhY2VJdGVt","IHNob3dNZXNzYWdl","66Gc6re4","IGNvbnRyYXRv","UG9zdGluZw==","LmludGVycG9sYXRl","KGZpbGw=","IGJ1bGxwZW4=","LmdlbmVy","IGh1ZXM=","IG1lbW9yYW5kdW0=","dG9Qcm9taXNl","IEJ5eg==","KHB4","KFByb2dyYW0=","UkVTU0lPTg==","YmZk","IHBsYW50YQ==","Lm1vdXNlUG9zaXRpb24=","IFNwYW0=","6LSn","dGVsZWdyYW0=","YWd5","IGdlZnVuZGVu","LkRvbQ==","IGxpbmVtYW4=","LmJ0bkRlbGV0ZQ==","IHNlbGVjdGl2ZWx5","65Og","SUZT","IEdldEhhc2hDb2Rl","IHJldGly","IHJlcXVpc2l0ZQ==","QlRUYWc=","cGxpYg==","IGZpcmVmb3g=","LnRyYWRl","ICMk","LmNvbXByZXNz","IGxhZGVu","IERpcmVjdG9yeUluZm8=","IE1vZGVz","IGtvbmU=","IGRpdnVs","CWhz","Y3JvZnQ=","IFdIWQ==","eENF","L0dyaWQ=","X0FVRA==","IFNjcmU=","IGVycm9yVGhyb3du","U2FkbHk=","YXRpdGlz","IG5lZ2xpZ2libGU=","LlJlZ2lzdGVyVHlwZQ==","IE1vaXN0","5rWL6K+V","IEJNQw==","bGVhZmxldA==","eW5l","cm9rZW4=","IHZpbmM=","dHR5","IGJldXJldHRl","IEFscGluZQ==","IE1jTQ==","U3BvaWxlcg==","ZGlzdHJpYnV0aW9u","LXJheXM=","IOuwlA==","X3BhcmVudHM=","IGNyYXRlcw==","IGNvbW11dGVycw==","IEFyZ2VudGluZQ==","77u/LyoK","L2ZyYW1ld29yaw==","IGNoYW5uZWxJZA==","Z3JlZW5z","LnNldFN0eWxlU2hlZXQ=","IGluYWNjZXNzaWJsZQ==","aXRhdGVz","IHdhcm1lZA==","RmFicmlj","Z2V0YXR0cg==","ZGlzcGxheVRleHQ=
","X01PTklUT1I=","IHNpZGV3YWxrcw==","SW50aWFsaXplZA==","IGtvbWVu","IGRpc2NyaW1pbmF0b3I=","IE5hdmlnYXRl","KERpcmVjdGlvbg==","IFNwaXQ=","X2FkZGl0aW9uYWw=","IGh0b24=","IGVzcGVyYQ==","IGRlbHZl","IGNvbXBhcnRpcg==","IHByZWVtcHQ=","cHJvY2Vzc29ycw==","LWdpdA==","YmVlbg==","LlNVQg==","IFJlZXZlcw==","L2dlbg==","O3RvcA==","CU1QSQ==","Wlc=","R0VTVA==","YWJpbGly","IHByb2dyZXNzaXZlcw==","aGFmdA==","QXVm","IEFjdGlvblR5cGU=","bGVv","IHV0YW4=","SW5pY2lhbA==","PlVzZXI=","IH0pOwoKCgo=","INio2Yc=","IENoYWlucw==","aXNzcGFjZQ==","L3JlbQ==","U1FMaXRl","IGNlYXNlZmlyZQ==","JGFy","VFJT","Oi8vew==","IFNwaXJpdHM=","2Lo=","KFNpemU=","IG51Zw==","IE9sc2Vu","IGNobG9yaWRl","IERpc3BsYXlOYW1l","IFBlcnQ=","IGdldE1heA==","IEVkaXRvcnM=","IFBhaXM=","YXNtdXM=","VmFj","IFRhYmxlTmFtZQ==","IG51YW5jZWQ=","Rm9yTWVtYmVy","IHNsZWVweQ==","YWR2aXNvcg==","IHN0YWxraW5n","Lm1lZGlhbg==","X0F0dA==","IGdldE5vZGU=","IEZhbmN5","5pWw6YeP","LkF0dHJpYnV0ZVNldA==","KGluc3RydWN0aW9u","eEJE","IGtvcA==","QWZmZWN0ZWQ=","L25hdmJhcg==","IGFpbG1lbnRz","IFJhbWFkYW4=","IEFjY2VudA==","IFBhcmFtb3VudA==","IEdBTQ==","5L2N572u","PSov","LklOUFVU","PFByb2plY3Q=","TGVhc3Q=","IEdlbm9tZQ==","QWNjZXNzb3JUeXBl","bGVmdHJpZ2h0YXJyb3c=","dmVudGluZw==","L3BheW1lbnQ=","X1B0cg==","IHRhbWU=","IE1FTUJFUg==","IEJpdGNvaW5z","LmVwYW0=","LlBsZWFzZQ==","IHNjaHdhcg==","Q3BwTWV0aG9kSW50aWFsaXplZA==","IHVuaWNvcm4=","IGJlZGV1dA==","X0hT","IGF1dG9nZW5lcmF0ZWQ=","IExpbGx5","IEFzc2Vzcw==","IEhlaWRp","LnNvdXJjZXM=","LnRlbGw=","YXJnaW5z","KCInIiw=","0LvQvtC2","IEVyb3RpYw==","IGp1c3Rv","IGVzYWM=","Y29tYQ==","IENvbG9ueQ==","IHBjdA==","CWVu","IGVtcGV6","IERlbGV0aW5n","TkVM","IGVuYW0=","UHJlc3NFdmVudA==","IFJlc29sdmVy","IFJURQ==","Rng=","IEluY29ycmVjdA==","IHlj","X3JlYWRpbmc=","O2Jhc2U=","IGhhc2h0YWdz","IE1hcmluZXJz","LlNldEZsb2F0","IHJlYXNzdXJpbmc=","aXJzY2g=","KHVzZXJpZA==","ID09PT0=","XSkpKTsK","a2Y=","IHRpbGVk","ZWd1YXJk","Q2xpZW50ZXM=","5pmC6ZaT","ZHNs","UmlnaHRz","IFBzYWxt","ZHVyaW5n","Q2xlYXJDb2xvcg==","dXN0YQ==","PENvbW1lbnQ=","IG5venpsZQ==","IFBMQUNF","L2hpc3Rvcn
k=","aWh1","aVZhcg==","IGdlcm0=","IHRyaW1taW5n","IEh1bnRlcnM=","IFJTVlA=","SW50ZXJlc3RpbmdseQ==","amlhbg==","KSl7Cgo=","LkV4cGVjdA==","IFRvaWxldA==","IHdhbGxwYXBlcnM=","LldlYlNlcnZsZXQ=","YXJwYQ==","L21haW53aW5kb3c=","aHE=","IHV5","IGluZGlnbg==","Q2hlY2tlZENoYW5nZUxpc3RlbmVy","IGNhbGxlcnM=","IE1vdXNlRXZlbnRBcmdz","IEpTY3JvbGxQYW5l","IHfFgmE=","cmVwb3NpdG9yaWVz","IMWbdw==","IHJlZmVyZW5jaWE=","IGlvdGE=","IGNhcmdhcg==","X29ic2VydmVy","SENJ","c2lsdmVy","IGRldmFzdGF0aW9u","LXNlbWlib2xk","IEV4cGxhaW4=","IEJsb2NrbHk=","Llhy","ZXN0dXJlUmVjb2duaXplcg==","Q2FuY2VsQnV0dG9u","IExvY2tl","VHJpYWw=","X1BMQUNF","anVhbGFu","IFJ1Ymlu","U3RyaXBl","IG1ldGFEYXRh","Y29uZmlkZW5jZQ==","X2JhdHRlcnk=","IGlzbA==","IGJvYQ==","LnRhcmdldHM=","bGlqa2U=","IGFkb2xlc2NlbnRl","YmV3","LEZhbHNl","IHlPZmZzZXQ=","UHJldmlvdXNseQ==","PXBhdGg=","X0FB","iOadgw==","IGJha2VrYQ==","IGxlZQ==","IEJsb2NraW5n","L3RpdGxl","IOW8gA==","IFN0ZXZlbnNvbg==","KW9iamVjdA==","aXN0cm9z","LmdldFNlcnZlcg==","IHBsYW50YXRpb24=","X0JveA==","ICc7Jw==","dGljYQ==","KSldOwo=","IGRpc3Bhcml0aWVz","xrDhu5s=","aWNyb2JpYWw=","IHNwYXM=","L0RE","KHBvaW50ZXI=","IG1pZHBvaW50","LmdldENsYXNzTmFtZQ==","IFRvdGFsbHk=","IGNvbmdlbg==","IHTDqnRl","LnhsaW0=","Q09NUExFVEU=","KGZp","b3dhcmQ=","0LzRjw==","LmFzYw==","IHBhZ2luYXRl","IGx1cmtpbmc=","LnNpZ251cA==","U1RZTEU=","IHdvcnNo","aHY=","IGRlZmVuc2l2ZWx5","IEx1dGhlcmFu","LmZ1bg==","INC40L3RhNC+0YDQvA==","cHNj","IGFkbW9u","IEVzdGltYXRlZA==","IE15U3FsQ29ubmVjdGlvbg==","LnN0YXR1c1N0cmlw","IGFudGlnZW4=","IGhlcnJhbWllbnQ=","IENvbnN1bWVycw==","IFlU","Lm1hc2tzVG9Cb3VuZHM=","Lnh0aWNrcw==","OnJlcXVlc3Q=","IE1vbw==","LWF1","IHRvUmV0dXJu","IFNhcHBoaXJl","Y294","ZXhhbXBsZUlucHV0RW1haWw=","IGNvcmF6","KHBpZWNl","IHJlY29uc3RydWN0ZWQ=","X3NpZ251cA==","J10pPw==","QmlsbGluZw==","IENyb3dsZXk=","c3Rvcm1z","Zm9yY2Vy","IHN1cHJlbWFjaXN0","X3doZWVs","CXBj","LmdldERvY3VtZW50","LnVuc3F1ZWV6ZQ==","LmdyYWRl","ZWxsdW5n","LnNob3BwaW5n","Y3VzdG9tZXJJZA==","IG1lZGlkYXM=","IE1vbWVudHM=","ZW51b3Vz","SUZJQ0FURQ==","IyMjIyMjIwo=","5paH56ug
","4buNYw==","b3Jtc2c=","YWxvbQ==","LXRyYWRl","CWJ0","L3N0dWRlbnQ=","YnJpZw==","YW5uZXNz","KHJh","IHJpY2VyY2E=","U3BlYWtlcg==","csOz","Z3Rlc3Q=","R2x5cGg=","w7xnZW4=","QEpzb24=","KHN1bW1hcnk=","S29t","YmV0aA==","L2VuZ2luZQ==","Q2xpbWF0ZQ==","c3VibWl0QnV0dG9u","ZXZl","ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09Cg==","cGVkaWE=","IHVzZXJuYW1lcw==","IEpN","IG1zZQ==","aW5zcGVjdA==","IFNuYXBkcmFnb24=","IGRlZmVuc2VtYW4=","IFVJVGFibGVWaWV3RGVsZWdhdGU=","aW5kaG92ZW4=","IEJveWxl","IEFsdGE=","YXJkdQ==","IHdyZXN0bGVy","IFN0cmFpdA==","IGVncmVn","X2Jhc2VsaW5l","RW52aXJvbm1lbnRhbA==","IGludml0","IEJUUw==","IElTSUw=","IGNvb3A=","aG9yZXM=","I0A=","IGNvbXBlbA==","KHNraXA=","6Ziz","X0RFUFJFQ0FURUQ=","aXBoZXJz","ZG91YmxlVmFsdWU=","IEFSUg==","LlNjb3Jl","IGNocm9tb3NvbWVz","Y2xhdXNl","IEx1aWdp","IHN1bnNjcmVlbg==","IGN5dG9r","LnRvSlNPTlN0cmluZw==","IHByb3ByZQ==","cG9vbnM=","bWl0dGVycw==","IGtpdHRlbnM=","IGNhdGhvbGlj","Lmx0","wqw=","X3F1aWNr","IHZyYWk=","IElSZWFkT25seQ==","IEhpZ2dpbnM=","IHNob3ZlZA==","IGxpYWlzb24=","X293bg==","IG1vc3F1aXRvZXM=","X25n","LlNldEtleU5hbWU=","X1JlbmRlcmVy","X09zYw==","LnVucmVnaXN0ZXI=","TWVzc2FnZVR5cGU=","LWZvdW5kZWQ=","IHNvdXRoZWFzdGVybg==","IGhhc2h0YWJsZQ==","LmluZGVudA==","IGpveWZ1bA==","X3NleA==","c2Fk","LmRlYmlhbg==","X2dhcw==","IHBlcmlzaA==","IGhldGU=","X3NpbmdsZXRvbg==","KGdyYWQ=","IGt0w7NyYQ==","IGR3aW5k","aXR0YWw=","U2VlaW5n","IFJvb2tpZQ==","CUxhYmVs","c2hhbg==","PDw8PDw8PDw=","IHLDqA==","aWVzZWw=","YXJyZXJh","Y2hyaXN0","IGN1cnZhdHVyZQ==","IGVwaGVt","Rm9ybWF0dGluZw==","LmRpY3Rpb25hcnk=","LlNldHRlcg==","IEhpc3RvZ3JhbQ==","IFN0dXR0Z2FydA==","IHBhY2luZw==","dXRhdGlvbnM=","IE5TSw==","IFBhbWVsYQ==","IEJhaWw=","IHBvbGFyaXphdGlvbg==","IEfDtg==","IEVsYWluZQ==","IGtpY2tvZmY=","IGNoYXBlbA==","PXBvc3Q=","IG1pZHdheQ==","ZXdpcw==","X01S","aWVlZQ==","LXRlc3Rpbmc=","bWV6","Pi0t","IGRvY3RyaW5lcw==","IG1pbGlldQ==","IFJBRElP","dGFrZW4=","UmVzcG9ucw==","IGhhbmRzZXQ=","IGNvbnRybw==","IEFwcGxpZXM=","6Zif","LkJpbm
RpbmdTb3VyY2U=","INis","IGh1bWlsaQ==","IE1lbGFuaWE=","T3ZlcmxhcA==","KFBhcmNlbA==","IHdhcmVob3VzZXM=","LkdldEJ5SWQ=","IGZyYW5rZnVydA==","IFdpdHQ=","LnByb2o=","IFNhc2hh","IFJldmVy","IGFydGljdWxhdGVk","YW5jaGVz","IFNlbWluYXI=","IERhZ2dlcg==","IEFnaWxl","T1dM","IEJz","b2tseW4=","RXRh","IGFnb3N0bw==","7ZWY7Jes","IG9wdGFyZw==","CW9uQ2hhbmdl","IFJPQUQ=","R0JL","IGVudGZlcg==","LkF1dG9Db21wbGV0ZQ==","IGhlbGZlbg==","Q2hlYXA=","IGFwcHJlbnRpY2U=","aW90aWNz","5oqA","T2ZZZWFy","aW5kZXJlZA==","Lk1TRw==","IE1hcsOtYQ==","KGlucGxhY2U=","IGZpbmRl","KERF","LlNlcmlhbGl6ZXI=","JHRpbWU=","dW5uYWJsZQ==","TWFpblRocmVhZA==","ZGVwbG95bWVudA==","IG1wZnI=","cmljaFRleHRQYW5lbA==","KTsKCgoKCg==","IGRhbnljaA==","X0JFRk9SRQ==","X2FyeQ==","IEJhdW0=","IHR1cmJ1bGVudA==","IE11bHRpbWVkaWE=","IHBoeXNpY2lzdA==","5Zy6","QW5pbWF0ZQ==","PUY=","UGFnbw==","L3R3aXR0ZXI=","b3R0aWU=","dWN1cnNhbA==","X3BhZ2luYXRpb24=","LmFyY2hpdmU=","LWRvY3VtZW50","aW5pbmU=","U2VsbGVy","YWRyZXNz","6ZO+5o6l","0LDRgtC10LPQvtGA","X2ZybQ==","bm9EQg==","aWdhdGVk","IE9zYW1h","cGV0dG8=","Pnk=","LVVu","IGNvcHBpYQ==","QWxtb3N0RXF1YWw=","LmxleA==","IGxldmVsZWQ=","IFNDSVA=","X0hPT0s=","SUxvZ2dlcg==","bmVhdQ==","77ye","24zZhg==","aWtoYWls","IHVwbG9hZGVy","IENhcm9seW4=","LmFkZFZhbHVl","dGhpbmtpbmc=","cHJpbnRTdGF0cw==","IGNhbWJpb3M=","cG9p","IEJFRA==","IHhibWM=","Lu+/vQ==","IHNhcmNhc3Q=","IE5FQw==","JGJvZHk=","QWxsV2luZG93cw==","IHlvdW5nc3Rlcg==","IHVuZWFzeQ==","KEFU","IG5vc3RhbGdpYw==","UFJJQ0U=","IFNlaXRlbg==","IG1ha2E=","IGxpbXA=","IGNvbnRyYXN0cw==","Q29mZmVl","CWdlbg==","IHBlcm1z","IE5lZWRsZXNz","b3V2ZQ==","YXJjaGluZw==","X3BlbmFsdHk=","cm93YWQ=","b25nYW4=","X2R1cg==","IGlmbmRlZg==","aWF1eA==","IGNhcGFjaWRhZA==","IE5vcnRl","IC0qLQ0K","aWZlcw==","IE1hbnNpb24=","I1JlZ2lvbg==","Q2FuY2VsbGF0aW9u","IG5lYXJpbmc=","IGxhbmd1","ZXJlcXVpc2l0ZXM=","X2V4cGVyaW1lbnQ=","b25kaGVpbQ==","XSwm","IENvb2xpbmc=","IHNhZmFyaQ==","IHBpb25lZXJz","IGZhcm1ob3VzZQ==","IGRpc3RhbmNpYQ==","IGRlc2VydGVk","IE5hcnJvdw==","LnNn","IGVudHJhcg==","LnJh","IHJlZnVyYmlzaGVk","IGl
udGVyY29ubmVjdGVk","IHN1cnZpdmVz","IHF1YWxpZmllcnM=","X0NIQVJT","LWFqYXg=","IFJvcnk=","IGtvbGVq","L0dM","X2xlZ2Fs","IFRZUEVT","IFZvaWNlcw==","IEZlcmQ=","dWplbXk=","IHNjb3JlYm9hcmQ=","IEJPVA==","eERE","IEl2YW5rYQ==","IGhzdg==","bm9kaXNjYXJk","IFRIRVNF","bW9qb20=","IHRpY2tpbmc=","cGVx","IOa3u+WKoA==","IE5pY29s","CWFuZ2xl","X2FsbG9jYXRlZA==","IHN0cnV0","eERC","RXZhbHVhdGU=","IFZBUklBTlQ=","IHJlZmVyZW5jZWRDb2x1bW5OYW1l","bG9o","IFJlcXVlc3RPcHRpb25z","IGNvY28=","IGJsZWFjaA==","X29yZ2FuaXphdGlvbg==","IENITw==","SFRUUFM=","X2JhcnJpZXI=","LnZpc2l0TWV0aG9kSW5zbg==","IHZpdGU=","IC0k","W2NlbGw=","IGNlc3NhdGlvbg==","CgoKCgoKCgoKCgo=","INGB0LDQuQ==","RXZhbHVhdGlvbg==","IENJTQ==","cXVhbGl0aWVz","WG1sQXR0cmlidXRl","IEVtb2pp","ICIoJw==","IFRVUk4=","eHNk","IEdJUw==","IGNyZWF0ZVNlbGVjdG9y","cmlwcGxl","IHVubmVjZXNzYXJpbHk=","IG5ld1Bvcw==","IHN5bWJvbGlzbQ==","b2J1dHRvbg==","IHNhbW8=","ICgqKCg=","LnJld2FyZA==","S0VSTkVM","KGpTY3JvbGxQYW5l","IGJ5c3RhbmQ=","X2ljYWxs","IGR1bmdlb25z","IGNvbnN0ZWxsYXRpb24=","IGVtYnJhY2Vz","IEluZmFudA==","QXVzdGlu","LmFic3RyYWN0","IGNvbXBhZ24=","IENvbmRpdGlvbmluZw==","TWFpcw==","VmVyaWZpZXI=","IFB5cmFtaWQ=","IG1MaXN0ZW5lcg==","X2J1aWxkaW5n","LlJlZGlz","IFRvb3Ro","TE9HR0VS","LkFzeW5jVGFzaw==","X3ByaW5jaXBhbA==","ZXhhbXBsZU1vZGFsTGFiZWw=","CUxvY2Fs","TWFya2Vycw==","IGRvbHBoaW5z","LlRleHRFZGl0","J2Fs","IG92ZXJzdA==","LWRyaXZl","IGluc29tbmlh","IGFkYg==","X3F1ZXVlcw==","RWI=","IERhbW4=","aXN0cmluZ3N0cmVhbQ==","CUR1ZWw=","aWJibGU=","IGltcmVhZA==","LmZpbmlzaGVk","IG1pc3JlcHJlc2VudGVk","xYRzdA==","aW9uYWxlcw==","Ik5vdw==","LlNlbGVjdFNpbmdsZU5vZGU=","IHdlYWtlbmluZw==","X2luc3RydWN0aW9ucw==","LW9z","IHN0YXJ0UG9pbnQ=","IE1pbWU=","IEhlbGQ=","fHwo","dW1taW5ncw==","b2tpbm8=","IHJlZmw=","cmlkb3I=","SW50ZWdyYXRlZA==","RU9iamVjdA==","cGVhdHM=","Q2lyY3VsYXI=","IFNvZGl1bQ==","IHBvZHLDrWE=","bWVkaWNpbmU=","IHBhcmFub2lh","L2JhY2tncm91bmQ=","KGJvcmRlcg==","X3Nsb3c=","IHByZXNlbnRWaWV3Q29udHJvbGxlcg==","IGNvbnRpbmdlbmN5","IFBhc2FkZW5h","bG9vcHM=","IE9j","YXBwbGljYXRpb25z","IG1wZw=="
,"IEFR","LldpbkNvbnRyb2xz","bGVkb24=","IFJlcQ==","IEFjcmVz","aWJpcg==","IGdldFdpbmRvdw==","IFlhaA==","IG5lZWR5","4pa6","IFRPTQ==","KFsuLi4=","IGZx","IENhbWRlbg==","b3JkaW5hdGVk","CWNoaWxkcmVu","dmVnZXQ=","CWRpcmVjdGlvbg==","PEZpZWxk","X2NvcnJlY3Rpb24=","KEVORA==","SEVFVA==","RmFsc3k=","LmR5bGli","X1JFUE8=","IGJyaWxsaWFuY2U=","b2dyw6Fm","bG9k","IHBvd2RlcmVk","KEFydA==","IE1JTEw=","0LXQtNCw0Lo=","X3NpbXVsYXRpb24=","IHNtYXNoaW5n","IHVybFN0cmluZw==","IGRyZWFkZWQ=","cmllZw==","L25z","IEludGVycHJldGVy","Om1heA==","ZGVyaXY=","IFBldHQ=","IG1vZMOobGU=","IGFtcGxpZmllZA==","IFNpZ25hbHM=","Lm5hdkN0cmw=","5ZY=","IHNlcGFyYXRvcnM=","IFNISUZU","IGZpZGVsaXR5","LnNvbg==","KGNh","IFBMVUdJTg==","IGxpZ2h0ZW4=","UEJT","ZmxvYXRpbmc=","KGxvYWRlcg==","IHBlZWxlZA==","aGlj","IHRhcGVk","IG5vdmVtYnJl","IHN0dWZmaW5n","IEZpcmVhcm1z","LkRyYXdhYmxl","IGNvcnRpY2Fs","IEdVSUNvbnRlbnQ=","IFZlcm9uaWNh","X3JzYQ==","IGNvbW1lbW9yYXRl","LlNZU1RFTQ==","IGRhbXM=","LmlzVHJ1ZQ==","IFByZWduYW5jeQ==","7Iug","IGF1ZGl0b3J5","KENlbGw=","IGludmFkaW5n","IGZvckVhY2g=","CURyYXc=","TWFyY3Vz","UHJvY2Vzc2Vk","IHNwcmF5aW5n","IE91dGxpbmVJbnB1dEJvcmRlcg==","ZXNzZXJhY3Q=","IOacgA==","UGc=","LXF1YXJ0ZXJz","IHNrbA==","L3Byb3ZpZGVycw==","dG9IYXZlQmVlbkNhbGxlZFRpbWVz","IGNvc21vcw==","IGZpbmFsaXN0cw==","IHNsZWVwZXI=","IE1hdGVyaWFsQXBw","ZGFj","IGJ1c2luZXNzbWVu","xJ9lcg==","Qmlhcw==","ZGF0YWw=","VXBFZGl0","IFRpcg==","SVNUSUM=","IEhlcmE=","X2ludGVyc2VjdGlvbg==","IExhbWE=","CWFwcGVuZA==","IHBvbGx1dGFudHM=","IFNpa2g=","IGNvbGxhYm9yYXRpb25z","bnV0cml0aW9u","IGhhbW0=","IERpbGxvbg==","X0RPVA==","IGZpcnN0aGFuZA==","U09BUA==","PXo=","LnByaXY=","TWlzbWF0Y2g=","LnNlbmRSZWRpcmVjdA==","LmxpbmtMYWJlbA==","IHdyZWFr","TWFydmVs","L3Ns","IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIw==","IG1vdmFibGU=","0YPQuQ==","IERyaW5raW5n","YWNlYQ==","IHRyb3ZhcmU=","LkNTUw==","IGtlcm4=","dmZz","5pWw5a2X","IHN0ZXNzbw==","IEZPUkNF","IGxpZWY=","IGFjaGlldmVz","IEVsaWphaA==","R2V0UHJvcGVydHk=","LypA","IEh1bWFuaXR5","KFRoZQ==","d2FybQ==","PiIp","IGNvbXB1dGF0aW
9ucw==","LnRpbnRDb2xvcg==","IHVzbGVlcA==","IEdQTHY=","bmRhdGE=","L2NsaQ==","TW9o","PiINCg==","LmJyaWRnZQ==","IGVuY3ljbG9wZWRpYQ==","IEJJTg==","IFN1cHBvc2U=","INio2Kc=","cmlldmVk","cGFnZW4=","aXJzZQ==","UGFjaWZpYw==","LmZ1bGxOYW1l","IGFsbGVnZQ==","aWxsdXN0cg==","IOqysA==","IGRldGVycmVudA==","IE5hcGxlcw==","aW5jbHVkZWQ=","UmF0ZXM=","IGhhc05leHQ=","IEplcmVtaWFo","IEZlcm5hbmRleg==","IGdldE9yZGVy","LlN1YnNjcmliZQ==","UG9zcw==","OikK","IFdvcmtzaGVldA==","YmxlbmQ=","IHdpdHR5","IGNvdW50ZXJmZWl0","X2R5","L1J1bnRpbWU=","IHNvZG9t","L2Rv","IDx8","IFJlY3J1","5aOw5piO","IG1vZGVsb3M=","IGJpdHJhdGU=","LmNybQ==","bHVz","IGZpbGVUeXBl","5bCR","IG1hcnJvdw==","IFZlbmV6dWVsYW4=","IHNjYXY=","IFNUT0NL","IEltcG9zc2libGU=","bmF2aWdhdGlvbkJhcg==","IHNpZ2h0aW5ncw==","IGNlbGxGb3JSb3dBdA==","IHJlY3Rz","IGFpcmw=","IExlc3Rlcg==","IG5vZHM=","QHJlZ2lzdGVy","eENE","cG5hbWU=","IHBvdHRlcnk=","IHp3YXI=","IFN1bmRlcmxhbmQ=","4oCmYnV0","L2NvbnRyb2w=","IGNhbGN1bHVz","KGlzb2xhdGU=","cGxhY2Vob2xkZXJz","Kilf","IH19DQo=","IEtvaGFuYQ==","Y29kaWxl","b3Rlcmlj","IHByZXBhaWQ=","IGdyYW5kbWE=","IHN1bHBo","IEdhaW5lcw==","XE1vZHVsZQ==","IGNvdW5zZWxsaW5n","LWdlbmVyaWM=","IFR1ZXM=","LkdyYWRpZW50","IFRodXJz","IGVudHJh","IGFkdmFuY2VtZW50cw==","U1dFUA==","X01BUktFUg==","IGtsdWI=","IG3DqWc=","ZmZmZmZmZg==","Il0pewo=","L2NvbXBpbGVy","YWRpZW5z","U3RyaW5nVmFsdWU=","IFNjdWxwdA==","cGFuZWxz","5b2i","5Lqn5ZOB","YXLDrWE=","IGRlcmFpbA==","IExvY2g=","IHBlcHA=","bXB6","IOKe","S1Y=","IERpZXRhcnk=","QVJSSUVS","IHBvbw==","IFJBTkRPTQ==","6LM=","IEhvbWV3b3Jr","LlZhbGlkYXRpb25FcnJvcg==","IE1hcnhpc20=","0YPRgtGM","IGNvbWVudGFyaW8=","X0JPVEg=","IHBybQ==","Y2FzdEhpdA==","aXBsaW5h","IFZvdGVycw==","LmFzc2lnbm1lbnQ=","bmV0dA==","U0FNUExF","amlz","InRpdGxl","LnZhbGlkYXRvcnM=","ICI/Ig==","dW5pZGFk","X2ZpZ3VyZQ==","IGFjY3J1","IFJlbWFyaw==","Rm91bmRlcg==","LmluaXRpYWxpemVBcHA=","IFByZXNlbnRz","IE1VTFRJ","dmVzdGVy","LnZpc2l0SW5zbg==","IGdldFBhdGg=","X2RpZmZlcmVudA==","IGxvb3Nlbg==","IGFycm9nYW5jZQ==","IGp1bmk=","IFphaGw=","IEdDQk8=","IG1vZGVyYXRvcnM=",
"TGluZUNvbG9y","IE5vZGVUeXBl","X2JlbG93","b3JndA==","IEhhcmxlbQ==","IE9yd2VsbA==","X1VOSVg=","LnJlc3RhcnQ=","aXRoZQ==","IGdlbmll","IGNsYWQ=","Jzp7Jw==","IHNob3djYXNlZA==","IGxhcnZhZQ==","TWljaGVsbGU=","IExI","LmdldExvZw==","Q29uc3RydWN0ZWQ=","IGh2YQ==","X3N1YnM=","IGRhYg==","LmRvY3VtZW50YXRpb24=","IG5pZw==","IE1hbmRhcmlu","4oCUYXJl","LXBpYw==","X2Nvcm5lcnM=","LkJvdA==","XVso","X18nOg0K","LkVkaXRvckJ1dHRvbg==","LXN5bnRheA==","U2FuZGVycw==","IFRhbmtz","ZGVzaXJlZA==","c3RhbnRpYXRlVmlld0NvbnRyb2xsZXI=","R2Vhcg==","IHVzZXJNb2RlbA==","CWNvbnRyb2w=","RGF0YUJhc2U=","IERlYmF0ZQ==","aW5lc2lz","IHhl","Lm1hZ25pdHVkZQ==","IHlhbg==","IEFwaUV4Y2VwdGlvbg==","KHdoaWNo","YXRoZXJpbmc=","Q29uc2lkZXJpbmc=","IEFMUEhB","568=","IFJhbmtpbmdz","LmxpZmU=","6rCS","T0ZGU0VU","LnRlbGVncmFt","IGZhdmljb24=","X3NzaA==","IEVER0U=","UmVmcw==","YW5kYW4=","IGFkb2xlc2NlbmNl","IFNoYW5r","IFN3YW1w","X3BlcmM=","IGNvbnRyYXJpbw==","Lm55","LiIpLA==","IHVudGVu","X0VOU1VSRQ==","L29yZGVycw==","KGNm","IHVudHJlYXRlZA==","YXplbg==","KElucHV0U3RyZWFt","IGFwcHJvdmFscw==","IGdlcm1hbnk=","IGF2ZXJl","VHJpcGxl","LWJhcnM=","IHNldFBhZ2U=","SmFj","IEZpcmVz","IERBWVM=","56i/","IHNjcmF0Y2hlZA==","IEJFTg==","LXdpZmU=","IGludGVsbGVjdHVhbHM=","IHBvdWNv","IHN0YWJpbGl6YXRpb24=","IHBlbG9z","IFNUT1JZ","PGZpZWxkc2V0","IE1haWRlbg==","LkNpcmNsZQ==","IHNtw6U=","Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLw==","L2VuZA==","6Iux","KG51bXB5","LnBhbmVsQ29udHJvbA==","Y2hyaWZ0","Y29udGluZW50YWw=","X3BlbA==","RFNM","PFwv","IE9QUw==","IE5vb24=","IHVuZGlzY2xvc2Vk","IFlpbg==","c3Bv","CWRlc2NyaWJl","dG9ncm91cA==","IGRpYXBlcnM=","IG1IYW5kbGVy","CUNsb3Nl","IHJlbmRpdGlvbg==","PXsoew==","RW50ZXJpbmc=","KERJUg==","X09MRA==","IFN0aW5n","IFBhd24=","dXNzZXM=","IGdldENvZGU=","SXRlbUxpc3Q=","IGluZGlz","ID4iLA==","IGNvbmZs","IGRvbWluYXRlcw==","dGhlc2l6ZWQ=","c3RlcmVk","IGNhYw==","IEdlbnVpbmU=","PFBhdGg=","IEhvZGc=","LWZseQ==","LmNpZA==","IG9iamVjdElk","KCMp","Lm1vdmVUb05leHQ=","RGlhbG9ndWU=","PHBjbA==","dGVhckRvd24=","Jyl9fQo=","5ri4"
,"TGl2ZXI=","TWF0cml4WGQ=","IGNyYXBweQ==","X0RFQUQ=","LnBhcnRpYWw=","LkRyb3BEb3duU3R5bGU=","ZnVy","LkNvbGxhcHNlZA==","LXRvd24=","SUNJQUw=","RGlyZWNjaW9u","IHNldFJlc3VsdA==","L3Jlc3VsdA==","IFNoZWVw","eXNjYWxl","Y29udGk=","IHJlY29ub2M=","6b4=","W2Jsb2Nr","Y2xheno=","IGJlbmVmaXRpbmc=","QUFQ","LnJlcXVpcmVz","LkNvb2tpZQ==","IGNhcHRpdml0eQ==","LlNlY3Rpb24=","XSkpOw==","LWNhcmV0","KHZh","IHbDpGw=","IEhpZ2hsYW5kcw==","Tm90YQ==","IEZNTA==","d2ludGVy","IGFnZW5kYXM=","X18sX18=","ZGVtYW5k","IHR1dG9ycw==","X1NZTQ==","KENI","IHVuZXF1aXY=","LnRyYW5zaXRpb25z","IENhbG9yaWVz","IEVjb25vbWlzdA==","LlBpbg==","IGRlZmxlY3Q=","RXhwb3NlZA==","IGdlcA==","LkxheW91dENvbnRyb2xJdGVt","IHJhaw==","ZmliZXI=","IGFwb3B0","IEVudW1z","aXRldXI=","IG1vZGlmaWVz","IHJlbHVjdGFuY2U=","IHNwaWxscw==","QXNjZW5kaW5n","IHRlbXBlcmF0dXJh","LWludGVyZmFjZQ==","IGNvd29ya2Vycw==","IDpc","IFJvdW5kZWRSZWN0YW5nbGVCb3JkZXI=","PEtleVZhbHVlUGFpcg==","UGFyc2Vk","IHdpdGhkcmF3aW5n","KGhpc3Q=","IHRoZW9yaXN0cw==","LW5n","IGNoaWZm","66W4","UEFJUg==","IEJyZXdlcg==","S2E=","IEJvd2xpbmc=","X3Rs","J30pLg==","IHByb2Jpbmc=","QXJz","LnJlYWxt","IGVzdGF0ZXM=","dmFyeQ==","IEtlcw==","ICIsIiw=","fSwNCg0K","UGxhbm5pbmc=","IFJlY29u","IGNvbmNsdXM=","dmF1bHQ=","IGluY2VudGl2","IGJpbm5lbg==","IFBoaWxsaWVz","LkxvYWRlcg==","IEZhbGxlbg==","X1R3bw==","IEJpYXM=","Um9sZUlk","IFBhcmNlbGFibGU=","IERvZGQ=","ICQoIiMi","5Lq/5YWD","LW1lYW4=","KE91dHB1dA==","QVRUUklCVVRF","IHNlY3JldGl2ZQ==","IFBlcmlwaGVyYWw=","IEZpbGVk","IOW3","X21lZGlhbg==","LklD","IEFycmF5QnVmZmVy","KFRBQkxF","IF0KCgo=","IGFudGhvbG9neQ==","IG9ic2NlbmU=","b3BhdXNl","IEVTVg==","w6F2ZWlz","b3NlbWl0ZQ==","R3J1cG8=","IE1PQ0s=","IHVuYXZvaWRhYmxl","IGNvdmlk","aG93ZXI=","Lk5ldmVy","U2V0QWN0aXZl","e3RleHQ=","X3Byb2Jh","XENvbmZpZ3VyYXRpb24=","IEJyeWNl","IGNvZXJjZQ==","IFZhbmRlcmJpbHQ=","Z2VtZW50cw==","bGVnZw==","IHJlYnV0","IFZJTg==","5YiG6ZKf","IG9ic2Vzc2l2ZQ==","L2NtZA==","IGtvbW1lbnQ=","IExhdWdo","64uI","IHNlbHZlcw==","b3JyYQ==","LnJvb21z","IGNvbXBsZXhpdGllcw==","CW9wZXJhdG9y","QWx0ZXJuYXRl","IHNvcn
RpZQ==","Z2V0TnVt","IHJlYWxpemFkbw==","RG9pbmc=","X0dyaWQ=","IHNldFN1cHBvcnRBY3Rpb25CYXI=","w6RobHQ=","5ZQ=","OnsNCg==","SW50ZXJlc3RlZA==","IGRpbWluaXNoaW5n","IExvb3Q=","QWRhcHRlckZhY3Rvcnk=","LXJ1bm5lcg==","c2F2aW5n","KHNlbQ==","ZmFk","RURVUkU=","X2RvY3VtZW50bw==","IENhbGVi","IGd1aXNl","IE1jR3U=","KHVuaXRz","IGJlemllcg==","IHBhdHQ=","IHBlbHZpYw==","IGNvbm9zYw==","YWN0aXZv","IE1hbG9uZQ==","LlRha2U=","KHNxcnQ=","c3Rhc2hvcA==","LWVuZGVk","IE1pZGk=","IEJhbmM=","IFBlcHNp","X01BWQ==","IHBsbA==","L2luZXQ=","LWVuaA==","IEl0YWw=","bW91cg==","IHJlbHVjdGFudGx5","LnJjUGFyYW1z","IHBhbHM=","LnBrZw==","IGZvcm1hcw==","bGllw59saWNo","LWJvb2tz","b21hbHk=","IHJlY29tbWFuZA==","UExJQ0lU","acSN","LmNnQ29sb3I=","KEJvYXJk","0LXQvdC40Lg=","IExFTg==","Xy1f","IFVubw==","IE5PVElGWQ==","aGFuYQ==","W3Nsb3Q=","XGFkbWlu","SW5JbnNwZWN0b3I=","KWNvbnN0","IGZsYXR0ZXJpbmc=","aWdyYW1z","Y2Fj","IGhlYXJ0ZmVsdA==","SW5kdXN0cmlhbA==","QWlycG9ydA==","WEk=","IHZhbGlkYXI=","cmVwcmVzZW50YXRpb24=","IFJlbnRhbHM=","IG9taXNzaW9u","IG15dGhpY2Fs","IEVudHJhbmNl","IHNlcmdlYW50","IHdyaXRlVG8=","IE5vcndpY2g=","IExpb25lbA==","LWJhbA==","IFp3ZQ==","X3JlbnQ=","IHJlbWFy","IEJhaGFtYXM=","IEJhbGU=","OiIiLA==","U3RhdGVNYW5hZ2Vy","IGLDqW7DqQ==","ICEqKio=","IGJsb2NrZXJz","LnNlbA==","KExFRA==","IGZzbQ==","IHdpcGluZw==","IHphbWFu","IFJlaQ==","YWd1YXk=","Li4n","IGxvdW5n","ZXRjb2Rl","IGxhbno=","Y2l0YXRpb24=","W2A=","LWVs","YXNib3VyZw==","IFNPTEQ=","IE9yY2hhcmQ=","Q0hhbmRsZQ==","IExvZnQ=","LmRpdmlkZQ==","LVdpdGg=","L2Rlc2lnbg==","LlNlcnZpY2VNb2RlbA==","TWlz","IHJhd0RhdGE=","IGludGVyYWN0cw==","IEVyb3Rpaw==","IG9uUG9zdEV4ZWN1dGU=","6Jk=","IHZleA==","IHN0cmluZ2lmeQ==","eW5lcw==","X0VtYWls","X09N","cXVpdGU=","X2VmZmVjdHM=","QURY","IGFkb3JuZWQ=","c3Nm","ZWRpdGFy","IE1hZGFtZQ==","IHJlZnV0ZQ==","IEx1Y2E=","IFdvbHZlcmluZQ==","c2V4bw==","QW5kcmU=","PFJvdXRl","IFNjZW5lcw==","IHJlb3JkZXI=","X214","Y3JlYXRlVGltZQ==","IHN5bnQ=","LG1vZGVs","aWNyb3Vz","IE1PVVNF","6rk=","Y29tcHJlc3Npb24=","IHByaW5jZXM=","IHNoYW1lZnVs","IHBhdQ==","IFRFRA==","KGNvZWZm
cw==","4K+B","L3VtZA==","IGNhbnlvbg==","L3JlbmRlcg==","LnVzZWQ=","IEFncmVl","IEpld2Vs","L2NvbW1hbmQ=","QmFyY29kZQ==","KGRlYWQ=","d2Vic29ja2V0","dW11","R0xPU1M=","IGZvcnRu","IGJvYXN0ZWQ=","ICJcIj4=","aXN0dW5n","LW1hY2hpbmU=","IGluY2lkZW50YWw=","IG1N","LXJlYWRhYmxl","LmZ4","IFBPTElU","IHN5bWxpbms=","KHVzaW5n","eEVE","ICIiIi4=","LlN0ZG91dA==","IOiL","IGFsbWFjZW4=","CXRyaWdnZXI=","LXRpcA==","IENPTU1JVA==","LmluZ3JlZGllbnRz","IG1hbmlmZXN0cw==","IE9TUw==","IEhhdXQ=","L2xvYWRpbmc=","LlR5cGVTdHJpbmc=","KGNsZWFu","IExJQw==","IEJhcmJpZQ==","T09TRQ==","LuKApg==","IEludml0YXRpb24=","IHJlZGVlbWVk","KS4nPC8=","IGltZGI=","IGJlbGFuZw==","IHNjcmFwcGVk","LW5pbA==","IFByb3Vk","0LDRgdGC","LlNJWkU=","IHNldFZpc2libGU=","IHJhaW5pbmc=","IGxlbmdodA==","IGFuYWs=","X0NNUA==","IHBhbm9yYW1pYw==","IGdpbQ==","c2FpZA==","IHByb2dlbg==","IEdCUA==","4oCg","IGludmVzdGlnYXRlcw==","IHByw6hz","L25hdmlnYXRpb24=","Lm1vdGlvbg==","IExpZ2h0d2VpZ2h0","CQkgICAgICAgICAgICA=","IG9udG9sb2d5","IE5JSA==","KHNpbXA=","LnB1bGw=","IHByb3Bvc2l0aW9ucw==","QFdlYlNlcnZsZXQ=","IHJlZGVmaW5l","IEVORVJHWQ==","7KC4","T1JJWkFUSU9O","IFZlcmbDvGc=","fX1dLAo=","IHdlZ2Vu","4LmH","Jm9hY3V0ZQ==","LkJvYXJk","IGN1bHBh","IEdlbmV0aWNz","IH0+","IGFkYW1hbnQ=","44GV44KM","CWF1ZGlv","6riA","IG51bWVyYWw=","IHJlc3RyYWluaW5n","LklOVEVSTkFM","IE1vbXM=","IElQQWRkcmVzcw==","aW1lbnRp","IGFscGhhYmV0aWNhbA==","IEpGSw==","IEF0dGVtcHRz","ZnJhZ2U=","IGRhcm0=","IGJhc2VtYW4=","PWxvZw==","LGVycm9y","IERJU0NMQUlNUw==","CXRleHR1cmU=","LWNvdmVyZWQ=","IFBsdW0=","IOWVhg==","IHDDqXJp","KHJldmlldw==","IEZvcmNlZA==","Rkg=","IOy0iA==","IGV5ZWJyb3c=","X1JFR1M=","IGNoZXN0cw==","IExhcmdlc3Q=","XV06Cg==","VVRPUg==","IGVucXVpcmllcw==","IGNva2U=","LWNhdGNoaW5n","IEdlb2dyYXBoeQ==","YXRlbA==","KHByb2Q=","b3JXaGVyZQ==","TmluZQ==","IFBpZWQ=","IGFkanVzdHM=","KHByb20=","X21lbnVz","X2V4YW0=","IE5vdGlmaWNhdGlvbkNlbnRlcg==","CWRz","TElL","X3R3aXR0ZXI=","Q1JD","IGV1eA==","IFN0YWJsZQ==","aXlvcg==","IGNhcmJvbmF0ZQ==","LnNhbA==","TWFwcGVk","aWV2aW5n","KXk=","eW5hbW9kYg==","LkNvbXBhcmVU
YWc=","IHNldmVyZWQ=","J2VtYWls","IGZvcnNr","bGV4cG9ydA==","SU1JVEVS","IEFwZXg=","IGhtYWM=","IE9kZHM=","b3ZlcnJpZGVz","OiI7DQo=","IG9waW9pZHM=","IG1lc21lcg==","IEdBTA==","LWxpbmVz","IGFwcGx5TWlkZGxld2FyZQ==","IHNlcmlh","RVNJUw==","IG5pbGFp","IG1hbGxz","IFBhb2xv","IExlbnQ=","LmJ1aWxkZXJz","LyY=","IENsaXBz","IEp1cmFzc2lj","4pWd","LWNvbmQ=","44O844OI","fHd4","LmhvdXNl","IGhlcmF1cw==","IGhr","IENvY28=","IlwK","IGFjY3JlZGl0YXRpb24=","IFJhY2g=","ZXJ0ZXN0","c2hvcnRjb2Rl","IHZhbGlkYXRpb25z","VUxTRQ==","IGV4Y2VycHRz","U2Vla0Jhcg==","IGdldExvY2F0aW9u","IGZlbmNlZA==","KGdz","IGx5cw==","IGhhcm1z","IEhvbW8=","4oCcU2hl","IOKAuw==","PXNlc3Npb24=","X0NPTVBJTEU=","TWVhbnM=","IHBldGl0aW9uZXI=","SU1P","Il09Pg==","ZGJl","X2dwcw==","IG1q","X2V4cGlyZQ==","IERBTg==","IHh2","IGZ1bmNpb25lcw==","IHNoYWt5","U3VnYXI=","IGdldFJlc3VsdA==","PFRva2Vu","aHR0cENsaWVudA==","Lm9uUGF1c2U=","c3Rp","U25ha2U=","TWFwcGluZ3M=","IFJlYXBlcg==","IGZyZWk=","IENvc21vcw==","dWVycw==","IEhhag==","IEJsYXpl","b2ppcw==","Q3JMZg==","LnByb2M=","IG90cA==","IERyYXdz","CVJFRw==","KCcnJw==","IGdlbmVyYQ==","IEF0dGFjaGVk","UkVN","JTsiPg==","dXJuaXNoZWQ=","X3Jw","IHpvYWxz","IGFzc29ydGVk","aXRpemVk","IGNhbWlubw==","IGFiZHVjdGVk","LnRvQmU=","J10pOg==","IE1vb3I=","SW5jbHVkaW5n","IGdyYXppbmc=","c2V0U3RhdHVz","YWlyb2Jp","X0V4ZWN1dGU=","aWZpYW50","ZWxkbw==","YXV0b21hdGlj","KCQp","IGxlYXBz","b25lZERhdGVUaW1l","KGxheWVycw==","LXByb2R1Y2Vk","IFdvcmtib29r","IGVub3Jtb3VzbHk=","IGRlcHJlc3NpdmU=","IGFhYQ==","RW1iZWRkZWQ=","QlVN","IGVsbGVz","IGJvYXJkZWQ=","xZtteQ==","IG1hc2lo","X2dlbmVz","CVRleHR1cmU=","aXN0YXI=","IEF1Z3VzdGE=","IEFwcE1ldGhvZEJlYXQ=","IGtvZGU=","YWJleg==","X3BpZWNlcw==","Q3Vycg==","IGxpYmVyYWxpc20=","RGljaw==","QWxl","IHF1YWxl","fSc7Cg==","LmFuc3dlcnM=","IEpBTg==","IFBVUkU=","IGNhbm9l","IFNBTUU=","UXVhbGlmaWVy","IGRibmFtZQ==","IElubm9j","CVRSQUNF","aXZyZQ==","IG1lY2g=","YXNlbA==","Iixb","IGFzaWE=","IENhbnRlcmJ1cnk=","LkRhdGFCaW5kaW5ncw==","a2Fo","KCkpKSk=","IGR6aWV3","cmV0ZQ==","IHNjcmVlbmluZ3M=","Lk1PVVNF","IGJ1c2llc3Q=","
CXJlbmRlcmVy","IHRlc3RpbW9uaWFscw==","IGFzcGlyZQ==","Zm9ydHVuZQ==","IE1TQw==","IGRhbXBpbmc=","XCIsCg==","V2Vs","V2lr","IOyXrA==","KHRpZA==","IENhbm5lcw==","b2NvcA==","PiIrCg==","ZmFjZXQ=","IHNsYXNoZWQ=","IExpYmVyaWE=","U21vb3Ro","X2NoZQ==","TGFib3Vy","IGVtaW5lbnQ=","Olg=","XEJhY2tlbmQ=","ICsrKQo=","IHRlYW13b3Jr","X2FnZw==","LlNlcnZl","IFNORA==","IFBJQ0s=","IHdpcGVz","L1R5cG9ncmFwaHk=","IEFQQQ==","aWtraQ==","IGNvZGVy","Z2FiZW4=","IHVua25vdw==","LkRlcGFydG1lbnQ=","4Lix4Lia","IHBsYXllck5hbWU=","KmU=","PEJsb2Nr","X3VwZA==","IEdpYmJz","bGVhc2luZw==","IENvbG9tYmlhbg==","KFBIUA==","ICoqKiEK","IOydvA==","IEN1cnRhaW4=","L2F5","2YTZiQ==","c3BvcnRz","IGRlc2Vh","aXLDoQ==","IHVuY29uZGl0aW9uYWw=","IHRocm9t","IENIUklTVA==","IEhPUg==","b3Njb3BpYw==","IHlhxZ8=","IG5vc3Rybw==","Li4uIik7DQo=","IHNsdXI=","IGhhdHRlbg==","IHBlc3RpY2lkZQ==","IGZyZWV3YXk=","IENvaA==","IHdhbm5vbmNl","IG1laWRlbg==","X3N1YnN0cg==","X0NTUw==","IFN5bWJvbHM=","4Li34Lit","REVU","IE1hZGRlbg==","IHJlcXVlc3Rlcg==","LnZpcnR1YWw=","IHd4RGVmYXVsdA==","IGF1dG9tw6F0aWNhbWVudGU=","YnJpZHM=","aVQ=","LlByaW9yaXR5","Jyk7PC8=","YnVuZw==","RGVhZGxpbmU=","Q29uY3JldGU=","IG5leHRQYWdl","IOuwmw==","IFN0b2tl","a29w","INCx0L7Qu9GM","IFByb2R1aw==","LW1ha2Vy","IFByb2plY3RpbGU=","YW5jZWxsYWJsZQ==","IFRIRUlS","VG9SZW1vdmU=","RU1V","Y29tbWVyY2lhbA==","QVZFRA==","IHdlYXZpbmc=","IGJpb21l","QFNldHRlcg==","cW1s","IGJyb2FkZW4=","INGB0L8=","SVNS","IGRlYWN0aXZhdGVk","IHNlbGVjdGVkSW5kZXg=","cmlvdXM=","ZWxwcw==","LkVzY2FwZQ==","IHBvbGxlZA==","cXVpYQ==","X3JlZmw=","X21pbWU=","PEF1ZGlvU291cmNl","KFRyYW5zZm9ybQ==","ZXZlbm9kZA==","CXJhbmRvbQ==","bG9jcw==","IGRldXQ=","cmVwbGFjZW1lbnQ=","IGV4YW1pbmVy","SGFzS2V5","IOumrOyKpO2KuA==","IENsb3Ro","IOCkqg==","IFJlZ2lzdHJv","IEVzdGhlcg==","IFNoYXJlZE1vZHVsZQ==","LmJvcnJvdw==","IG9zY2lsbGF0b3I=","IGZvb2xz","uqs=","IGJvYXN0aW5n","X3B1bHNl","c2hhcmluZw==","IHBpc3RvbHM=","X1BMQU4=","IHNlcHRlbWJlcg==","IG11c3Rlcg==","IG1hcmNow6k=","Q0hFTVk=","IHN1aQ==","IGdlYnJ1aWs=","Lj0n","ZXJyYXRlZA==","IExpYQ==","IGhhdW50","IE
N1c2g=","cm91dGVQcm92aWRlcg==","Inw=","ZW5kcGhw","Il1dCg==","IGF2YQ==","77yBIiw=","7Ke4","IGNvbGE=","X1NQRUxM","IGFsw6lt","KExhbmd1YWdl","KGR1bW15","IGJ1bmtlcg==","IEVtcHJlc2E=","IGNyZWF0ZUNvbnRleHQ=","Om1pbg==","IEJPT1Q=","IE1lcmVkaXRo","Wmg=","IERvd25pbmc=","d2pnbA==","LmRj","c2RhbGU=","IGluY29udmVuaWVudA==","IHJlYWRtZQ==","TmF2aWdhdGlvblZpZXc=","Q09ORElUSU9O","LmRlcA==","IHLDqXVzcw==","IG9wY2nDs24=","IEFjY291bnRhYmlsaXR5","Lk1hcg==","LWd1aWQ=","RURHRQ==","RXZlbnRNYW5hZ2Vy","IGRpc2NpcGxl","dWNrbGVz","fX0+","aW50ZXJlc3RlZA==","RmlsdGVyV2hlcmU=","IHB1c3M=","LXByb3h5","X3N0YXR1c2Vz","IFsj","dW5mb2xk","IFJvbm5pZQ==","JiYh","IGFjZXNzbw==","dW9z","X3lpZWxk","KGNhbGVuZGFy","KHNvdW5k","IGRhdGFBcnJheQ==","IFlhdGVz","IHByb2Nlc3Npb24=","RUZBVUxU","IEdIQw==","YW11cmE=","IHN0cmljdGVy","LkJPVFRPTQ==","IGhhYml0dWFs","eEFG","QVZJTkc=","IHNldHVwcw==","ID17Cg==","Kioo","IHNvaw==","IHJldGluYQ==","IEZpcmVwbGFjZQ==","aW52ZXJ0","IEZvcnJlc3Q=","PGRhdGE=","XEFjdGlvbg==","T1VHSA==","IGNhcmVsZXNz","LmdldEFjdGl2ZQ==","ZXNlcw==","IHpkasSZ","KSkqKA==","U0VN","IFBhbmlj","VG91Y2hlcw==","IHByZWNv","L2FjY291bnRz","5L6b","UG9zdGFsQ29kZXM=","LXBsdWdpbnM=","PG1lc3NhZ2U=","KHBvd2Vy","IHBlcmN1c3Npb24=","IGPDqWw=","5o6o","IGRhbmNlZA==","X1NDQU5DT0RF","IFNpdHRpbmc=","IExva2k=","U2hhcmluZw==","LkRpcg==","IHNjaHdlcg==","X0xB","Lk1lbnVTdHJpcA==","X3plcm9z","IGZpeGF0aW9u","IEFtaXQ=","IGNvbXBsaWVk","LnNwYWNlQmV0d2Vlbg==","IGFycmVzdGluZw==","IFN1Zw==","IHBlcmZvcg==","IGtvbXBsZQ==","IEVzc2VuY2U=","IHBsZWlu","c2ltdWxhdGlvbg==","IGNyZWF0ZWRCeQ==","IEV4cGVkaXRpb24=","77yBCgoKCg==","dHJhaW5lcg==","Il09JA==","IHN1Y3Rpb24=","bVBpZA==","bm90aW4=","IHByZWNpb3M=","IEFzc3VyYW5jZQ==","IExhbA==","LiIm","IG1pbkxlbmd0aA==","IE1pbmVyYWxz","dHJhamVjdG9yeQ==","U0FGRQ==","IG51YW5jZXM=","KGV4dHJh","X3ZpZGVvcw==","W109ew==","IGhvbmV5bW9vbg==","X3ByZXA=","CQkJCQkJCQkJCSA=","IHB1cnBvcw==","IGFuemVpZ2Vu","LnN0cnV0cw==","IHBhZ2Fy","LkF1dG9TaXplTW9kZQ==","IHdlbmlnZXI=","IHBhZ2Fu","IGFjaWRpYw==","Z01hcHM=","IGJld2FyZQ==","X2lwYw==","IG
1lZHM=","IGRpc2XDsW8=","KSkpCgoK","Q2h1cmNo","IG51cnR1cmluZw==","X21waQ==","IHJlc3VsdGFudA==","IFBpc3RvbA==","c1BpZA==","TXNw","TW9tZW50","IFVQTE9BRA==","TmFubw==","YmxpY2s=","IG1lc3VyZQ==","IExheWVycw==","X3RyYWo=","IGJ1dHRvbldpdGhUeXBl","CWNvbW1vbg==","IE15Q2xhc3M=","2KjYsQ==","eG9vcHM=","X0hlaWdodA==","X1dBUk5JTkdT","U2V0VGV4dA==","IEhpc3Bhbmljcw==","TnVsbFBvaW50ZXJFeGNlcHRpb24=","LmZhY3Rvcg==","IHZpZWxsZWljaHQ=","IHNob3V0cw==","dHJ1c3RlZA==","IG5ld1Jvdw==","IEZyYW7Dpw==","W2pq","4oCUd2hv","IFFEaXI=","X2FkdmFuY2Vk","KEhhdmVPY2N1cnJlZA==","IHVucGw=","L3Jvcw==","LmVhc3k=","IEJBTEw=","550=","L2xncGw=","IHN1YmNvbnNjaW91cw==","ICctJzsK","ICcpOw==","INGW","IHNjYW50","X3Nlc3M=","X3BsYXlpbmc=","X0lTTw==","IHNldFNpemU=","X2RlY2s=","X0xBUkdF","IE1leQ==","Q2hpY2tlbg==","aWZmaW4=","ZGlzcG9zZQ==","SEVTVA==","TGF1Z2g=","IExDUw==","IG9uc2l0ZQ==","LmlzTG9nZ2VkSW4=","IGlycml0YXRlZA==","IGJyaWdhZGU=","IGRlcXVldWU=","Y2xhc3NOYW1lcw==","IE3DoXM=","IEF0YXJp","KElPRXhjZXB0aW9u","UmFjaGVs","LXNhbXBsZQ==","IGVpZ2VudGxpY2g=","SUZERUY=","Lm5laWdoYm9ycw==","IHNlcGVyYXRl","IExpc3Rpbmdz","LmZm","KGltcG9ydA==","TW9kZWxBdHRyaWJ1dGU=","IHNwZW5kZXI=","IG1vdGlmcw==","c3N1ZQ==","IEFwcHJlbnRpY2U=","LWNhdA==","clBpZA==","Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8K","b2N6","aW5pb25z","L2NvbnRhaW5lcg==","IHBsYWdpYXJpc20=","V3JpdGFibGVEYXRhYmFzZQ==","Ly4KCg==","IEZldmVy","LVZlcnNpb24=","YWNpamE=","IHdlaQ==","LWluZw==","IHRlbWFz","IHN1cmdlZA==","IGNyaWE=","IGFyZA==","Yml0Y29pbg==","LnRpbWV6b25l","IG9iamVjdE1hcHBlcg==","IAogICAgICAgICAgICAK","IHlsaW0=","IElDVQ==","IERlcHJlY2F0ZWQ=","KSgpOwo=","QVJHRVI=","dW5nYWxvdw==","VGVzdERhdGE=","KHB0cw==","RklMRU5BTUU=","dXBwbHk=","IHBhY2llbnRlcw==","LGxlZnQ=","IFdyaXRlTGluZQ==","IHBhcmNlbHM=","X2ZvbGRlcnM=","IERpcms=","LmFzc2VydElzSW5zdGFuY2U=","TWND","X1ZhcmlhYmxl","KGFh","IFBvcms=","LlB1Ymxpc2g=","LWdheQ==","IFBldHJh","IENvbm5lY3Rpbmc=","VGFiQ29udHJvbA==","aXZlcmluZw==","KFNjcmVlbg==","IGNoaWxsZWQ=","IG
Fpbw==","VG91Y2hFdmVudA==","IGFjY2Vzc2lvbg==","IExvaXM=","L21vbWVudA==","IGFudsOkbmQ=","IHN1aWNpZGVz","KGhlbHA=","YW5kZXJz","IFZJRA==","QmVp","ZXZlbnRv","IEFuZ3Vz","VmVycw==","IEJvcmRlYXV4","LnN0cmVhbWluZw==","IHJvdWdl","IGNyYWZ0c21hbnNoaXA=","b3NzaWw=","X0ZBTEw=","QG1lZGlh","aWxlYWtz","RGF0YVNlcnZpY2U=","IFRyaXBBZHZpc29y","IE1hYXI=","Q3Vyc28=","UG9zdGFsQ29kZXNOTA==","KCk7Kys=","JFBvc3RhbENvZGVzTkw=","IG9jb3I=","IHRhaW50ZWQ=","IGxlbQ==","LW91dHM=","IHh4eHg=","IGlycml0YXRpbmc=","b3hpZA==","b2ludGVk","IFRvcm8=","X292","LmJpcnRo","KyU=","IENoYXJhY3RlcmlzdGljcw==","IEJldHRpbmc=","IG9mZmVuZA==","IFBIWVM=","IElDTVA=","eERD","IENk","LmdldE1hcA==","YXRjaGV0","LmN1cnJlbnRJbmRleA==","RVJBTA==","IGthcHBh","aWRlbmNlcw==","UGFyZW4=","IFNlcmdlaQ==","LWZpbg==","J10sWyc=","w6FtYXJh","R3Jvd2luZw==","R2xhc3M=","CW1ldGE=","dmVyYmF0aW0=","L0dQTA==","IEthaA==","KHN2Zw==","Y2xpc3Q=","IEJsb3dqb2I=","b2NjYW4=","LmFib3J0","b2RlbGlzdA==","IGRpZmbDqXJlbnRz","X09QVFM=","PXJlcQ==","IGludG94","IGRpYWdvbg==","IFsoIg==","JlI=","IG9iamVjdGl2ZWx5","IGJsaW5raW5n","IExvdmVz","cmluZ2U=","Kik7Cgo=","IEJvbmRz","IExvdmVk","ZWx0cw==","IGRpc3BhcmF0ZQ==","IEVucmlxdWU=","IldpdGg=","cmVtaXVt","YWphcmFu","dHJ5aW5n","LVJ1c3NpYW4=","bmV3SW5zdGFuY2U=","LlRSQU4=","IG9yYW5nZXM=","L2xvY2FsZQ==","IERJU1A=","CW5z","IFNodXR0ZXJzdG9jaw==","IENMT0NL","KHJhZA==","IGFzc3VyYW5jZXM=","IHJhc3A=","VWJlcmdyYXBo","RW1pbHk=","IGludmVudGlvbnM=","cmlvdA==","IHRvc3Npbmc=","IG1ha2VvdmVy","IHVuaXRPZldvcms=","YnV0dG9uU2hhcGU=","5Yid5aeL5YyW","IHBhcnRlZA==","4paR","LnNpZ21vaWQ=","IHJlZGlyZWN0aW9u","IGRpc3R1cmJhbmNlcw==","IGludGltaWRhdGVk","CUNyZWF0ZWQ=","YWdldA==","IGNvcnJlcw==","IE5FRw==","aXRvbmU=","L2Zyb250","IFZlcnNl","Z2FtYmFy","IHByZW1pZXJlZA==","IElNTw==","IEdvYmllcm5v","IGlmcw==","YXlhaA==","LkNPTA==","IGZyZWRlcg==","IHN1Ym1lcmdlZA==","IE5lcm8=","bW9kaWZpYWJsZQ==","L0Zvb3Rlcg==","LWNlbnRyYWw=","IGdvdXZlcg==","IFRyaWVk","IGRpenp5","UXVlcnlQYXJhbQ==","Ij4nKwo=","X3ByaW1pdGl2ZQ==","56iO","LmdwdQ==","IHZveg==","ZW56ZQ==","IFdpbGRlcm5
lc3M=","IHByb2JhYmls","L3JlYw==","IGFjY2Vz","IFRydXN0ZWVz","R2I=","IHBhZGRpbmdIb3Jpem9udGFs","U2hpZWxk","IE5hbWVu","dWRkbGVk","IFByaW9yaXR5UXVldWU=","UG9vcg==","IFNBRg==","LS1bWw==","IGNobG9yaW5l","IHZlcmJhbGx5","IGFpcmU=","PjsNCg==","aWxoYQ==","W2NvbG9y","YW5kYWxvbmU=","LmFkZFJvdw==","IFNvaw==","IENvbm9y","IG1lam9yYXI=","J2lscw==","ZGV0YWxsZQ==","ICIpLAo=","JUA=","Lmxhenk=","Lmp1bXA=","b3N0ZQ==","K0Y=","IGluZnVyaQ==","IHNvbnJh","aXRlbWlk","JGxvZw==","IG11cmRlcm91cw==","TEVD","CW5pbA==","IE3DpHI=","KHBn","aWxlbw==","QXNjaWk=","IExvY2toZWVk","IFRoZW8=","QmVsbA==","YWNpb25hbGVz","LmNyZWF0ZU5ldw==","IOW+","LWZvb3RiYWxs","IGVjb21tZXJjZQ==","CVNpbXBsZQ==","Y2x5","LklubmVyRXhjZXB0aW9u","IHBlc29z","IHRyb3Bl","IEFSR1M=","TWlhbWk=","IFBhbG8=","IFN1emFubmU=","X21hcHBpbmdz","I3tA","IE9jY3VwYXRpb25hbA==","X2J1Y2tldHM=","Z29hbHM=","X1J1bg==","LXByZXBlbmQ=","c3Nz","bWFyc2hhbGw=","IGVxdWl2YWxlbmNl","IFdlbGNo","KE9wQ29kZXM=","CWNsb2Nr","IE1lZGluYQ==","VEVSUw==","b3Jhbmc=","VGhvdWdodA==","IG9hdHM=","X1RFWA==","UklDUw==","IGluZGlmZmVyZW5jZQ==","IGFsbG90","LlVzZVRleHQ=","IFRyaWNrcw==","YXdl","LkZJTEw=","LXBocA==","LnZvaWNl","IFBhdGhmaW5kZXI=","X1RBR1M=","IFRyaXQ=","5oyJ6ZKu","YmJj","IGFkZGl0aXZlcw==","IHNjaGxl","IEtleWJvYXJkSW50ZXJydXB0","IHVzZVBhcmFtcw==","IEJ1Y2hhbmFu","cmlhbmdsZQ==","IG11bHRpcGx5aW5n","IHNlbGJlcg==","IFllcA==","Q2hhaXI=","LXJlcG9ydGVk","X1NESw==","LG5v","IEZhbGxpbmc=","5rk=","ICgpLAo=","cGRi","IEJvcm91Z2g=","LnJlbW92ZUZyb20=","IG92ZXJzaGFkb3c=","aWdhaWw=","IHR1bmc=","IG1tYw==","W3BhcmVudA==","RXh0ZXJu","YXZpb2xldA==","JykiCg==","IGNvdW50ZXJ0b3Bz","IHVidW50dQ==","5rc=","IM6T","IHVucHVibGlzaGVk","IEluZGllcw==","VU5FVA==","IG9mZXJ0YQ==","IGRhbWVz","IGFzdGVyb2lkcw==","IG5vdmVtYmVy","Y29udHJhc3Q=","LkFkZE1vZGVsRXJyb3I=","K1NhbnM=","IHNjcmFtYmxpbmc=","dGV4dFZpZXc=","L2NyeXB0bw==","VXNlUHJvZ3JhbQ==","QHVwZGF0ZQ==","RGVzZGU=","U0FU","IGRpc3BsZQ==","YW5uw6ll","XERlcGVuZGVuY3lJbmplY3Rpb24=","IGl0bQ==","IOe8","IGV0aG9z","QVBP","IEdhcmPDrWE=","aWRpcw==","IFN0ZWFr","cmliYQ==","
X3ZlcmlmaWNhdGlvbg==","IEZL","IEVpbnNhdHo=","IHBlcnNvbmFsaXNlZA==","LW1vdGlvbg==","IE1lbGFuaWU=","w7Zo","X1ZD","IGRyaWZ0aW5n","LmNvbnN0cnVjdA==","IO2UhA==","IGJhdGNoaW5n","Li4vLi4vLi4vLi4v","RVJQ","X3V0Yw==","IG11bHRpdA==","IG1yYg==","Y2Nhaw==","Y2h1bmtz","IHRyYW5zbHVjZW50","IHBheW9mZg==","4oCUYW4=","IHNpbGw=","IG9ybmFtZW50cw==","Z3Vh","VUJZ","KHN0ZXBz","IEJPUkRFUg==","IFNPVU5E","YGAK","ZW5hcmllcw==","IEJpdHRl","IGdseXBocw==","IG92ZXJydW4=","IGJsb2NrSWR4","IE1TVA==","IGdlbm9tZXM=","dGVuc29yZmxvdw==","RGlyZWN0b3J5TmFtZQ==","X2xocw==","IGZpbnQ=","YWRkdG9ncm91cA==","IHN0ZWFkZmFzdA==","IGNsb3Zlcw==","IFNvdmlldHM=","IElTQQ==","wqNv","dXJnZXJ5","c292","INCy0YvQstC+0LQ=","IHB1ZA==","LXdhdGNo","IEhvc3BpdGFscw==","fXdoaWxl","IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMj","4buj","IGFrdHVhbA==","IGtpbG9ncmFtcw==","IEZBQw==","b3BoeXM=","cHJz","KkA=","eWI=","c2VjdXJlZA==","IGFsZ8O6bg==","IOCkuQ==","cGhhbnM=","QWRkb24=","IGNlbnRyYWxseQ==","X1NVSVRF","SW50ZXJlc3Rpbmc=","dWx0aW1v","QWdhaW5zdA==","IEV6cmE=","IEhlYg==","dWlkYQ==","IHNreXM=","T0xWRQ==","QmVuZWZpdHM=","IHByaXNl","Lio/KQ==","LmlzRGVmaW5lZA==","IHN0YW5kb2Zm","IHBsYW5v","LmxhdGVzdA==","ICgkLg==","IEdvdWxk","IGNhdXRpb25lZA==","J10o","IG51aXQ=","IEhDSQ==","Zm9vdGJhbGw=","IHdpbGxlbg==","UHJvY2VlZA==","IGludGVuZGluZw==","dGlm","IHNwb25zb3Jpbmc=","b2hhbmE=","RG9z","TW9ybmluZw==","ICEiKTsK","LnNoZWxs","IFJFTEFURUQ=","IHBpbXA=","L2NvdXJzZQ==","IHJhbWlmaWNhdGlvbnM=","IHBpeG1hcA==","IHBvd2VybGVzcw==","IGRvdWNoZQ==","Y3JpbWU=","Y29udHJpYnV0b3Jz","KHByb3RvY29s","IGdldFBvc2l0aW9u","U0VUVElOR1M=","IHZpZXQ=","aXNzZXM=","V2l0aEVtYWlsQW5kUGFzc3dvcmQ=","UmV0dXJuVHlwZQ==","QXBwZQ==","IElLRQ==","LkNvb2tpZXM=","Lm1lZGl1bQ==","LmdldEpTT05BcnJheQ==","X0Zvcg==","L3Rpbnlvcw==","IFRhYmxlQ2VsbA==","IFJFUExBQ0U=","Lk5ldHdvcmtpbmc=","IGJvd2Vk","CW1k","PSJ7ISE=","IGhvbmRh","IEV1cg==","IGluZG9uZXNpYQ==","IGhlbmQ=","LnZpZXdtb2RlbA==","CWN0cmw=","IFRhYmxldHM=","LW9yYW5nZQ==","ZXJyYXM=","X2dyYXBoaWNz","e3M=","IFRpdGxlcw==","IGRpYWdub3Nlcw==","b3VwbGU=","X0RvdWJsZQ=="
,"W3Jlc3VsdA==","IGppdHRlcg==","X05VTUVSSUM=","PmY=","X01Z","0LjRgdGC0LXQvA==","c3RvcmVJZA==","IHJlbGlucXU=","ZW9z","IHdpZGVuaW5n","IHRhY29z","LllFUw==","XSsn","IEluZGV4ZWQ=","IHByb2Zlc3Npb25uZWw=","IFN0cmFw","QnVmZmVyRGF0YQ==","ZWVh","ZXJpbg==","QU5DRVM=","X1RYVA==","IHt9Lg==","KGNvbnRyYWN0","eXc=","IGJsaW5kbmVzcw==","Q0hBTg==","CWdsQ29sb3I=","IGN1cnJlbnRQb3NpdGlvbg==","IENhdWNhc2lhbg==","JGltZw==","I2Fh","IHNlYW4=","TWVzcw==","Kj0qPQ==","IGNhcGFjaXRvcg==","YWxmYQ==","LlJlbW92ZUFsbA==","IFdQQVJBTQ==","dWxhZG8=","bmljb3M=","IG9yZ3k=","R1g=","X0RFVklDRVM=","b3Vya2U=","IGtC","IHNvcGhpc3RpY2F0aW9u","X2F1ZGl0","L0lQ","IEx5ZnQ=","L1N0","CWNhbmNlbA==","IG92YXJpYW4=","bWFyaW5l","a8SZ","IFlN","IE1pbG8=","IE1hdFRhYmxl","IEFiYnk=","bnpl","IEx1ZHdpZw==","X2FybW9y","IHNjYWZmb2xk","4buXaQ==","YXV0aG9yaXR5","4bqleQ==","LmdldFByb2R1Y3Q=","IE9yYml0","X1BhcmFtZXRlcg==","LmRhdGVGb3JtYXQ=","L3RhZ3M=","LlNwZWVk","KExpbmU=","IHBvbGlzaGluZw==","IGtvbWI=","IHJ0cmlt","J2ljb24=","cmllcmU=","IFByZWZlcg==","c3RydG9sb3dlcg==","UmVncw==","Q0JE","LT4K","IHBhcmFzaXRl","ZW5kc1dpdGg=","IENvYnJh","OnRlc3Q=","IE51Z2dldHM=","xaF0","Q29yZUFwcGxpY2F0aW9u","L2JpbmQ=","IE1jSW50","aXR1bmVz","Wy0t","IFN1cnByaXNl","X0lORw==","IEZhc3Rlcg==","0J3QsA==","OkU=","IGRpbnQ=","bmdl","LiInLCciLiQ=","IGFkamVjdGl2ZQ==","LmJj","Y29uc3VtZQ==","Qk9S","KGFuY2hvcg==","IGVzdGVlbQ==","IGJyZWFrdXA=","ZGVjYXk=","ICQKCg==","RWR3YXJk","QVNJ","IGF0dGFjaGVz","X0RJU0s=","IFdpbG1pbmd0b24=","IEt1bA==","IFtbXQ==","IERlcGFydG1lbnRz","IHJldHVyblR5cGU=","IFVOSVRFRA==","b2JqZWN0aXZl","IGdpcmxmcmllbmRz","X0dV","QHN0b3Jl","LU91dA==","Lm1vdmVz","KHN0YXJ0RGF0ZQ==","CUpCdXR0b24=","IFBhY2U=","IEJlYXRz","IGxpY3o=","IGV0aGVyZXVt","IGNoZWVyZWQ=","IGF1Y3Vu","UmVnYXJkaW5n","IG1pZ3JhdGluZw==","IGZ1dGlsZQ==","IFRhY29tYQ==","X0NoYXJhY3Rlcg==","IHZn","IENvcGE=","2Ks=","IG5hbA==","IGxhbmRmaWxs","IHRhbWls","IHBlcnBldHJhdG9y","IFBhY2Vycw==","LmdldE9yZGVy","fA0K","R2V0T2JqZWN0","IGJsYQ==","IEhhcmFt","cG9ydGxldA==","IGxva2Fs","TWVyY2hhbnQ=","UGFzc3dvcmRz","b2
5lbnQ=","IGFydGVyaWVz","IEludGVsbGk=","XFN5c3RlbQ==","PWxvY2FsaG9zdA==","LmF2aQ==","IFZlbmQ=","KHRibA==","Q29ycmVjdGlvbg==","IHV0ZXJ1cw==","IHNhbGl2YQ==","Kys7DQoNCg==","KCcqJyw=","IHNuYXRjaA==","IFNUUkVFVA==","KVs6","54Sh44GX44E=","U2VudGVuY2U=","KCkuJy8=","OnJlbGF0aXZl","leOCkw==","X3VzZXJpZA==","b2xpbmc=","IENsYXNo","CXNldHVw","KG1p","IGppdA==","IFNjYW5kaW5hdmlhbg==","IFBob25lcw==","Iic7Cg==","IHR1bXVsdA==","IEludGw=","IFNpbm4=","KG5ld3M=","IGRicw==","IFJlbWFya3M=","S2l0Y2hlbg==","IGFkbWlyYWJsZQ==","X2Rhc2g=","IERPTUFJTg==","YWRkTGlzdGVuZXI=","Il0uKA==","CU1ldGhvZA==","bWFya3Q=","LGV4cG9ydHM=","IG91dG51bWJlcg==","X0FTQw==","cHJlbWl1bQ==","KU5VTEw=","IEJvd21hbg==","LnNldE9uSXRlbUNsaWNrTGlzdGVuZXI=","IFJlZ2V4T3B0aW9ucw==","S2Vs","L21hdA==","44GT44KM","IHdlYXJlcg==","aW5pcw==","W2RpbQ==","IE51dHp1bmc=","aXNidXJ5","5Yid","IHJvb3RSZWR1Y2Vy","ZXlK","SW5jbHVkZWQ=","LUxlYWd1ZQ==","YW5heA==","KGluZmxhdGVy","IEZpZWxkVHlwZQ==","IHNob3Zl","IGZ1bGxmaWxl","RGF0YU1hbmFnZXI=","LmdldExlZnQ=","IEZz","ZHJvcG91dA==","IOuyiA==","IG1hbmnDqHJl","IGZsYW1pbmc=","IGNvbXBsZXRhbWVudGU=","4oCw","fC4=","RW5lbWllcw==","b3NjaQ==","IFNBWQ==","IG1hcnk=","KFJ1bnRpbWVPYmplY3Q=","IH4+","IFNpbXBzb25z","J10uJA==","X21lbWJlcnNoaXA=","KSI6","IGxheW91dE1hbmFnZXI=","IFJvY2tlZmVsbGVy","ICd8Jw==","SVBI","RE9O","YWNodGU=","UGVhY2U=","aHRhcg==","QCIK","IHRyZWFkbWlsbA==","IHNwdXJyZWQ=","IEtW","bWlkZA==","IGZsb3dlZA==","w6Nlc3Rl","R2VuZXNpcw==","PT0+","IFZlbnR1cmE=","X2VsaW0=","INC40LzRjw==","IHNvbmd3cml0ZXI=","Y3JlYXRlRm9ybQ==","SUdITA==","IG1vbGRlZA==","IHJldmVyZWQ=","VW5kZXJUZXN0","aW1ibGVkb24=","X1Nlc3Npb24=","IG1hc2NvdA==","IGFsZg==","66mU","PldlbGNvbWU=","IGtub2Nrcw==","IEVxdWF0aW9u","LnRvdWNoZXM=","X0xhc3Q=","IHVwYmVhdA==","YmlnaW50","IGVudmlz","L2Jhbm5lcg==","44GC44KK44GM","IERvd25z","X1NG","IHJ1bkFwcA==","IHF1ZXN0aQ==","VHJhZGl0aW9uYWw=","X3dhaXRpbmc=","cGlja3Vw","KCdALw==","CXNl","IEtlcm4=","IERlbGljaW91cw==","IHNhdHVybg==","IEpTT05FeGNlcHRpb24=","44KN","SlI=","fSgpKTsK","IFNvbWFsaQ==","dWFp","aW1hZ2V
t","YW5kRmlsdGVyV2hlcmU=","w6hsZXM=","aW5ib3g=","IHlhcMSx","IG1laXN0ZW4=","YF0o","U1dH","LGNsYXNz","4LWN4LQ=","dGFpZW50","IEZyYW7Dp29pcw==","QXV0aFRva2Vu","IHB1ZXN0bw==","IGps","IGdhdGVk","IERlYXRocw==","IFNpZGQ=","IHByZXZhaWxlZA==","LcOqdHJl","KGFsYnVt","IHFpbnQ=","bWFyY2E=","IE5BRlRB","IHRpZ2h0ZW5lZA==","X0dBUA==","RU5TSU9OUw==","IExpYmVydGFyaWFu","X3N0eWxlc2hlZXQ=","LlNldEludA==","X3B1Ymxpc2hlcg==","cGFnZU51bWJlcg==","enNjaGU=","IFNRTEFsY2hlbXk=","IGhvb2Y=","Z2V0VG9rZW4=","IG5lYmVu","bHVuZA==","Lm1pdA==","ZXJycw==","LnNldE1pbmltdW0=","LXByaWNlZA==","KHBv","ZW5nYWdl","X0ZU","Ly8KCgo=","IHRvbWU=","ICI+PC8=","VmVjdG9ycw==","IFRlc3RVdGlscw==","ZmlsdHI=","VXN1","IGRpY3Rpb25hcnlXaXRo","IG9icmFz","IEJEU00=","LmdldFRhcmdldA==","IGFsbG93YWJsZQ==","IEluc2VydHM=","CU5vbmU=","IGxpYmVyYXRlZA==","S2VudA==","IFdpc2hsaXN0","IExhZ2Vy","IGp1aW4=","IG51ZXM=","IG1vbmFzdGVyeQ==","IG1pY3Jvc2Vjb25kcw==","IEhhbm5h","0L7RgdGC0Lg=","d2VhcG9ucw==","X3Nwb3Q=","b2RvbQ==","Lk1vZGVsRm9ybQ==","IG9yZGVybHk=","RklOSVRF","IHJlc2lkZW5jZXM=","X3RD","Q0dDb2xvcg==","IMW+ZQ==","IHNjcmVlbnBsYXk=","IHB5bW9uZ28=","IGTDqXQ=","IGRlc3Rh","IE5ldXJvc2NpZW5jZQ==","bmllc3Q=","QEdlbmVyYXRlZFZhbHVl","RUxTRQ==","PGw=","IGRpc2pvaW50","LnB1Ymxpc2hlZA==","ZWxsYW4=","IFN0cmluZ1dyaXRlcg==","LkJyb2FkY2FzdA==","IEZlaW5zdGVpbg==","YW1waGV0YW1pbmU=","S2V5U3BlYw==","IEdyaW1t","ZXR0ZWw=","4Lic","T3Q=","aWJyYWx0YXI=","Y2Vi","IHRpbWluZ3M=","aW5lZQ==","IEFuZHLDqQ==","RXNzYXk=","Lmpk","IEJ1bmRlc2xpZ2E=","UmV0dXJuZWQ=","IGFwcGFsbGluZw==","LkJpZ0ludGVnZXI=","IFNFTg==","IEhvbWVtYWRl","LmNoYXB0ZXI=","LXZhbGlk","IEFUVFJJQlVURQ==","dXN0cmlh","IGVudMOjbw==","UmV0dXJuaW5n","dmVydGlzZXI=","LlBhY2thZ2VNYW5hZ2Vy","Q2xhcms=","IHF1b3Rhcw==","IHNjYWxlRmFjdG9y","IGNveg==","X21pbmk=","IG11dGF0ZWQ=","LmFjdGl2YXRpb24=","Km1hdGg=","LnZlcnR4","PGFydGljbGU=","IGVtYnJvaWRlcnk=","L2J1c2luZXNz","Y2tldHQ=","c2NpZW50aWZpYw==","IEdpbGVz","IHJhY2Vy","X3BlcmZvcm1hbmNl","IGxhbWluYXRl","IFBISQ==","UsOp","IEF0aGU=","Y29sZXM=","IHNhxJ8=","IElua1dlbGw=","CXNpZw==
","IHNwYWNlc2hpcA==","IGluc29s","IFVDbGFzcw==","LmxlYWRpbmdBbmNob3I=","dG90YWxz","IHNwcmlua2xl","IE1vZHVsYXI=","ICdcIg==","b3Jvbg==","LlJlYWRBbGxUZXh0","ICAgIAkNCg==","L2lvbg==","REVQVEg=","X21pbmltdW0=","XENhY2hl","IGRpdmVyc2lmaWVk","aWduZXQ=","IGRvam8=","IFVJQWxlcnRWaWV3","L3R0eQ==","IFNhc3M=","IC9cLig=","IElNQUdFUw==","IGRhdGluZ3NpZGVy","IEV4cGxvcw==","LmdlbnJl","XEV2ZW50cw==","IGVudW1lcmF0ZWQ=","Y3VycmVudFN0YXRl","aXRydXN0","Q2FsbGFibGVXcmFwcGVy","Rm91bmRlZA==","IHJveWFsdGllcw==","KFByb3BlcnRpZXM=","IFVTUFM=","LS0tLS0tLS0tLS0NCg==","LlJlYWRUb0VuZA==","IGNvc3k=","IGFwZQ==","X2RlZmluaXRpb25z","IHBhZ2VObw==","IGR6aWVjaQ==","c3RhbmRlbg==","IGJlc2Fy","aXRpbg==","IGNvbnNlcXVhdA==","IHBydg==","IHNwbGl0dGVk","IGVzcG9zYQ==","PWZpbmRWaWV3QnlJZA==","V2Fsa2Vy","IEhlYXJ0aA==","aWJyYXRvcg==","b3RvbXk=","YWdnYWJsZQ==","IOW9kw==","77yBJyk7Cg==","aW9uYXRl","L3llYXI=","IHNldEM=","IE1lZGlhVGVr","LWJveQ==","LnRvb2xTdHJpcE1lbnVJdGVt","Q29uZmlncw==","YXR0ZW5kZWQ=","IGVtb2M=","IEJhaQ==","b3BvbGl0YW4=","IGludHJ1c2l2ZQ==","IHp1Zw==","IGZmbXBlZw==","X2Jvb3N0","IG1vemlsbGE=","IHNsaWNpbmc=","V0c=","cGFnZXNpemU=","UHJvcGVydHlEZXNjcmlwdG9y","IEFsZWphbmRybw==","VVNFUw==","SG9zdGluZw==","IHJpc2tpbmc=","IEludml0ZQ==","IEphemVlcmE=","IHJlZ2FpbmVk","IEhhZ3Vl","IGd1ZXJyYQ==","IGVuY2xvc2luZw==","J10iKQo=","PFRyYW5zZm9ybQ==","Lk5PUlRI","IGNyaW0=","SU5V","IGNsZW4=","IE1vdGhlcnM=","IE93bmVyc2hpcA==","RHJpbms=","IGJlYmVyYXBh","Lm9uZXJyb3I=","KSsK","IHRhYkluZGV4","IERpbw==","IEZvcnR5","KExpbms=","IHNlZ21lbnRlZA==","IGphbWVz","IFRhcmdldHM=","IFJUUw==","INC60L3QvtC/","IHZhcmlhcw==","IHTDrXR1bG8=","IGTDvHI=","L0dhbWU=","cmFuc2l0aW9u","IGRpc3Rpbmd1aXNoaW5n","dWt0dXI=","YW5qZQ==","IE1jQ2FiZQ==","cGFp","KHRr","RGVzdHJ1Y3Rvcg==","R2FtZU9iamVjdFdpdGhUYWc=","JGg=","IGFmcg==","LnNldEVtYWls","IHJlcGV0aXRpb25z","bGFuZGVycw==","IFNoZWE=","X2NsYWlt","IGFjZXNz","QmVuY2htYXJr","LkVzdA==","LlBP","IE7DpA==","IGl0Y2hpbmc=","IGNvbmRvbWluaXVt","X0ZXRA==","IHJlYWx0aW1l","IGNpdmlsaXplZA==","X3BoeXNpY2Fs","UmFs","IHdpbnRlcnM="
,"IFlhZA==","IGZvcmE=","IGNhbGlicmF0ZWQ=","UGV0cw==","IHN0b3JtZWQ=","IGplbA==","IFNTUA==","ZGF0YWdyaWQ=","IExhdQ==","dW5hcg==","dWxmaWxsZWQ=","RVJJTkc=","IFRyaW8=","2LHZiA==","Rm9yZWdyb3VuZENvbG9y","PW91dA==","LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKi8K","IHZpZW50","IEFETQ==","X0Nvbm5lY3Rpb24=","LWNhbmNlbA==","KCcuJyk7Cg==","IHNhaWxz","IGVxdWl2YWxlbnRz","TmI=","IGZseWVycw==","IEdJUg==","a2VsaWc=","LXdhbGw=","LlJlcXVpcmVz","IGNvc2U=","IEFOQw==","IGphZGU=","IEFsZWM=","IGVuZHJlZ2lvbg==","IEVYVEk=","ZWRlcmU=","VGVycmFpbg==","U3BlY2lmaWNhdGlvbnM=","IFN3ZWVw","c2V0SXRlbQ==","IHNtaXJr","IHNjcmlwdGVk","W1N5c3RlbQ==","56eB","IHN5bmNlZA==","IHNxcg==","Z2V3YXRlcg==","IGpld2Vscw==","IGhkYw==","4KWN4KSw","z4Y=","w7xzc2VsZG9yZg==","bGllbg==","Qm9yZGVycw==","IEF0b21pY0ludGVnZXI=","IHBhcmFseXNpcw==","Q2xhc3NpZmljYXRpb24=","IGdsaWRl","IHVtcA==","IC8+fQ==","IHZlbmRpbmc=","4Li04LiZ","bm90aWY=","Jl8=","IEVtZXJnaW5n","YXRpY29u","IHByb3BhZ2F0ZWQ=","LW9yZGVycw==","YWdhcw==","dXJnZW50","KFRpbWVTcGFu","QUxDSEVNWQ==","L2Jvd2Vy","7IKw","LmJvb3N0","LmRlcGVuZGVuY2llcw==","LlN3aW5nQ29uc3RhbnRz","dW50bGV0","LmNoYXJz","LWNpZ2FyZXR0ZXM=","IE1vZHM=","ICAgICAJ","IGJyYXZlcnk=","IGNvdW50ZXJlZA==","cmVsdWRl","X21vYg==","QUlORUQ=","bmdvaW5n","IHVuZGVyZ3JhZA==","R2V0TWV0aG9k","RHVhbA==","X2pvdXJuYWw=","LE5v","IHNpZGVs","IExhcnNvbg==","KyIsIis=","IG5hcnJhdGlvbg==","IFN1YndheQ==","IExleGVy","IE5pbmc=","aW5kaWM=","dGhhbmU=","LlNJRw==","LWVhcnRo","IGJlcnJ5","IFRldWNob3M=","CUVudGl0eQ==","ZXJzcGVjdGl2ZQ==","Tm9z","IE93bmVk","QlVS","IGxpbmVubw==","IEZpamk=","R2V0SW50","U3RyaW5nUmVm","ICcmJw==","dWFkYQ==","LmNhcHRpb24=","YXBwTmFtZQ==","KG9mZg==","IHZlcnN0","IHR5cG8=","6ZyA6KaB","YXRlcmFuZ2VwaWNrZXI=","IHFlbXU=","IEdFTw==","X0Ns","LklU","IE51bmVz","W1o=","IENvbXBsZXRlbHk=","LkxpdmU=","IEphcw==","IHdlaXQ=","Y29zaXR5","IHBvbGljZW1lbg==","KHRhcmdldHM=","aXRsZWRCb3JkZXI=","IOinow==","LkdsaWRl","IGRlbW9uaWM=","SW50ZXJpb3I=","LS0tLS0tLS0tLS0tLS0tLS0tLS0tL
S0tLS0tLS0t","IERvdGE=","IG9yYml0cw==","QU1Z","IFRyaW5pZGFk","aWN1bQ==","Lnph","IGdldEludA==","QXRsYW50YQ==","IGFtbmVzdHk=","IFJhaHVs","IF98","aGlybw==","IFRBS0U=","IGp1bWxhaA==","IEF1dG9tb2JpbGU=","4buP","d2hvc2U=","X1NBTVBM","UGF0aWVudHM=","INGC0LXQutGD0Yk=","LnN1YnNjcmlwdGlvbnM=","IE1lbnRpb24=","VG9Xb3JsZA==","aXBh","CU1lc3NhZ2VCb3g=","PEFwcGxpY2F0aW9uVXNlcg==","INil","ZmFicmlj","a2VsZXRhbA==","QmFyQnV0dG9u","IGFyY2hldHlwZQ==","aW5zdGFudA==","IGludGVybmFjaW9uYWw=","IFZveWFnZXI=","KHRvdWNo","IFZhbGs=","L01JVA==","IGNhdWw=","J0Nvbm5vcg==","KCIh","KE9Q","ZmFjdWx0eQ==","IEJhdG9u","IFZvbHVudGVlcnM=","dGFuaw==","X0JJTkRJTkc=","O2xpbmU=","IFZlcnNpb25z","WUxFUw==","IGplZXA=","KEVuY29kaW5n","IGdlb2xvZ2ljYWw=","TmljaA==","KHBkZg==","IGFuYWx5emVz","IGNhcHRpdmF0aW5n","IGhpem8=","Lm1kbA==","IGphcA==","IGZsaXBz","CWRm","IFBpZXQ=","IG5yb3dz","IGthbXU=","INCy0L7Qtw==","IHBydW5pbmc=","YWN1bGE=","IHRyYXZlbGxlcg==","U2hvb3Q=","LmVwc2lsb24=","IEZsZW1pbmc=","aWJ1cg==","b3BlcmF0ZQ==","aWdodGVy","IGJlZ3M=","IFdhbG51dA==","KFBhcnNlcg==","IHdpdGhkcmF3YWxz","aXNjb3BhbA==","IGJpbGxib2FyZA==","a2Vr","LW9wZW5pbmc=","IER1ZGU=","Y29uaQ==","eEVC","IGNhbG9y","YW1haGE=","LlRYVA==","RHJ5","IG1pc3Npb25hcmllcw==","X1ZlcnNpb24=","IG11bHRpbGluZQ==","4oCUd2U=","IGNvbXBvbmVudERpZFVwZGF0ZQ==","RmF2b3JpdGVz","aWdoYW0=","IGpvdXJuw6ll","IGFtdXNlZA==","IE9tbmk=","dGd0","IHdhaA==","ZXRpbmU=","IHBoYXNlZA==","IG9uU3RvcA==","Y3JlYXRpdmVjb21tb25z","U29waA==","IHVuYm9ybg==","PUU=","IEZlZEV4","bm9ybWFsbHk=","IGx5cg==","TWF0cml4TW9kZQ==","IHplaWdlbg==","QXRo","IEt1bQ==","w6RobGVu","LyI7Cgo=","IGRhbGxl","IGxhbmNl","IFN1aXRhYmxl","IGNvdW5zZWxvcnM=","5YWo6YOo","IGZhc3Rh","IGJsYXppbmc=","7KeE","L3R1dG9yaWFs","LnRjcA==","5pmv","TWFuYWdlckludGVyZmFjZQ==","IFNhbWFy","CWdsVW5pZm9ybQ==","IHByZXJlcXVpc2l0ZXM=","IGFudGljaXBhdGluZw==","cmFxdW8=","a3Nlbg==","TWFnbml0dWRl","dXRvbWF0aW9u","SGllcmFyY2h5","IGRldmlhdGlvbnM=","aW1ldA==","Q0NJ","PSgK","IGFudGxy","CWluaXRpYWw=","IFJlc29ydHM=","aG9tZXM=","CXBvb2w=","IG1hdMOp","P29wdGlvbg
==","Om15c3Fs","KHV0Zg==","LlRhYkNvbnRyb2w=","PlRpdGxl","IEFkb3B0","LklzTWF0Y2g=","IGVudHJ1c3RlZA==","U3VzYW4=","c3dpbmc=","aW1hZ2VuZXM=","IHNlbGVjaW9u","IGFpZGluZw==","KFtdKg==","IHNldEZyYW1l","c3Bpcml0","L3Jzcw==","SXRhbGlj","IFByb3BlbEV4Y2VwdGlvbg==","IFRvbGw=","LkZpbmRHYW1lT2JqZWN0V2l0aFRhZw==","aW5hbnQ=","IHNlbGZpZXM=","XXxb","IGFwcGxpY2F0aW9uQ29udGV4dA==","aXhl","Y2Ri","ZWJi","IE92ZXJzZQ==","IHNxbENvbW1hbmQ=","SG9zdE5hbWU=","LWxhdW5jaA==","Umlzaw==","O3I=","LlNwYW4=","X0NJVFk=","X01B","LyIKCg==","UGF3bg==","IFllbHA=","QnVuZGxlT3JOaWw=","IG1heW9yw61h","U3RhY2tOYXZpZ2F0b3I=","ITsK","IHRodWdz","IEJhcm5ldHQ=","44O744O744O7Cgo=","IOqygA==","X0NPTlY=","IGJ1enppbmc=","a2V0ZXJhbmdhbg==","TWlsaXRhcnk=","d2VlZA==","IGRlbGltaXRlZA==","6LWE5rqQ","INCw0Lo=","X0hFTFBFUg==","IFJFQURZ","TG9vcGVy","KioqKi8K","IFRydWNrcw==","5Y67","X3BvZA==","T01BVElD","LWphdmE=","IHVuaWZ5","L0FyZWE=","ICcvJyk7Cg==","IEdhbWJsaW5n","LkhpdA==","IEZhcnJlbGw=","X2ZpdG5lc3M=","cmVjb21tZW5kZWQ=","emVuZA==","b2RpZQ==","X2JlYW0=","IHBsYWdl","bmRvbg==","LmFzc2VydGo=","IGdyYXRl","TWVhc3VyZWQ=","LmNlbnRyYWw=","Z2VzdHVyZQ==","IEdsb2JhbEtleQ==","cHl4","IE5lY2tsYWNl","5Y2O","LkFkZENvbHVtbg==","IFJ1ZGQ=","IFByZXNieXRlcmlhbg==","dW5kbGVy","IyFb","X2xhaGly","KCk9PSI=","QWNjZXNzaWJpbGl0eQ==","LXRyYWluaW5n","IFRob3U=","X1BJWA==","X1RSWQ==","PEo=","xrDGoW5n","bHVjaw==","X01BWElNVU0=","IHRoYXc=","VW5pZmllZA==","PkNvbnRhY3Q=","LVByZXNpZGVudA==","LXBhcnNl","IFBpY2tlcg==","TWFyY28=","dHJz","zrQ=","LiQu","X01FU0g=","IHNhZ3Rl","Kz0n","0K8=","KHBhcmNlbA==","aXZvcnM=","IGRpdmVydGVk","QUdBSU4=","IG5lc3M=","IHZhbGxleXM=","IC4uLig=","IEVRVUk=","IE91dHM=","IERlbW9uc3Ry","RGV0YWxsZQ==","IOu2gA==","UG9pbnRYWVo=","LmVwcw==","IHN5bm9ueW1z","ID09KA==","4oCcWWVz","J3V0aWxpc2F0ZXVy","TmFtaW5n","TEVW","cHJvdG9jb2xz","IOyb","IGdldFVzZXJuYW1l","LXZhcg==","X210eA==","IHNwZWN1bGFy","IG5vdGFz","SG9yaXpvbnRhbEFsaWdubWVudA==","IEJheWVy","c3Vz","ICAgIAkJCg==","IFNoYWNr","cmVzaGVy","IGltbWF0dXJl","YnJhY2h0","SVNDTw==","LmNyZWRpdA==","IHZpbmVz
","X0xQ","RUVERUQ=","IFNjYXJib3JvdWdo","w6FudA==","KT09Jw==","CWRlbHRh","X0NPTE9SUw==","LkN1c3RvbUJ1dHRvbg==","IGFmaXJt","IEppbmc=","UGFybXM=","Y2VudGVycw==","LT5fX18=","IExETA==","LWNvbnRyaWI=","IERyZXNkZW4=","IFBpeGVscw==","ICIiIiIsCg==","TEVUVEU=","eEJF","IEh1c3Q=","IEV4ZWN1dGlvbkNvbnRleHQ=","IEJ1ZmZldHQ=","Y2xhbXA=","LkFydGljbGU=","IFJhdGg=","IFBleXRvbg==","IExPV0VS","b29rZQ==","IHRpZGFs","IHVuaGVhcmQ=","IFNoYWxs","IGJvbWJhcmQ=","YW5vdmE=","W21hc2s=","KGNyZWRlbnRpYWxz","IEV1cm9z","IGJyYW5jaGluZw==","IHN0cm9uZ2hvbGQ=","IGNpdmlsaXphdGlvbnM=","LWNvbm5lY3Q=","IExTVE0=","LW1vdmluZw==","IHV0ZW4=","Y3Jhc3Q=","X0RJU1A=","IENvbnRyb2xsZXJz","dXBl","LnBlbg==","IGRlc3Nh","IGRpZsOtY2ls","dWl0YWJsZQ==","b2ZpcmU=","W2NoaWxk","UkVGRVJFTkNFUw==","IGRlY2VpdA==","IFVyZw==","PEVkZ2U=","IGRlc2k=","IEJPVEg=","ICcpJzsK","dHlwZU5hbWU=","Q29tbWFuZEV2ZW50","d2hlcmVJbg==","KG9wdGltaXplcg==","IHLDqWFsaXM=","IG9taW5vdXM=","IEJyYWNrZXQ=","IGRhdGVTdHJpbmc=","IHNpbmdseQ==","KEpGcmFtZQ==","4oCZVA==","ZXNsaW50","KGhlcm8=","IE1hcmE=","IGNhdGNoeQ==","LGNhbGxiYWNr","IGN0eXBl","cHJlc2V0","CWdsZnc=","0LXRiQ==","aGs=","IHRpdGFu","QWNlcHRhcg==","44Gh44Gv","X2Fzc2lnbmVk","X2VyYXNl","IGluZmFuY3k=","UmV2aWV3ZXI=","IFJlY29yZGVy","IHNjbQ==","IEJpZ2dlc3Q=","IEdvYQ==","CVND","X0xvY2F0aW9u","X29yaQ==","a2ls","cmVuZGU=","IG1hcnpv","U3RyaW5nVXRpbA==","0YPRidC10YHRgtCy","IEhvd2U=","xrDhu51p","Zm9pcw==","WE1MRWxlbWVudA==","IGRlcmVjaG9z","IGR1bmc=","IFdhaw==","IEdhdw==","fVxc","ISIpOw==","IEpvaGFubmVzYnVyZw==","IHN1Ym1hcmluZXM=","IGFjY29s","IGZvc3RlcmluZw==","LgoKCgoKCgoKCgoKCg==","Lk9wZXJhdG9y","IG51b3Zh","IHRyYWplY3Rvcmllcw==","LnNjaGVkdWxlcnM=","IEZvbGxvd2Vycw==","IEFuZGVyc2Vu","IFBlZ2d5","LmZyZQ==","xLFjxLE=","IGt2cA==","Y29i","LWxlbg==","IG1haWxz","IGFjY3I=","IEpBVkE=","IGFkbWluaXN0ZXJpbmc=","RGVmYXVsdENlbGxTdHlsZQ==","IGNsaWNrYWJsZQ==","IEphY2tldHM=","O2Rpc3BsYXk=","IGJyZWFkY3J1bWJz","Y2hhbA==","Oic7Cg==","IEhvdmVy","dWNjaGluaQ==","IHRlYw==","IHN0b3B3YXRjaA==","X1JlbGVhc2U=","TWF5b3I=","4Z62","IFlhbmtlZQ==","
Y2huZXI=","QXJ0aWZhY3Q=","LmJhbm5lcg==","IGtm","X3N0dWR5","Zm92","IE1lZXRpbmdz","w7Zt","IGluanVyaW5n","L2RvY3VtZW50YXRpb24=","QkNN","c3R5bA==","CXJi","IG9yaWdpbmFscw==","IGZsZXJl","IFRlcnJhcmlh","dG9rZW5pemVy","LWxpdGVy","Jyk7Ig==","IHBldGl0cw==","IEJidw==","IFRoaWVm","VUlMVElO","Uk9VVA==","IHNudWc=","Pj4p","LW5pbmU=","IH1dOwoK","IEJlbGxldg==","IGVsw6k=","IHl5bg==","eW5hbW8=","Z2xlcw==","IHNwZWQ=","LkJVVFRPTg==","IGRpc3BlcnNpb24=","b3VibGVz","IG5vdmVsbGVy","Il0uIg==","IHByaWVzdGhvb2Q=","ICIiKQoK","CWd1aQ==","LWluYw==","WG1sTm9kZQ==","IHN0dWRz","LklzQWN0aXZl","IHRyw6Q=","IG9yZGFpbmVk","IEJ5dGVBcnJheUlucHV0U3RyZWFt","IHJlcXVlc3RCb2R5","IFJUUA==","UkVTVUxUUw==","KGNvbGw=","IHJlbG9hZGluZw==","Lk5hdmlnYXRvcg==","X2NvdW50ZXJz","IGJ1ZGRpbmc=","IGxpY2Vuc2Vl","b2xvZ2k=","IHPhuqNu","IEtpcw==","IEZsYXR0ZW4=","X3ByaQ==","IGFwcHJvcHJpYXRpb24=","6K+E6K66","X1JTUA==","Y29tYmF0","X1BH","IGhpc3RvZ3JhbXM=","ZHE=","RW50ZXJwcmlzZQ==","IE5PQUE=","IFNwZWVkd2F5","IGJhZ2k=","IEJld2VydA==","RmxvYXRpbmc=","IEtpbWJlcmx5","UHJvc2Vj","SmltbXk=","IEVsaWFz","IGFyYml0cmFyaWx5","IOS9v+eUqA==","IENvdW50cw==","dXN0ZQ==","Rmlyc3RDaGlsZA==","IENsZWFucw==","LnB1cmNoYXNl","IGludGVycG9sYXRlZA==","IGJ1aWxkdXA=","X1NURU5DSUw=","RWd5cHQ=","IGF1cmU=","LnRydXRo","ZmVvZg==","IEdpbQ==","b2NhY2hl","IFV0dGFy","X0NPTVBMRVRFRA==","U2Vlbg==","IE5hcG9saQ==","KGRt","IGdyaXR0eQ==","LmVudGVycHJpc2U=","Y29uZXhhbw==","IGdhdGhlcnM=","IHNldFNlYXJjaA==","IENsaWZmb3Jk","IFNuYXBl","IFNhbHZhdGlvbg==","TG9naW5Gb3Jt","Q3JpdGljYWxTZWN0aW9u","LnVzZXJkZXRhaWxz","IHJlcGFpbnQ=","44GC44KK44GM44Go44GG","SHVudGVy","WmVu","VGlueQ==","bWxhbmQ=","ZXJ0aWw=","CWJ1ZmY=","X09mZnNldA==","IHNtZWxsZWQ=","Uml2ZXI=","LXRvcGlj","IGFjb21w","IFJvdXRlU2VydmljZVByb3ZpZGVy","IDwr","b21icw==","IENvb3BlcmF0aXZl","IHNldWxl","IGFpbWU=","c2hvdWxkUmVjZWl2ZQ==","SG9uZw==","IG9hc2lz","IEdlbWluaQ==","cmFwaWQ=","RHVw","KFF0R3Vp","b2RvbnQ=","LWdudQ==","IFNlbGVuaXVt","Jyk/Pjwv","IE5vcGU=","R3JlYXRlclRoYW4=","Lk9ic2VydmVy","IEFwcHJvcHJp","IExvbmVseQ==","IGhhaXJjdXQ=","
IGFsbGVyZGluZ3M=","w7NwZXo=","esWR","IHNsdW1w","IEdpbnM=","IGdpb3JuaQ==","IHBhcGVyYmFjaw==","LkZpbGVSZWFkZXI=","ZGFm","Y3JlZHM=","dHlwaW5ncw==","ZGVoeWRl","Y29pbA==","U291dGhlcm4=","IG1vdXNlQ2xpY2tlZA==","emVpY2huZXQ=","dXNlclJlcG9zaXRvcnk=","RGVzdHJveWVk","aW50ZXJuZXQ=","IEVpZA==","IGxpbmtlcg==","4oCZQg==","IHNsYXVnaHRlcmVk","IFBlcnI=","CVJ1bnRpbWVPYmplY3Q=","c2FpZGE=","IHBhZ2VDb3VudA==","IFJhbmRvbHBo","IEpOSUVudg==","X3N1cGVydXNlcg==","LWRpcmVjdGVk","IElEYg==","IEJlcm5hcmRpbm8=","IE5pbnRo","IEFsZ29yaXRobXM=","YmRi","QHRlc3RhYmxl","LmFybQ==","YmVsbGlvbg==","KHNpZA==","IGJyaWVmZWQ=","4pWX","6YWN572u","IFVtYQ==","IEluZGljZXM=","IEJ1Y2NhbmU=","IGF5YW50","RnJlZWRvbQ==","IFl1cmk=","ZXRzaw==","X1Bo","IGl0YWxpYQ==","Y2xvc2luZw==","IHdyaXN0cw==","ICp9","c2VjdXRpdmU=","RW52aWFy","cmFpdGg=","IEhhd3Ro","15M=","ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKgo=","cGFnZVRpdGxl","IGRoY3A=","IOyLpO2WiQ==","d2lzaGxpc3Q=","IGJsYW1lcw==","IHNpZGw=","dWRkZWQ=","IGNvbnRyb3ZlcnNpZXM=","6I8=","KHVzZXJEYXRh","IGxpbnNwYWNl","IERpZmZlcmVuY2Vz","X2RlcG9zaXQ=","REVUQUlM","LmRlY2s=","IGNvbnRpbnV1bQ==","IHNhY3JhbQ==","b21pdGU=","IG5mbA==","Q3Vt","IHNvZg==","IGV2aWxz","IGVudGlkYWQ=","CXNvY2s=","IExlbW1h","LlNoaXA=","IHppZw==","VGVsZWZvbmU=","SURFUw==","IE51bWVyb3Vz","Lm1ldHJpYw==","aW5zbg==","IGNvcHlyaWdodHM=","IGNvbXBsaWNhdGlvbg==","IFVSTFNlc3Npb24=","IGRpcHBpbmc=","IGNx","IEJ1c3R5","cmVsYXRpb25zaGlwcw==","IENvcnZldHRl","U3VtbW9u","ZXZlbnROYW1l","SXNzdWVz","IGlycmVzaXN0aWJsZQ==","IGdyaXM=","Q0FTQ0FERQ==","IHBhdXNlcw==","IGxlZGdl","X0dQ","LkltcA==","IG9yZGVyYnk=","IE9yZ2FuaXplcg==","IEdyZWVud2ljaA==","T2Fr","LW1lbWJlcnM=","IFdlYkdM","IGdhbW0=","bW9kdWxlSWQ=","IGZ1bGxQYXRo","bG9nZW4=","KGV2ZW50TmFtZQ==","KCIuIik7Cg==","IGtyaXN0","IGNsaWZmcw==","IFBlcmNlcHRpb24=","RVRJTkc=","IGzhuqFp","IGludGVydg==","IG9wcG9ydHVu","IEp1ZGdlcw==","IENvbWJpbmF0aW9u","Y29udGludWVk","Y29ubw==","LmRyYXdSZWN0","LkNvbXBvc2U=","IHNpZ3VpZW50ZXM=","IER1ZmZ5","KGVuY2
9kaW5n","IFZ1bGthbg==","IEdlcnI=","IHBhcmZhaXQ=","KHl5","X1RIQU4=","IGdldFNlcnZpY2U=","X09SRA==","LGVw","Z3JhcGhpYw==","IFF1ZXJpZXM=","IHBhcnRpY3VsYXJz","IEhhdmFuYQ==","PW8=","ZmFucw==","IHVuaWxhdGVyYWw=","IFJGSUQ=","Q29tcGF0aWJpbGl0eQ==","c3RyYW5k","IHdha3R1","IHF1YWxpZGFkZQ==","UHJvcGVydHlQYXJhbXM=","cmV0ZW4=","KGhvc3RuYW1l","X0NBUg==","IHdpZGVuZWQ=","IFhwZXJpYQ==","cG9sbG8=","QWJvcnQ=","ISEpCg==","IFdhZw==","LS0r","INGC0YA=","IFJlY3Vyc2l2ZQ==","IGFubmU=","IEdhbWVwbGF5","PENsaWVudA==","LlVzYWdl","IElTU1VF","IGpkYmM=","aXNvcnk=","X21hY3Jvcw==","cGlja2xl","LmdhbWVzZXJ2ZXI=","IHR2Yg==","0YLRiw==","Lk9QRU4=","IHByZWRldGVybWluZWQ=","IHNpcmU=","CQkJDQoJCQkNCg==","aXNjcmltaW5hdGlvbg==","IHJlcGVhbGVk","IGNvbmplY3Q=","IFByZWNvbmRpdGlvbnM=","IHRpbHRlZA==","IGlub2M=","IGV1cm9wZWFu","YWJk","X0RFTEVURUQ=","IC0s","4oCTYW5k","QEZYTUw=","ICldCg==","UklORw==","IGFsaXF1YQ==","IGdydWVzb21l","IEluY2hlcw==","UGxheWVk","KGNvbmZpcm0=","IE5WSUM=","X1RvdGFs","aXNhcw==","IE9uaW9u","IHNlY29uZG8=","IEdldFVzZXI=","XFVybA==","X2Fic3RyYWN0","IGRldmV6","IGN1cGJvYXJk","dGV4dHM=","IElzbGVz","X01BVEg=","U2tpcHBpbmc=","X2Nvc3Rz","PW91dHB1dA==","aWJpbGk=","IGtudWxs","X2NvZWZmcw==","X2F0dGVtcHQ=","CVJ1bg==","Z2VuZGVu","cnVwdGVk","IHNvYXJlZA==","X2hz","IGFkb3B0cw==","X01PRElGSUVE","XEZhY3Rvcmllcw==","IFN3ZWF0","IGRva3VtZW50","IFRlbGVzY29wZQ==","IEZpeGVz","b3JxdWU=","LkNoYXJ0aW5n","X0RBQw==","IHNlY3JldGlvbg==","IHJoZXRvcmljYWw=","UGVyZmls","IG3DtmNodGVu","LCcs","IHZpZXdQYWdlcg==","QlVZ","IG9uRm9jdXM=","b3NhbHM=","IGJpc2N1aXRz","IHZib3g=","IGZvcmNlZnVsbHk=","TmludGVuZG8=","IHbDoWw=","IGNsYW5z","ZnJvZw==","IGJvcmRlclRvcA==","QnJpZWY=","LkJvcmRlckZhY3Rvcnk=","LXNlcnZpbmc=","IHF1b3RhdGlvbnM=","IEdhcm5lcg==","IEFsbGV5","Ij8+Cg==","KHNjYW5uZXI=","IGVudGFpbA==","IC8vPT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PQ==","KGA8","LmRlc2NyaXBjaW9u","X0J5","IOyalA==","IHBha2lzdGFu","ZWxobw==","RW5naW5lZXJpbmc=","IGJvb24=","IExvb3Nl","aWVyZ2U=","U2VuYXRl","IExZ","cmVzcG9uc2VPYmplY
3Q=","aW9yZQ==","w6FnZW5lcw==","IOS4jQ==","IGFkZEFjdGlvbg==","IE1BQ0hJTkU=","YW5na2Fu","X21p","X0FSUg==","TGl0ZXI=","T0xG","IHN1cHBlcg==","IHBhdGhNYXRjaA==","IE9ycg==","w61k","KGZpbHRlcmVk","IGF1dGhUb2tlbg==","IOKEnQ==","LTwv","KHRlbnNvcg==","IHJldm9sdmluZw==","IGluaWNpYXI=","IFNjaHdhcno=","ZGVmZ3JvdXA=","Y29sdW1uTmFtZQ==","X3RyYWplY3Rvcnk=","4LmE4Lih","ZWdhc3Vz","IOydtOumhA==","IGVhdGVy","IHVuZGVyZXN0aW1hdGVk","IGJ0Yw==","IOyEoO2DnQ==","ZW5hZGU=","IFNFWFA=","ZW1vdXRo","T01FVFJZ","ZW50ZXJlZA==","LnBob25lTnVtYmVy","IFZvYw==","IGV4Y2Vzc2l2ZWx5","IENBVEVHT1JZ","X1VQREFURUQ=","IG1vbmFyY2h5","YXJjaHM=","IGNhdmVhdA==","d2lucw==","IHBsYXlib29r","c2hhZGU=","IHNldFVzZXJuYW1l","IGFjY3VzZXM=","IG1vxbxsaQ==","IGxvcnNxdWU=","IGFqdWQ=","aGVhcg==","IHBzeWNvcGc=","KEVD","IG1lbGFuY2g=","dGhyb2F0","bmlo","V09PRA==","IHZvbHRz","X05FRUQ=","X3doaWxl","IFJpZGVycw==","16I=","IC4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4uLi4=","TmV0TWVzc2FnZQ==","TW9kaWZpY2Fy","LnNlc3M=","KCIiKSw=","6Kmx","IHByYWlzZXM=","IGxjbQ==","IG1ha2VzaGlmdA==","IE5PVEhJTkc=","IEFydGlmYWN0","d2lq","dHlwaWNhbGx5","KCde","PGs=","xJlraQ==","INC+0YLQv9GA0LDQsg==","IOE=","IGRlZlN0eWxlQXR0cg==","aW5jZXJlbHk=","w6lzdA==","SW5UaGU=","c3RpbWU=","IGZyYWdtZW50ZWQ=","IGZyeWluZw==","Z3JpbQ==","ZmllbGRuYW1l","IGNyb3NzaW5ncw==","IGFtbw==","X09wdGlvbnM=","IGhhaXJlZA==","L3dhaXQ=","IHBhcmNobWVudA==","IGNyZWF0ZUVsZW1lbnQ=","SHR0cFN0YXR1cw==","IGVya2zDpA==","aXp6YXppb25l","dGh1bWJuYWlscw==","bG92YWs=","IGJhbmdpbmc=","IHVuaW1hZ2lu","IE92ZW4=","KEF1ZGlv","YXBzdWxhdGlvbg==","IHJhbXBz","55Wq","IFdvb2R3YXJk","6Zeu6aKY","cm9ncmFt","0YDRg9C/0L8=","IFdvcnNoaXA=","IHN0YWQ=","IG5lZg==","IEphdW5l","YnV6eg==","YWx1cw==","T05ET04=","LXN1","IG91dHBhdGllbnQ=","amFj","RVNQTg==","w6ZsbGFuZA==","bXlw","IHNob3dyb29t","TW9udHNlcnJhdA==","LmdldERyYXdhYmxl","w6l0aWNv","IHbDoG8=","SUJD","RXhwZXJ0cw==","TWJwcw==","Ij4j","IG5vcnRoZWFzdGVybg==","IE1lag==","KG1pbGxpc2Vjb25kcw==","4oCUYWxs","LXJlYWNoaW5n","CXJlcGx5","P3R5cGU="
,"IGNydXo=","ID48Pw==","LkZpbmRBc3luYw==","KGNpcmNsZQ==","IFNoaW5l","IE1hdmVyaWNrcw==","IHNhZmV6b25l","IExhemFy","IGRpc3RpbmN0aW9ucw==","LWZlZWQ=","LnNldENvZGU=","4KSq","IHTDqWM=","IHNlcmFpdA==","IE1JQ1JP","IENvbnN1bXB0aW9u","Xm4=","LmZyb21GdW5jdGlvbg==","IFJ1cGVydA==","IGhhcmFzc2luZw==","LUNv","IHRpaw==","IFN2ZW5z","LkltYWdlQWxpZ24=","X3doaXRlc3BhY2U=","IGtpY2tlcg==","IGNhZGFzdHI=","Q2V0dGU=","X25vdGlmaWVy","IEZBRw==","IHByaW1hbA==","IGhvbW9nZW5lb3Vz","IGFzdHJvbm9taWNhbA==","IEJ1cnI=","LkNvcHlUbw==","Z3JhcGhz","aXR0bw==","T1NI","IHNob3dBbGVydA==","YW50cm8=","ImRlZmF1bHQ=","ZW1waGFzaXM=","V2Vp","b3V0Y29tZQ==","IGFrdQ==","IGNhbXBhaWduZWQ=","KSI7Cgo=","IHJlY2lwcm9jYWw=","IFJveWFsZQ==","ICMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyM=","LlRJTUU=","IDwq","T2Zmc2V0VGFibGU=","Y29tcG91bmQ=","d2FpdEZvcg==","dWVnb3M=","LnN0cmluZ1ZhbHVl","X1NDSEVE","IGZhdHQ=","wqDCoMKgwqDCoMKgwqA=","LmRpc2s=","IHdhcnBlZA==","IGNyaXRpcXVlcw==","PycKCg==","KHNraWxs","IG1vZGVyYXRlZA==","X2VsZW1z","S2V5TGlzdGVuZXI=","IHNlYXNvbmluZw==","IHBvdXJxdW9p","X0ZE","cHJk","aHlh","Ij7Dlzwv","IG5vdXZlYXV4","IGdpdmVhd2F5cw==","5oql6YGT","TWFpbk1lbnU=","Oy8q","IEdyb24=","cXVpdm9z","Ow0KDQoNCg0K","IGluZmx1ZW5jZXJz","KFRJTQ==","U2hhcmVkUHRy","IGRpYWxvZ3M=","KioqKiovCg==","LkF0b21pYw==","IE1vcnNl","IHBjYg==","IEFQQw==","LkltbXV0YWJsZQ==","IHJlc2l6aW5n","IEx1bXB1cg==","IEh1bWFuaXRpZXM=","X3NvbHZl","X2h1bWFu","ZXR5bA==","IEh1cnQ=","IEVzdGFibGlzaGVk","Y2xhcmVk","IGNvbXBhcnRtZW50cw==","QmVhbQ==","X1JN","LmZhbHNl","KEdyaWQ=","IFFTaXpl","X2ZsZw==","aXN0aWNh","PkxvZ2lu","OlVJQnV0dG9uVHlwZQ==","IEV4aXRpbmc=","Y2xhcw==","IGFyc2Vu","KG1ldHJpYw==","cm93c2luZw==","cXVlcnlTZWxlY3Rvcg==","X0ZSSUVORA==","LWlv","IGNvbmZpc2NhdGVk","IGRlZmlhbnQ=","IE1PVE9S","cmVndW50YQ==","IE1vcnJvdw==","IEJlcnM=","Q3JhaWc=","IENQQQ==","IHNleGtvbnRha3Rl","IHNhbW1lbg==","L0F1dGg=","LkxpYg==","Y3JhcGVy","aWNlbWFpbA==","Y3JhdGNo","IFdpcmVk","IGFkdmVydGlzZXI=","IGdldENsaWVudA==","IHJlc3BvbnNpYmx
5","CVVPYmplY3Q=","LnNldFJvdGF0aW9u","LkNvdW50ZXI=","X0hPVVI=","VGVzdENhdGVnb3J5","IGhpbmRzaWdodA==","XGNvbnRyb2xsZXJz","d2FsbHM=","LnNldE1heGltdW0=","IHB1YmVydHk=","X3RlYW1z","X01PREFM","LkNP","IGJhZGFzcw==","KSddLAo=","w7pzcXVlZGE=","aXJ1dA==","Q2hlbHNlYQ==","LnRyYW5zZm9ybXM=","IGNhcGl0YWxpc3Rz","TWFyY2E=","IEFyeQ==","LWNvZGVk","546v","VVJFRA==","PFRyYW5zYWN0aW9u","IFBhcmxpYW1lbnRhcnk=","KSRf","IHN1YnRseQ==","IHNpbGt5","IERpcnQ=","IHB1enpsZWQ=","fScpOwo=","cXVlc3Rz","Rm9vdGJhbGw=","IENvbmZpZGVuY2U=","dXp1","YnVsYW4=","IGh1bW1pbmc=","bW91c2VlbnRlcg==","UmV0ZW50aW9u","IHNkbA==","b2tlZGV4","JywnPScsJA==","IEt1YWxh","U0FN","IHRyYW5zZm9ybWF0aXZl","UEtH","aWxsdXM=","IHJvb3Rpbmc=","IFdpdG5lc3Nlcw==","IFJhamFzdGhhbg==","5byg","LWFkZGVk","IFRlcnJpdG9yaWVz","KHNxdWFyZQ==","cmFiYml0","X1Jlc291cmNl","6ZaL","4LiT","IHdpbm5pbmdz","IHNwbGU=","IGTDqHM=","IE1EQg==","w6lydA==","IE1hdHRpcw==","YWlsbGVz","X3dlYWs=","L2phdg==","IGNvbGxhcHNlcw==","ICAgICAgCQk=","IHN3aXJs","IE5TU3RyaW5nRnJvbUNsYXNz","IHZvbHZlcg==","LlJlY2VpdmU=","IERleHRlcg==","IHRhYmxlbmFtZQ==","cmVhdGl2ZQ==","LkdldEZpbGVz","dm9vcg==","IEhvZQ==","VkVSTg==","IE9QQw==","7YOc","cmFtaWRz","54Sh44GX44GV44KT","U3Bpcml0","IE5PUA==","IE1haW50YWlu","KHNpZ21h","b3Ry","TW91c2VDbGlja2Vk","cXVpZXJkYQ==","X3dm","0L7QutCw0Lc=","YXBwYWJsZQ==","IEhvbGRlbg==","IENvdW50ZG93bg==","LnNpZ21h","Y2hhbGs=","YmlsZGVy","IHZpc2lvbmFyeQ==","CU9u","JHVwZGF0ZQ==","IEdpbmdyaWNo","cm9vbUlk","Pk5hbWE=","IHl5dHlwZQ==","LkRlY2ltYWxGaWVsZA==","bWFjcm9z","LnNldExheW91dFBhcmFtcw==","IHJubg==","IElNRGI=","56eN","ZW1hbGVz","IGluY2lkaWR1bnQ=","UmVzdHJpY3RlZA==","IHBlZGFscw==","IEpvZw==","IEFkYXB0aXZl","IGZhZGVz","LkV2ZW50U3lzdGVtcw==","IFBhaWdl","IHNlaXM=","IGFwcHJvcHJpYXRlZA==","RkZU","Z29yaXQ=","IGNvaGVzaXZl","IE5pY2h0","X3dvcmtmbG93","bGl1cw==","IEZvcnRuaXRl","X0lX","QXRQYXRo","IGludG94aWNhdGVk","bm9zdGlj","QmluQ29udGVudA==","LnJlZHVjZXI=","KT8K","J10q","IE9ic2VydmF0aW9u","X3ByZWZz","LnJlc29sdXRpb24=","LlBheWxvYWQ=","TWl4ZWQ=","IFJhaQ==","KHBkZXY=","KEAo","
aWNvdA==","JGlz","IGNyZWU=","Pz0uKg==","LlFMYWJlbA==","IEdlb3JnaWFu","eENB","IGRlZmljaWVudA==","dGhyb3du","IHJhcGluZw==","dXBvcw==","CWNsaQ==","Z2V0Vmlldw==","SGlnaGxpZ2h0ZWQ=","Q3BwR3VpZA==","IHJlbGVnYXRlZA==","IGxlYWRlcmJvYXJk","UmVjZWl2ZVByb3Bz","Lmhhcg==","IGNvbmRp","SU1JVElWRQ==","IE1jQ2FydA==","KXRocm93cw==","YnVpZQ==","YnVhaA==","LmNvZWZm","IEF1c3NpZQ==","IFNhYmhh","KGZhYnM=","cmVsYW5k","IEbDtnI=","YmFyYW5n","LHRvcA==","CWVsc2lm","U3RlcFRocm91Z2g=","IHNrZXdlZA==","IFVudXNlZA==","Jyl9Pgo=","WWU=","Y2FsbGVl","SGliZXJuYXRl","IEV2ZXJlc3Q=","aW1wb3J0RGVmYXVsdA==","IHRhcm4=","IE5vd2FkYXlz","WUE=","IENoYWxsZW5nZXI=","X2xvZ2ljYWw=","IGNyZWF0ZURhdGU=","IEdsb3VjZQ==","IGN1YW50bw==","IEhBUg==","IENoaWxs","Il4=","IGN1cnNvcw==","LkVPRg==","IG5pamU=","IGFuZ2VyZWQ=","b2N1c2luZw==","PENvbnRhY3Q=","IEF0bW9zcGhlcmlj","IFdvbGZnYW5n","IEJK","Y2hpbGRz","IEJ1Z3M=","X0hFWA==","KFNQ","w6Vs","X2V2YWx1YXRpb24=","IFJBTkdF","IFNPUA==","X3Rva2VuaXpl","bXNnaWQ=","IHJleA==","CXBt","Q29weWluZw==","Kkw=","RGFsbGFz","LVN0YXRl","dWxmaWxs","IGJ5xYJv","IENvbnRyYWN0b3I=","RGlkbg==","QVNURQ==","IFBJTw==","LlRlbGU=","LndhdGVy","ZGV6","IGFuZ3JpbHk=","IHV0aWxpc2F0ZXVy","IHZvcnRleA==","Q29ycG9yYXRl","YXR1cmFz","IHByaXplZA==","J3VybA==","dWdsaWZ5","IGltcHVsc2Vz","IGNocm9ub2xvZ2ljYWw=","cGxlbg==","X25hbWE=","L29u","IE9mZmljZXM=","IENQSQ==","IEFmdGVyd2FyZHM=","44GT44KT44Gr","X0JMT0NLUw==","R3JhY2U=","LyoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKg==","IEthYnVs","IOaIkA==","IExlaXB6aWc=","4Kao","U2hvY2s=","QXVz","IG11cm0=","X3N0YXJ0cw==","IGLDpA==","IFp5","IkY=","LXJpZ2h0cw==","IGJlaGF2aW5n","KCc+","IG1vc3F1ZXM=","KndpZHRo","Ii8+Ljwv","LnVuc3BsYXNo","LmdldEFjdGl2aXR5","VVU=","IFNoYWs=","X3Jn","X0VxdWFscw==","J2h0dHBz","IE94eWdlbg==","IFBvcnRzbW91dGg=","4oCUb25l","IHdhdGNoZXJz","IENob2k=","IHNpZGVy","cGVjdHJhbA==","bXF0dA==","LmNyZWF0ZVVzZXI=","amVjdGl2ZXM=","dXJtYQ==","UmVnaXN0cg==","UGVyc29uYWxseQ==","PWtleQ==","IE5FTw==",
"IEZBUXM=","aWJpbGlkYWRl","Y2tzw6U=","IENvbGxhYm9yYXRpb24=","CWxibA==","LlNFUlZFUg==","IGFib3VuZA==","IEJlbmU=","d2FudGVk","LWhvbGU=","IG11dHRlcmVk","IHBlcA==","bmVzYw==","LlVwbG9hZA==","c2VtaQ==","eEVD","Jz4iKw==","IGVtYnJ5bw==","IEZpeGVkVXBkYXRl","Q2FzdGxl","Lm1vZGVsbw==","IHBscw==","IGVudmVsb3Blcw==","X3JlbWFpbg==","UXVhcnRlcg==","YWxlcnRWaWV3","X2Zvcm1hdHRlZA==","IGxhc2hlcw==","emVsZg==","aG9tbWU=","LmZsb3dMYXlvdXRQYW5lbA==","YWlycG9ydA==","IE1lbW9yaWVz","IEhFUk8=","IEFzaHRvbg==","IGV4aGliaXRpbmc=","KFNFTEVDVA==","U3VibWlzc2lvbg==","U3R1ZmY=","X3N1bg==","IHBlcsOtb2Rv","IGRlc3ByZQ==","CWVkaXQ=","IER0eXBl","Y2Vzc2l2ZQ==","YWFk","IGRlc2Nvbg==","bmVsbHk=","IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQ==","IHNjcmlwdHVyZXM=","IG9uVmlld0NyZWF0ZWQ=","IEVWRQ==","IEJhbGxldA==","O307Cg==","VURP","IFByb2JhYmlsaXR5","cXVpcnJlbA==","Q29udGFpbmluZw==","IFBsYXQ=","6KI=","L2JpdA==","IEpRdWVyeQ==","IHRpZW5lcg==","L2RyaXZlcnM=","IFByZXNpZGVuY3k=","XHVE","IEl2ZQ==","aWVuYQ==","IGh5cGVycw==","IFNwZW5kaW5n","PFc=","IFRIRU1F","IHVzZXJQcm9maWxl","IGFubnVt","cmV0d2VldGVk","IFwnJw==","YnVuZGxlcw==","KCk8Lw==","IEN5bGluZGVy","IG91dGxpZXJz","IGRpc3NlbWluYXRpb24=","L2FwdA==","IE5hdGFzaGE=","IHJlbmRlckl0ZW0=","IENoaXBz","IHJvdW5kdXA=","IGltcHJvdg==","IGNvbW11bmljYXRvcg==","IHNreXBl","TU1N","cmlqaw==","LlBsYWNl","IHBhc2E=","IFNZTkM=","ZW5zaXM=","IEF4ZWw=","ZW7Dp2E=","Z2V0U3RyaW5nRXh0cmE=","YWJpbGl0w6k=","IGVtYWNz","LmdyYXZpdHk=","IGNoZXJpc2g=","IElTU04=","CUpzb24=","dXlv","IHVwdGltZQ==","IHJhbmRvbW5lc3M=","IGxvZnR5","Qm93","Q3JlYXI=","IHRvd2VyaW5n","Y2F0ZWdvcmll","L3Bvd2Vy","L3dlbGNvbWU=","fFI=","IGJhcnJpbmc=","aWRpYQ==","cXVhbQ==","w7pkbw==","ZXhwZXJpbWVudGFs","IGNsYQ==","IGN1cmF0b3I=","cmVhbWJsZQ==","aW5keA==","TExM","IH0pOg==","IGhpc3RvaXJl","c2ltdWxhdGU=","PEFueQ==","IEdsYW0=","IEJhcmc=","VmFsdWVDb2xsZWN0aW9u","IEluc3RpdHV0bw==","QXNTdHJpbmdBc3luYw==","IGFkZWM=","IGZlbGxvd3M=","cGlwZXM=","IFBsYWNlaG9sZGVy","IEtn","IEFsYnVtcw==","ICooKg==","X0dPT0Q=","K
SIsDQo=","LlFSZWN0","w6Jt","IH0NDQo=","TWFyc2hhbEFz","QmFjaGVsb3I=","IEJhcmNvZGU=","IFRyYXZlcnNl","IG9kaW8=","LnNldFBhcmVudA==","IHNlbWljb25kdWN0b3I=","QUxMRUw=","IGJhbnF1ZXQ=","IE5ld3NwYXBlcg==","RE9NTm9kZQ==","IE5hdWdodHk=","Rm9ybWF0dGVkTWVzc2FnZQ==","IGRpc3J1cHRpbmc=","5piT","IGxvb2thaGVhZA==","IGdyYXR1aXRlcw==","IGNoZWVzeQ==","IFNQRg==","blA=","IGFyc29u","IGFudGVubmFz","X01JRERMRQ==","X01BTExPQw==","LmdvQmFjaw==","IFByb3Bvc2l0aW9u","IE1pY2hhZWxz","X3Byb29m","INC90LDQudC0","w6R0emxpY2g=","LXJvbGw=","RURB","w6Fuw60=","Z292ZXJubWVudA==","w7Z0dA==","IEVzdGFibGlzaG1lbnQ=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","X0hJVA==","IEFJTQ==","YWRvbA==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCg==","X1JFRkVSRVI=","IGZvcm1hdERhdGU=","dWN0b3Nl","IGRvd25sb2FkZXI=","VGV4dEVkaXQ=","IGRpc2FybQ==","IEhBUFA=","0L7QtNCw","ISkuCgo=","L3Byb2Nlc3M=","IGJyYWluc3Rvcm0=","IE9SSUdJTkFM","LlRhYmxlTmFtZQ==","IEtvc3Rlbmxvc2U=","IGTDqXA=","IElzYWJlbA==","IGFzdHJvbm9tZXJz","UVVJUkVT","OiIt","dXBsb2FkZXI=","Oi8vJQ==","IGFtaXM=","RmlsZVZlcnNpb24=","ICwk","Y29vaw==","LFNJR05BTA==","JywvLw==","IFN1cHByZXNz","IExhdGlub3M=","IHdpdGhob2xk","IG1uZW1vbmlj","X0NZQ0xF","IGhvZA==","IFdvcnNl","ZXJkZQ==","IHR5cGVpZA==","CWV4cG9ydHM=","IGFjaHRlcg==","b3Nhcw==","IGZvb3Rub3Rl","aGFuaQ==","KFBhcmFtZXRlcg==","CVJlbmRlcg==","IFlZU1RBQ0s=","IFhJSQ==","IHNpZGVu","IGFyb3VzYWw=","IE9P","Qml0dGU=","IG5lYXJlcg==","IENpcmN1cw==","IENPTE9SUw==","IHdpZWxkaW5n","LkZpbGVTeXN0ZW0=","IGdyaWxsZQ==","IERvdmVy","CiAgICAgCg==","KGdlb21ldHJ5","IHN0YXBsZXM=","IEFubm91bmNlbWVudA==","IOuyhA==","IGZvcnR1bmF0ZWx5","LlNvbWU=","IG1hbmdhbmVzZQ==","IGludGVydmlld2Vy","WVJP","IGNyeXB0b2dyYXBoeQ==","IGNoYW1icmU=","LnJldHJ5","IGltaXRhdGlvbg==","JGZkYXRh","IGxvdGlvbg==","KGlkZW50aXR5","LnBn","IHByZXN1bXB0aW9u","X1NVUEVS","dm9jYWI=","IFNlbWVzdGVy","IEFiZWw=","X2FwcHJvdmVk","LmNvbXBhdA==","IHdhcnRpbWU=","XV07Cgo=","bHV0","X0FjY291bnQ=","Pygn","Y29vcA==","L3JlZ
w==","LnNldFRv","aXRlc3Nl","IEh5ZHJh","Qmlucw==","Y2FkZW5h","Pi8nLA==","Llwi","CWFjY291bnQ=","IERhaGw=","IGRyb3du","IGdhdXNz","IHRyYW5zZm9ybWVycw==","IE1ldGFsbGlj","IEhlcmJhbA==","YWNocw==","X2J1dA==","IGl0ZXJhdGl2ZQ==","IEZyZWVk","anVy","fE0=","O2JyZWFr","X0ZG","KGRvd25sb2Fk","4buDbg==","LmNoZWNrU2VsZlBlcm1pc3Npb24=","TkVUV09SSw==","OmZsZXg=","IENUTA==","IEFyYg==","IFByb2R1Y2U=","CXN5bmNocm9uaXplZA==","4oCcT2g=","LmRhdGF0YWJsZXM=","IGNvbmVz","RMOp","0YbQsA==","QWxn","IGZ1bmNpb25h","IFViaXNvZnQ=","IGdlb3BvbGl0aWNhbA==","IHNpZWh0","IGh5ZHJhdGlvbg==","c3Rocm91Z2g=","IER1ZGxleQ==","YXrEgw==","IHRheGluZw==","INC30LDQutCw0Lc=","X0FTTQ==","TmV1dHJhbA==","dHJhZGl0aW9uYWw=","UGxheWFibGU=","IHNwYWdoZXR0aQ==","IGlDbG91ZA==","IERheXRvbmE=","IHdlcmRl","IEFOVA==","IFByb24=","IFN0YXRpb25z","IGF0dGVzdA==","IGZ1bGxlcg==","IG5vdmFtZW50ZQ==","XVxc","Y2Nl","KGRlY2s=","L2F5dXNobWFu","aWdzYXc=","IGFkdWx0ZXM=","IHRlcnJl","Lk9yZGVycw==","CXByb3BlcnRpZXM=","RElH","IFRJTUVT","ImluZGljZXM=","ITw=","TW9uYWQ=","IG5vbmV4aXN0ZW50","IEF0bGFudGlz","IGdyaWV2YW5jZXM=","dXJlbmNl","IElQUFJPVE8=","4pmA4pmA4pmA4pmA","IGVtcGxlYWRv","INmD","Lk1vdmVOZXh0","IElzbw==","YmVhdXRpZnVs","IHNvbHVibGU=","IHNsdWdnaXNo","IGRpZmZz","X09CUw==","eG1pbg==","IHR1bWJsZQ==","IFVuYXJ5","IHppcGZpbGU=","IHN2ZW5za2E=","ZXJsYW5k","L2N1cGVydGlubw==","CXNjcmlwdA==","aXNjaGVz","TW9kaWZpZWREYXRl","IHZleWE=","IGRldGVybWluYW50","IEdvcmdlb3Vz","Z2Jvb2xlYW4=","IExPRA==","ZGNj","c2NlbmVz","IFRTUk1MUw==","KFR5cGVFcnJvcg==","IGNhbW91ZmxhZ2U=","IGJ1cmdl","VGhlbQ==","LkFzc2lnbg==","IGxhc3RJbmRleA==","X3NwaGVyZQ==","X0FCSQ==","w4Q=","aWxhZ2U=","XHhmZg==","IGtheWFr","IGZpeno=","dWl0ZW4=","LlNob3VsZEJl","IGh0b25s","IFBldGl0ZQ==","IGhlYWxz","IE9zYWth","Tko=","SW5QYXJhbWV0ZXI=","IEJpcmNo","IGNvbW1lbnRhaXJl","IFNpZWdl","IGtleWNvZGU=","LWludGVuc2l2ZQ==","cHJvcFR5cGVz","RXhwb3J0cw==","IGJ1dHRvblRleHQ=","IEdvZHppbGxh","LkV4Y2hhbmdl","IHVuZGVyc3RhbmRhYmx5","IGFjY29yZGlvbg==","IHLDqWdpb24=","IG1hcmtlZGx5","YW5vb2dh","IGNvbnRyYXQ=","X2xpZnQ=","W2RhdGU=
","IHNjb3Ju","IERhdGFNYW5hZ2Vy","4oCm4oCmCgo=","X0NPTVBJTEVS","IENsYXc=","b2RhdGU=","IHVuZGVyYWdl","IEltcGxlbWVudGVk","Q2xp","S2Fs","UHJvZHVjdG9z","IGVuZmVybWVk","w6lpcw==","IGRpc2NyZWRpdA==","IFNhbW9h","IFByZXNlbnRlZA==","IGNpbmVtYXQ=","XEFjdGl2ZUZvcm0=","IGZlcm4=","IFByaW1lcg==","5oKo","Z2VyZQ==","IGlsbHVzaW9ucw==","bm90YXRlZA==","IHBvag==","IG1vZGVsTmFtZQ==","IFBNQw==","IGRlY2Fk","IGZvcmVzdHJ5","dm9pZQ==","Li4uCgoKCgoK","IH19Owo=","IHRva2VuSWQ=","YW1tdQ==","IFBlcnNvbmVu","IFZFUkJPU0U=","IHBhdHJvbHM=","IGFudGlj","X2RlZXA=","ZWdlbmQ=","IFNldFByb3BlcnR5","IEdhcmV0aA==","IE1BUw==","LnJlc3RhdXJhbnQ=","IEhlYXZlbmx5","aWVkbw==","X2xlYWQ=","IEZ1amk=","UU4=","TWFzc2FnZQ==","IHBhcmFtTWFw","IGNpdGE=","X1NwZWVk","KGJib3g=","IEpVTA==","4oCZYW4=","IG1lbnRl","IFNob3djYXNl","IENTSQ==","PlR5cGU=","LlNu","b3R5cGljYWw=","IEZhbGxvbg==","LlVUQw==","IHByZWRhdG9yeQ==","IG9yZ2FuaXNpbmc=","Y29sZA==","IHBhcnNlcnM=","dWllbg==","IGNvbXBpbGVycw==","IFs9","IEV1cmFz","TU9TVA==","CiAgICAKCg==","UkFS","LlNjaGVkdWxl","Lm9wZXJhdGlvbnM=","dWZz","w7FhbmE=","IHByZW9jdXA=","LXRyZWF0ZWQ=","LmdldFdvcmxk","Lic6","IEFUSA==","OnN0YXJ0","IGF1dG9pbW11bmU=","IEJsYWNramFjaw==","X0ZJTklTSA==","KGZsb29y","IHdyZWNrYWdl","VVJU","LkJyYW5k","cGFpcw==","Y2ltYWw=","Y2nDsw==","TkZM","LWVxdWlwcGVk","LmNvbnRlbnRPZmZzZXQ=","IG92ZXJjcm93","IFRa","IG9kb20=","IENlbGx1bGFy","CXdyaXRlbA==","KGlucHV0U3RyZWFt","KHByZWY=","LXN0b2Nr","IERlbmllZA==","LXN1cHBvcnRlZA==","ICcoKA==","YW5jb2Rl","LmZpbHRlcmVk","RGltcw==","IGpi","CXByaWNl","IEBACg==","bm9jaw==","Lm9wZW5Db25uZWN0aW9u","IGFudGljcw==","cmVzdWx0Q29kZQ==","UGxheWJhY2s=","IGNlbHVsYXI=","IEZPT0Q=","IFBvZGVzdGE=","PW1lc3NhZ2U=","LnBlcmZvcm1hbmNl","IERtaXRyeQ==","YWx0aW1vcmU=","IHBsYXRlZA==","IHR1YmVyY3Vsb3Npcw==","X2dlbQ==","KEVkaXRvcg==","VHBs","IGNyaWFu","IGJ1ZmZlcmluZw==","6KeG6aKR","ICcpCgo=","VnU=","TWF0aGY=","IHRpbWVsaW5lcw==","IFRhdGE=","L3Bw","IHBsYXN0","IFRydWx5","IFN1YnN0aXR1dGU=","a2llbQ==","a2Fhcg==","IFZpc2g=","J2h1aQ==","IE1hZ2ljaw==","L0xheW91dA==","dXJhbsOnYQ==","
X3R0bA==","SGlkZUluSW5zcGVjdG9y","LmtleXdvcmRz","TGlzdE1vZGVs","X1N1Y2Nlc3M=","aWxpaGFu","IGJsYWNrbWFpbA==","IFNlcmJpYW4=","cXVlbGxl","IER5c2Z1bmN0aW9u","IFByZXBhcmVk","IGpNZW51SXRlbQ==","IGxvZ2luVXNlcg==","c2V0YXR0cg==","LkNS","X2xjZA==","IGJ5dGVzUmVhZA==","IGNkZWNs","IHRvd25zaGlw","cGVr","aWprc3RyYQ==","IG1heGltaXppbmc=","LnByb3ZpZGVycw==","SW52ZXN0aWdhdG9ycw==","IHNob290b3V0","IGFpcnNwYWNl","dG9vbGJveA==","UVdpZGdldA==","PXBr","IHBvcnRlcg==","IFByZWRhdG9y","IFN1bnJpc2U=","IGRldm91cg==","CVVJbnQ=","aXR0YW5jZQ==","U1BB","X2VuZGlhbg==","IE5hZ2Fy","dmVuaWRh","L29wdA==","QnlFbWFpbA==","IFBoeXNpY2lhbg==","XEQ=","INC80Ys=","WUVBUg==","SUND","L3BvcnRmb2xpbw==","LmV4ZWN1dG9y","dWRlbQ==","RmFsbGJhY2s=","dWR1","U2xpbQ==","w7Nsbg==","Xnst","YW5za2U=","IGh1c3RsZQ==","IElyZW5l","IGFieXNz","IFJvYmJpbnM=","IGluZGV4ZXI=","U2F1ZGk=","IHdob2xlc29tZQ==","LXNsb3Q=","IFRlY24=","IHBhZ2VUaXRsZQ==","IGNvbnRlc3RhbnQ=","aWNvcHRlcg==","IGNvdXJzZUlk","Q2hy","IEFYSVM=","Zm9yZGVy","X1RVTg==","VHJhZmZpYw==","IHR5cGVhbGlhcw==","IGRhcmY=","LXVyaQ==","dHN4","LmRlc3Ryb3lBbGxXaW5kb3dz","IGl0ZXJhdGluZw==","UmVhY3Rpb24=","CUFN","IGN1ZW50","LWNvb2tpZQ==","IGZsYXZvcmVk","c3RvaQ==","IGZsaXJ0aW5n","44CL77yM","4KSu","X0NSWVBUTw==","W3Rva2Vu","IHByb2xldGFyaWF0","LuKAmeKAnQoK","CWRj","LlN0cmluZ1Zhcg==","IGxlZ2l0aW1hdGVseQ==","X2RlY29yYXRvcg==","TG9ja2Vy","IEplbm5h","VVJJTkc=","5YaN","X1ByaW50Zg==","QVRPUlk=","LWRpc3Q=","ICIuIik7Cg==","LnF1aXo=","IGlyZ2VuZA==","LWxlYWd1ZQ==","Z2llbg==","IFByb2R1Y2Vk","SGVsbWV0","5Y+v6IO9","UGxhdGZvcm1z","IFJlc291cmNlTWFuYWdlcg==","IEh1bmRyZWQ=","cm9tZXRlcg==","ZW5na2Fw","SG9w","IHBvc3N1aQ==","QmVmb3JlRWFjaA==","IENISw==","IElNUw==","VGlja2Vy","IGdyaW5uZWQ=","LmdldEFz","IGltcG9zZXM=","XSIp","Rm9yZ2V0","L2ltcG9ydA==","IGluamVjdGluZw==","TG92","IGFicmls","X3NsaWNlcw==","LWNvbW0=","IFBST0RVQ1RT","IE9hc2lz","IMO4bnM=","IFJlamVjdA==","IHJlZ3VsYXJpemF0aW9u","aW1wbGljaXRseQ==","bmF6","U3BlY2lmaWVy","IGltcG92ZXJpc2hlZA==","5po=","IG5vbWluYXRl","IE9WRVJSSURF","IEJhbmRz","ZXRoeXN0","IEp
pYW4=","IG5ld2NvbWVy","IE5hYg==","IGVicA==","IFBhZ2Vy","IEh1bWI=","L2Nj","IGV4cMOpcmllbmNl","dWRnaW5n","TWI=","ZGJ1Zg==","Jy8+","IG9ja3PDpQ==","IGpkYmNUZW1wbGF0ZQ==","IFNISVBQSU5H","IGludGVyZGlzY2lwbGluYXJ5","IENFVA==","YXV0b3A=","LXN5bWJvbA==","YXZlYw==","IGNvbXBvdW5kZWQ=","IENodW5n","X1NNUw==","LWll","IFByb3NlY3V0b3I=","IExlaWE=","IE1hbmRlbGE=","U2luZ2xlT3JEZWZhdWx0","CVJFUVVJUkU=","YXRvd24=","dXJyZXRz","5paH5a2X","IENPTlRFWFQ=","RU5TSVRZ","IGluc3VyZ2VudHM=","IERpYXM=","LnN0YXRpb24=","IEtsYW4=","X21lYXN1cmVtZW50","X1FNQVJL","IHN0b2k=","TU9PVEg=","PicpOwoK","IGluZ2VzdGlvbg==","IEdsb3c=","dXRjaGVz","YmVhcmluZw==","LnRvYXN0cg==","IGZyYWdtZW50YXRpb24=","aXBwbw==","X1NFR01FTlQ=","IHN0dW1ibGluZw==","aW1hcg==","c3Rpbmlhbg==","XygpCg==","IG1vdGl2YXRpb25hbA==","TGlzdEl0ZW1UZXh0","IHdvbWVucw==","T3BlbkhlbHBlcg==","aWJhbmQ=","IGJ0blNhdmU=","IGluY29ycG9yYXRpb24=","IGRvY3VtZW50YXJpZXM=","aWNs","IE5k","IEFyYQ==","IHF1YWtl","IEN1bW1pbmdz","aHRt","YXN0ZXJlZA==","LmR0cA==","IGNvbmRvcw==","IEd1bmRhbQ==","L2Rpc2FibGU=","aHlkcmF0ZQ==","IEVwb2No","IG5hdGlvbmFsaXN0cw==","IGRldmVy","LHJlcXVlc3Q=","LmdldFZlcnNpb24=","Q0VMRVI=","IFNhbGFo","IG1vdGU=","IE1lbGxvbg==","c3BvdGlmeQ==","IG9yaWdlbg==","IG5hbGU=","IGFkdmVyc2FyaWVz","LkpUYWJsZQ==","Zm9yY2VtZW50cw==","IFJldHJlYXQ=","IGFyY2hpdm9z","IHNsYXNoZXM=","Lk1vdXNlRG93bg==","PDo6","X3Rocm91Z2g=","QWxhbWF0","LmJsdXI=","X2ZpbmRlcg==","IGFsbHVyZQ==","UGVyaXBoZXJhbA==","X3Bhc3NlZA==","X2NoYWxsZW5nZQ==","IFBhbGVv","SU5J","RGlyZQ==","c3BoZXJl","KENPTE9S","YWNrZXJz","IEdseXBo","KGludGVnZXI=","INC60L4=","IFJlbGV2YW50","INm+","IGF0YXM=","X3ByaW0=","IE1VVA==","bmluZ2Vy","YXV0b3JlbGVhc2Vwb29s","PV9f","IFNpZ25pbmc=","7ZWY7KeA","IHVjeg==","RWRpdGluZ1N0eWxl","IEhlYXRlcg==","IEZhaXJmaWVsZA==","IEJlYXJk","LGVu","dXNhdA==","KCcuJw==","L3N0cmVhbQ==","IGdldFN1cHBvcnRGcmFnbWVudE1hbmFnZXI=","IG1DdXJyZW50","X1NUQVRFUw==","X3dpbmQ=","Q0hBUFRFUg==","cHJvYmFiaWxpdHk=","KGFubm90YXRpb24=","ICovDQoNCg0K","LlVuaXF1ZQ==","LkFkZEZpZWxk","SGlnaGVy","LmRpZ2l0YWw=","LmV4cGVyaW
1lbnRhbA==","YXds","IHdoZW5jZQ==","ZXJub3Rl","U0FNRQ==","Lmlwdg==","dG9CZUZhbHN5","YnJhbmU=","X2NhdGVnb3JpY2Fs","QXVyYQ==","IFR5cGVTY3JpcHQ=","IHNwb250YW5lb3VzbHk=","bG9uZ2xlZnRyaWdodGFycm93","aWthbA==","X1RPRE8=","IFd5YXR0","IGZsdXJyeQ==","ZGlm","IHJlY2tvbg==","IENvcm91dGluZQ==","CWZmbHVzaA==","IHdvcmtmbG93cw==","IEZBTUlMWQ==","c3ByaXRlcw==","X1dvcms=","LkdldFNpemU=","IENvbnN0cmFpbnRz","QmlnSW50","aXRpYQ==","Z2V0Um93","IGR1aw==","IGlzTmV3","IFByb2R1a3Rl","eENC","aXNpZXJ0","ZnVuY3M=","IEFkZW3DoXM=","QmluZGluZ1V0aWw=","b21waWxlcg==","LWludg==","IGNoYW50cw==","IGVudHNwcmVjaA==","KHRp","X0lB","0L7RgNC00LjQvQ==","IEZBTEw=","aW1k","IGxvY2FsdGltZQ==","PExpbms=","0L3QuNC60LA=","IHByb2ZpbGVy","IGdldFVzZXJJZA==","IFBoeXNpY2lhbnM=","UkFE","IGhtbQ==","IE5lc3M=","IFRlbXBv","IEpU","IHJlY29ubmFpc3NhbmNl","PHRyYW5zbGF0aW9u","IGVudGljaW5n","IHF1YWludA==","IGNvdXBl","X18nLA==","TkFTREFR","INC30L3QsNGH0LXQvdC40Y8=","UEVSQVRVUkU=","IFBhaQ==","IHRldGFz","Q0FT","SVJST1I=","IGtj","IHRvdGU=","IGRyYXdiYWNr","IHBhcnNsZXk=","CUZ1bmN0aW9u","aXN0eQ==","IERVUA==","X0NJRA==","X1VU","IGtzaQ==","IGrDpA==","PXZhbA==","LnRvSGV4U3RyaW5n","5p2/","LmNsaXBz","IG9mZmVu","IFRFQ0hOTw==","IFNoYW1l","IHN1c2NlcHRpYmlsaXR5","IHN0dXBpZGl0eQ==","IFRyb3V0","IENoYW1wYWduZQ==","ZXRoeWxlbmU=","IGJlZ3I=","X3JlZGlz","WWVw","IGhhbnM=","IERlZmVuZGFudA==","IGRhc2hlcw==","IHVzZXJUeXBl","X2RhdG9z","IHVuaWM=","a3JpdA==","IHJlY2VwdGl2ZQ==","IEdyZXQ=","KG1i","IEluZmx1","w6tu","fS8+","aW50ZXJlc3Rpbmc=","VVRVUkU=","IGltYWdlU2l6ZQ==","IGdyZA==","IGFic29s","L2Zh","LmdyYWRpZW50","IHd5c3Q=","XX0+Cg==","bGVnYXRpb24=","Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0KCg==","IEJsZW5kZXI=","X18pOw==","IHVzZXJFbWFpbA==","IFBoYXI=","bGVoZW0=","KSk/","KFJldHVybg==","ZWdyYQ==","dXRpdm8=","IGFwcGVuZGl4","IFJUVkY=","IFNFQUw=","IGd5cHN1bQ==","X0FyZw==","IGlsbHVtaW5hdGU=","IFNjaGlmZg==","cXVpbA==","LkNvbWJvQm94U3R5bGU=","J10pKQoK","IGFsdGVycw==","IHByYWN0aXNl","IHVzdA==","IERpbWl0","LV
JlZ3VsYXI=","IGNyZWVwaW5n","IENhbmFkaWVucw==","IHJldG9ybg==","LWNvcm5lcg==","ICJdIg==","KHJuZw==","IGNhbmFkaWFu","IHBvc3Rv","LmFzc2VydEFsbW9zdEVxdWFs","IEJlY2t5","L3Nz","IGhvc3RhZ2Vz","IGJpb2xvZ2lzdA==","IEhvc3BpdGFsaXR5","IEVsaw==","IEJhcmFuZw==","66qp","YmJiYg==","LnRlYWNoZXI=","IHRlcm1pbmF0ZXM=","IGlzRXJyb3I=","IEtlbmRyaWNr","ZW5kYXJz","IFN1Z2dlc3Rpb25z","Q2Vs","IFNlcnZpY2VQcm92aWRlcg==","IFdpY2hpdGE=","XSkpLAo=","IGhlYWRsaWdodHM=","X3ZlbnRh","QU5USQ==","IHByb3BpZWRhZA==","IGVubGlzdA==","CW9yZw==","TWVzc2VuZ2Vy","LmxhbmQ=","IicK","YXNwZXJz","IHRlcnM=","ZmlsdA==","IEZ1bmN0b3I=","IHNsaW5n","X0JMSw==","LUV1cm9wZWFu","IEFjaGlsbGVz","XEVudGl0aWVz","LkRpc3BsYXlNZW1iZXI=","IHJlZGV2ZWxvcG1lbnQ=","CWhlbHA=","IFsnLQ==","IEp1bGllbg==","PUludGVnZXI=","LmlzTnVsbE9yRW1wdHk=","IFdvVw==","UGF5bWVudHM=","KGhkcg==","IGJhamE=","IEpDb21ib0JveA==","RmlyZWZveA==","IGNvbmdsb21lcg==","X2N1c3Q=","JCIpCg==","IG11dGFudHM=","TWFnbg==","IE1QSA==","e18=","X3dhcm5pbmdz","IGdhc3Q=","THQ=","IHRyYWluYWJsZQ==","VHJhZGVtYXJr","QkFTSA==","IEVDUw==","UmV0cmlldmU=","J08=","IGluaXRpYWxpc2Vk","IGNoZW1pbg==","LlRyYW5zcG9ydA==","IFlpbmc=","YXNpb25z","IG1vYw==","X0xPR0dFUg==","R0VOQ1k=","IEJsb2dnZXI=","ICIpIgo=","UEVuZA==","IGFjY29tcGFnbg==","LkNPREU=","IG1MaXN0","LWVkdWNhdGVk","LC8=","IE1lcnJpbGw=","L3Blb3BsZQ==","LicnJwo=","X3RvZG8=","IGfDvG4=","X0ZVTExTQ1JFRU4=","LmNsZWFudXA=","VW5tYXJzaGFsbGVy","LlN1cHByZXNzTGludA==","IG9uc2xhdWdodA==","IE1hcnNlaWxsZQ==","ZWRpYXRvcg==","X0VOVFJJRVM=","LGRlZmF1bHQ=","bWVsZHVuZw==","ZWxmdGg=","IEdvdmVybm1lbnRz","IHBsZWFz","b3R0cw==","IHBsdW5kZXI=","cmVhZE9ubHk=","IGR5c2Z1bmN0aW9uYWw=","J05laWxs","IHVubG9hZGVk","IHNxdWVlemluZw==","IGRvb2Q=","LmFkZERhdGE=","IEFzaQ==","TUVT","KHNjaGVkdWxl","IGFkdmVudHVyZXJz","ZXhwZWN0RXhjZXB0aW9u","IH19Pns=","Q0xT","IHJlY2hlcg==","IGRlcm5pw6hyZQ==","LkRldGFpbHM=","IHJhbmRvbU51bWJlcg==","IGlhcg==","IExhbmdl","ZXdl","IEVtaWw=","IGFkdmVydHM=","IGRyYW1hcw==","IEtvbW0=","ICAJCQkJ","X1Rlc3RDYXNl","IENsYXJlbmNl","0LXQvdGC0LA=","dG91cHBlcg==","Lm9u
U3VibWl0","Y2Fh","X0FMQVJN","KikKCg==","IOuzgOqyvQ==","LlByaXZhdGU=","IHNreWxpbmU=","UkFJTg==","KGN1cmw=","b3NpdGU=","SWdub3Jpbmc=","IHZ6","IHZlZGVyZQ==","IE9TWA==","YmFuYW5h","IG1ldGFt","IHRyYW5zbGF0ZVk=","IE1jR3I=","4oCZYWNj","5Lul5LiL","IHNwaXJpdHVhbGx5","KGVuYWJsZWQ=","IHJlc3RvcmVz","IGJ0bkNhbmNlbA==","dmFuaXNoZWQ=","IE51ZXZv","U2FsdmFy","Y2FmZmU=","IG1hc3RlcmluZw==","aWRkbGVk","LmlzZGlnaXQ=","IGdyYXZ5","YWdlZExpc3Q=","XFJlc291cmNlcw==","IGRvd25mYWxs","LlBhc3M=","IGFsdGlqZA==","IHBpenphcw==","IH0pKQ==","cGVybXM=","aWdodG9u","IHJlcGVsbA==","ICcnKSw=","Lm5vcm1hbGl6ZWQ=","IG1hcmNoZXM=","CXJlc29sdmU=","Q2hpbGRTY3JvbGxWaWV3","IEluc3RpdHV0aW9ucw==","QXR0ZW5kYW5jZQ==","bHNl","ZXJkZW0=","LmdldElucHV0","SGFzQmVlbg==","YXBldXRpY3M=","ICpc","IFJpdHVhbA==","X0xT","IHNwb3RpZnk=","IHNww6R0ZXI=","IFRodW1ibmFpbA==","KGNlcnQ=","IGdldFJlc291cmNl","X3Bsb3Rz","IHN0YWluaW5n","YWRqdXN0ZWQ=","INep","RGl2RWxlbWVudA==","IFRUQw==","IGFwcm92ZQ==","LnZpZXdlcg==","fD0=","Z2V0U291cmNl","55S16K+d","X1RC","X2JpbGxpbmc=","LUxpZmU=","IHBzeWNoZQ==","IHRhYlBhZ2U=","IEluZmVjdA==","eGZmZg==","X2hpZA==","IGFwb2NhbHlwc2U=","IE5GUw==","IElURVI=","V2luZG93U2l6ZQ==","aGVpdHM=","IGluY3JlbWVudGVk","IEJyYXk=","ZW5lZ3Jv","IGFsbW9uZHM=","WVBSRQ==","Tm9ybWFsaXpl","4oCcV2VsbA==","IEFwaUNvbnRyb2xsZXI=","W1VuaXQ=","R2VucmVz","IE5leA==","IExORw==","IGZvcmVnb2luZw==","IHRlbmRvbg==","IEhw","Q291bmNpbA==","IFNhdWRpcw==","IERlemU=","IHNjcmFwZWQ=","IGJvdHRsZW5lY2s=","IE9ybg==","IHVubWFubmVk","IGludm9raW5nU3RhdGU=","IEV4b2R1cw==","X0FUT01JQw==","U3ViTWVudQ==","X2NvbXByZXNz","Iy4=","RHJ2","LnB1c2hCdXR0b24=","IHN1aXRjYXNl","b3NzZWQ=","Yml0cmFyeQ==","U25pcHBldA==","IEVwaWRlbWk=","RGlzYWxsb3c=","X0NISw==","IHZlcmlmaWVz","IENhdGFseXN0","4oCUZnJvbQ==","IGNvbnRhbWluYW50cw==","Sm9obm55","KGZpbA==","IGRlcmVu","IG91dGNyeQ==","IEpvaGFubg==","PFRhZw==","X3Nhbg==","IHN0ZGRldg==","IHBhcmFseXplZA==","IExleHVz","b3NhdGU=","IENoYXJzZXQ=","IFJlYWx0","PT8iLA==","KERlZmF1bHQ=","IFRyZWFzdXJlcg==","RWluZQ==","IHVudHJ1ZQ==","IGZpbmFuemk=","IG
JlaGF2aW91cmFs","IG5pcHBsZQ==","IFJhZGljYWw=","IFBheg==","IE1haXNvbg==","LWVtcGxveWVk","IHdlcmVsZA==","IGpvcw==","IERpZWQ=","ZW50cmVwcmlzZQ==","JHJvd3M=","IHNwb29m","IMK7Lg==","IGtleXBvaW50cw==","IGN1cGNha2Vz","IHt9KTsKCg==","Y2hpbmU=","4oCL4oCL","LExPQ0FUSU9O","IHBseXdvb2Q=","IG1hZ2c=","IFJhbw==","IERQUg==","IGVib29rcw==","KXNpemU=","IHNwZWNpYWxpc2Vk","I2Fl","IG1pY2hhZWw=","IFNURE9VVA==","IFBlbGw=","QU1FUkE=","YW5nZWxv","IGluZ2lu","IG1BdXRo","IGxlZ2FsaXpl","IEN1YW5kbw==","IGNlcnRv","IGxpdHJlcw==","IEV4dHJhcw==","U0hPUlQ=","IHByZW1hdHVyZWx5","IFNlbWFwaG9yZQ==","SEVO","IGFtcGhpYg==","IGjDqQ==","RXhpdGluZw==","ZXVpbGxleg==","IFRNUHJv","LnByZWZlcmVuY2Vz","LmdldEluZm8=","w6l0aWNh","IiIiLg==","Lm5ld0FycmF5TGlzdA==","IGtyb24=","IEJMTA==","Y2xpbmU=","X2di","IFRvbWFz","cHJvYmFudGU=","SVRJT05BTA==","4buRaQ==","IExvZA==","SXNu","LHsK","IGtvbW11bg==","d2R4","Z2Vub21l","6YCj","dG9IYXZlTGVuZ3Ro","J0U=","IHDDumJsaWNh","IERldGVjdGVk","IF8KCg==","0YzRjg==","K1M=","Y2xvdGg=","Um90b3I=","Lm51bWVybw==","X3N0YW5k","R0ND","6rU=","X3Zw","X0ZBUg==","QWhlYWQ=","e31c","KGNvcnJlY3Q=","ImNyeXB0bw==","bW9kdWxv","X1VUSUxT","LlZhcg==","LW1lbg==","IHZlbmlhbQ==","IE1jQ29ybQ==","Z2V0TG9jYXRpb24=","W2NvZGU=","JWY=","IGRpZmZlcmVk","SVBBZGRyZXNz","IFN0cmF3YmVycnk=","IFNhaGFyYQ==","Y3JlYXRlQ2xhc3M=","IS8=","IG1lbWJlcnNoaXBz","IHByb25vdW5jZQ==","LkNvbnN0cmFpbnQ=","IEVucm9sbG1lbnQ=","IHJlbmV3YWJsZXM=","Lmd0","aXp6aWU=","cnp5","ZXJzZW4=","PD0k","REVMQVk=","IHNpZ25pbg==","IFBTVQ==","QXBwTmFtZQ==","fVwuWw==","RUdB","IGNpZW50","IFN5bm9wc2lz","IGxldHRlclNwYWNpbmc=","IGNoaWxkcw==","IFNjYWxpbmc=","KXByZXBhcmU=","IGNvbW11dGVy","U2xhc2g=","b3VzZXI=","IHdhdGVybWFyaw==","IFVJU2NyZWVu","b2xpYW4=","CXZlcnRpY2Vz","PkFjdGlvbg==","IGFwaA==","aGFuZHM=","IE9DQw==","SFU=","IHNlY2x1ZGVk","IHZpc2NlcmFs","IHZpZGVvZw==","IFNhbXVyYWk=","IFp1aw==","IFdpZG93","YWNjaW5l","IGxpbGxl","IFJ5ZGVy","IFByb2dyYW1tZXI=","RXhwb3J0ZXI=","IG1vdmltaWVudG8=","YXBhcw==","IGxlaWRlcg==","dWxhcmVz","aWVtZQ==","LWRlbnNpdHk=","ZGVzY2VuZGluZw==","KElU
","IHNjcmFwZXI=","IGljZWJlcmc=","X0NSSVRJQ0FM","IGF1dGU=","X1N0eWxl","IE1BTA==","IEhlY3Rvcg==","LUNocmlzdGlhbg==","IGRpZmZlcmVudGlhdGVk","IEJpc29u","ICAgICAgIAk=","LnBvcHVsYXRpb24=","Umlv","LVRy","PVZhbHVl","IEx1ZnQ=","IEdpdWxpYW5p","55yf","Q291cG9u","IGhhY2llbmRv","44Od","cG9uY2U=","X3Jlc2lkdWFs","IGxp4buHdQ==","XHVmZg==","0L7QsdGF0L7QtNC40Lw=","IHJlc3BlY3Rv","IERlc2lyZWQ=","RGF0YVN0cmVhbQ==","LnNheA==","IG1vcA==","IEhhY2tlcg==","QU5UQQ==","QW5j","VmVudGE=","IFdvcmRwcmVzcw==","CWVmZmVjdA==","YWRhcHQ=","IEludGVydmlld3M=","IGRyYXdiYWNrcw==","QUxMRU5H","IGfDqW7DqXJhbA==","LWJhZGdl","UmVzaXN0YW5jZQ==","IE9TSQ==","dG91cm5hbWVudA==","IFJlcHV0YXRpb24=","IEVpc2VuaG93ZXI=","RmlsZWQ=","IGhlYnQ=","I1w=","Y3JlYXRlUXVlcnlCdWlsZGVy","5pyJ5pWI","dmFuY2Vk","Lkhhc0tleQ==","ZGRl","KHN0YXJ0VGltZQ==","IEluc3RhbGxlcg==","IEltcGw=","Y29hY2g=","IHByZWFjaGVk","IGJyZXdlZA==","SW5zdGFsbGVy","b2x2YWJsZQ==","IGFsYXM=","KHNwZWxs","IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIw==","IGRlZmFtYXRpb24=","KEFyZw==","IHVzZXJEZXRhaWxz","IGxpY2Vuc29ycw==","IEludmVzdGlnYXRpb25z","IGRpbmVy","IGZpY3Q=","U3RpY2s=","TmVpZ2hib3I=","dG9UaHJvdw==","LXNlY3Rvcg==","IHJpc3VsdA==","4oCZOg==","Sk5JRW52","eXBpY2Fs","ZGVzaWduYXRpb24=","KHdw","IGNvbmZpcm1QYXNzd29yZA==","LWlvcw==","ICItIjsK","CWFzc2VydE5vdE51bGw=","YWRkRXJyb3I=","YXZyYXM=","Vm0=","KGpRdWVyeQ==","IFZpY3RpbXM=","IHJlbGlhbnQ=","IEJsaXR6","IG91dGFnZQ==","IGZsdW9yaWRl","IFROVA==","LkRpc2NsYWltZXI=","IFNOTVA=","dmFibHk=","IHBob3RvbnM=","LlJlYWRBc1N0cmluZ0FzeW5j","U2NoZWR1bGVk","IGpld2lzaA==","IEdlb2ZmcmV5","IEdyYW5ueQ==","fgo=","LW1lc3NhZ2Vz","KGdvYWw=","IGFyZ2VudA==","IFBlc3Q=","IGNvbmdyYXR1bGF0ZQ==","aW5vc2F1cg==","IHdoaXNwZXJz","IHNpc3RlbWFz","IEbDqQ==","L0luZGV4","Lk1JTExJU0VDT05EUw==","IGFjaGlldmFibGU=","IEJyaXR0YW55","KysrKysrKysrKysrKysrKysrKysrKysrKysrKysrKys=","IFJldHVyblR5cGU=","IGluZml4","LmlzU3VjY2Vzcw==","LkNhdGVnb3JpZXM=","IG91dGxpZXI=","LkFzc2V0","b3RlYw==","IHdpemFyZHM=","IGJvb3Rsb2FkZXI=","X2Jlcg==","IHJlaGFiaWxpdA==","YW50b3I=","IFZpdm8=","IEdhcm1p
bg==","b2JqZWN0SWQ=","QFBhdGg=","IMO6bmljYQ==","IFlvcmtlcnM=","R3VpZElk","JGVycm9ycw==","ICs9Cg==","IGF4aW9t","IFBTSQ==","IFN1Y2M=","IFNwb2thbmU=","ICciLiRf","IExO","Lm5ld0xpbmU=","IGludGVyc2VjdHM=","bGljaGtlaXQ=","IElBTQ==","LkRyb3BEb3duSXRlbXM=","IGNvdXJ0ZW91cw==","IFNtaXRoc29uaWFu","IEhtbQ==","UURlYnVn","c3RyYWlnaHQ=","X3NvbGQ=","QnVsaw==","VHJpU3RhdGU=","IGFkZEJ1dHRvbg==","IEhpcmluZw==","VHJhbnNwb3Nl","IFVJVGV4dFZpZXc=","aXN0ZW5jaWE=","L2NwcA==","INC/0L7Qu9GP","IENvb2tib29r","L0FwcGxpY2F0aW9u","Z2VuaWM=","IFdvb0NvbW1lcmNl","LHZlY3Rvcg==","IEJpdGU=","Lmh3","IGRvY2tpbmc=","IFRhbnRyYQ==","IFNWQw==","IE1hdXJpdA==","aWFsaWFz","IEF1cmU=","IGJvbHM=","TE9DSVRZ","IFdlc3Ricm9vaw==","IEJQTQ==","IEZleQ==","IFNvdmVyZQ==","IHBhbmRh","IHF1aXp6ZXM=","IGNyZW8=","c3BlZWNo","L2Rpcg==","INC40YHQv9C+0LvRjNC30L7Qsg==","IGZvdW5kYXRpb25hbA==","LWFwcGVuZA==","blRoZQ==","IGFwaVVybA==","LlhQQVRI","IExpbmd1","IEV4aGF1c3Q=","UGFraXN0YW4=","IG9tYXA=","IGZvbnRTdHlsZQ==","0LXRgdGC0Lg=","IG1hbnNsYXVnaHRlcg==","X0xvbmc=","IGNhcnBldHM=","Q2hlc3M=","ZWxpZ2h0","RHJhd2VyVG9nZ2xl","IFBhdHR5","X2Nyb3NzZW50cm9weQ==","IHR3ZWFraW5n","0YLRgw==","IENBTEM=","c2lw","IEpNUA==","X19fX19fX19fX19fX19fX18KCg==","VHJlZVZpZXc=","LXdhdmU=","IHBhc3R1cmU=","ZWxpbWluYXI=","IGVyeQ==","IHJlc3RsZXNz","6rWs","IG1hcmlhZ2U=","IEVsbGll","Xz0n","IHZtaW4=","S2ljaw==","LnRvb2xib3g=","IE1hcmlubw==","eXBzeQ==","c3RkYXJn","cHRyZGlmZg==","IFBlYWtz","X1ZhbA==","IGluZ2VzdA==","IGNvbXBz","RGViZQ==","IERlY2xhcmF0aW9ucw==","aXJjb24=","PWFsbA==","LkRlYnVnZg==","UHJlZGljdGlvbg==","IGRhdQ==","KE1lbWJlcg==","IGNoaWVmbHk=","L2FuaW1hdGU=","LkF0dGFjaA==","IGdhc3RyaWM=","IFVzZXJEZXRhaWxz","w7ZyZW4=","a29h","LWJvb3Q=","IHNwbGljZQ==","bGVh","b3Rp","W29w","U3F1YXJlZA==","IHNjcm9sbFRv","IE5ld2ZvdW5kbGFuZA==","CUVSUk9S","V2Fs","RU1BTEU=","R2V0WQ==","IGNhYmlucw==","IGFic2w=","Lm1peGVy","IGNkcg==","Y29uY2VydA==","IFN5bHZpYQ==","Qks=","5LuK5bm0","X0NMQU1Q","0YHRgtGA0YPQutGC0L7RgA==","L2dhbWVz","xZN1cg==","PGxvY2F0aW9u","IGNsb3NlQnV0dG9u","IEhhaXJzdA==",
"4bqhbw==","IGNydW1ibGluZw==","IHN1bGZhdGU=","IGFsZ3VpZW4=","IEpEQkM=","IEt2","UElQ","X3N1cmY=","IHXFvHl0aw==","IG1hbm5lZA==","IE9jY2FzaW9uYWxseQ==","b2Jqcw==","TWluaW1hbA==","LWRlc3M=","IFdBVg==","IEVycm9ySGFuZGxlcg==","IHNldExvY2F0aW9u","IGlldHM=","IHN1YnJvdXRpbmU=","IHRvbmd1ZXM=","X3F1aXo=","TWlsbGVy","IEJhc2VUeXBl","IFZ1ZXg=","aXJhdGU=","U2VyaW91c2x5","dHlwZWlk","IGt1dGpl","IHByZXNjcmliaW5n","X3N1cnZleQ==","LkN0","IGJsaW5kbHk=","LmdldExhYmVs","LCIpOwo=","IHBvdHJ6ZQ==","IFN3b3Jkcw==","U29ydGFibGU=","IEJsYWNrYnVybg==","IE1hdGE=","IHBvbmRz","IHByb3Rlc3RvcnM=","IEVuc2VtYmxl","OmZvY3Vz","IGl0YWxpYW5h","IGRvcm1hbnQ=","IE5lbA==","SU5DTFVERQ==","KENvbnY=","IGJ1Zmxlbg==","IENETg==","LnhodG1s","SGRy","IGNhcmNpbm9tYQ==","IFdvcmNlc3Rlcg==","bmRs","dXNlUmFs","dXNlUmFsYXRpdmU=","dXNlUmFsYXRpdmVJbWFnZVBhdGg=","IHRha2Vhd2F5","ZWxlbWVudEd1aWRJZA==","LmxhYmVsWA==","W0lE","QUxFUg==","CXV2","PigpLT4=","L2xp","K2xlbg==","IHByb3BlbA==","IGNhYm8=","XCIiKTsK","IHZvY2F0aW9uYWw=","LXBpbGw=","Lm5sbQ==","IGVyb3RpY2E=","b3BvdA==","bGFuZHNjYXBl","aW5zaw==","IHBsYWNlbWVudHM=","LnNldEF1dG8=","IGhvbWljaWRlcw==","X0ZpZWxkT2Zmc2V0VGFibGU=","Omw=","IGFubm90YXRl","LXJpc2U=","LGFscGhh","IGludGVydmVuaW5n","YW1iaQ==","Lj0nPA==","IHBhcmxlcg==","772l772l","IGNvbXBseWluZw==","LWhhbmRsZQ==","IGludGVycnVwdGlvbnM=","cGxlcnM=","cm91cHM=","X0RlZg==","IHBpY2tlclZpZXc=","IHBpZXJjZWQ=","IGVyYWRpY2F0ZQ==","bW9ieA==","W3RyYWlu","RGVmZXJyZWQ=","IHRvdGFsZWQ=","Q2hpbGRJbmRleA==","IFJlY29tbWVuZGF0aW9ucw==","X1dPUkRT","IHNpZ25pZnk=","IEFlcm8=","X2Jvb3RzdHJhcA==","X1Vw","cHJvZHVjdE5hbWU=","LWFueQ==","IHBwbA==","X1BVVA==","IGx5b24=","X0lMaXN0","IMOpY3JpdA==","KGd1aWQ=","IGNvbnRhZ2lvdXM=","X1NlbGVjdGlvbg==","L2xhbmd1YWdl","cXVhbg==","IGFjdXB1bmN0dXJl","IG9mcmVjZQ==","CVJURQ==","Lkd1bmE=","IHNlbnNlZA==","IEtyYWs=","IHVubHVja3k=","YXZpYw==","dGl0bGVMYWJlbA==","IGhheXN0YWNr","LmJpdG1hcA==","IENvdW5zZWxpbmc=","UExBVEZPUk0=","X1Rvb2w=","VGFt","V2VyZQ==","0YDQsNC3","X1NQRQ==","IG9uQW5pbWF0aW9u","PTw/PSQ=","IFNsZQ==","IEd1aW5uZX
Nz","IHR3ZWFrZWQ=","LXByZXNzdXJl","X21vbnRocw==","KW8=","UHJvYmFiaWxpdHk=","IENhbXBvcw==","LkNPTkZJRw==","VmludGFnZQ==","PndpbmRvdw==","IEZhY3RvcnlCb3Q=","cG9zdGdyZXNxbA==","IHRhYmxldG9w","IENhdGE=","aG9j","X2FzYw==","4oKs4oCc","QmFja1N0YWNr","w6lv","IFNvdXM=","c2V0dGVy","JyldKQo=","dmVsbGU=","IEFsdW1pbml1bQ==","eEJB","Lm1vbmdv","IFZhcmlhdGlvbg==","eXR1dA==","bmVobWVy","4buDbQ==","IGVmZmVjdGVk","ICoqLw0K","IHJlY291bnRlZA==","UHJhY3RpY2U=","Q0FOQ0VM","Y3puaWU=","TGFycnk=","IHFh","IEh1ZmZtYW4=","Z2V0RHJhd2FibGU=","IGVuZnJlbnQ=","IG9uQ2FuY2VsbGVk","IGxlbw==","IFhTUw==","IEh1cnJpY2FuZXM=","IGpvbg==","IFRlc3RlZA==","IE1vcmFs","IGJlZHRpbWU=","IEpBRFg=","IGVjaGFuZw==","IG51ZXN0cmFz","UENN","KS4u","IOyImOyglQ==","IGJvcmRlcmxpbmU=","IGFzc2lzdGly","IEhlbHBz","IERpdmU=","X3NuZA==","d2l0","X2JsZW5k","IGlzRmlyc3Q=","IGhlYXBx","KCc9","IGFzc2VtYmxlcg==","IE15c3RpYw==","b3JnaA==","IGhpam9z","X0tIUg==","KGRlY29kZWQ=","IFFVSQ==","INeR","IGNvbnRyb2xJZA==","U3BhY2Vy","LmFnZ3JlZ2F0ZQ==","IHNoYWx0","X3RyYXA=","IEZhbWlsaWU=","zrg=","b3J0YQ==","LlBvc3RNYXBwaW5n","7LA=","ICcuLics","esOh","L2FybQ==","LmdhbGxlcnk=","IGltcGVjY2FibGU=","IHdpbmRvd0hlaWdodA==","c2xhY2s=","ZmZi","X3Fw","bGFkZW4=","IFRFUk0=","c2V0TGFiZWw=","IFNpbmdsZUNoaWxkU2Nyb2xsVmlldw==","ecO8aw==","IHB1bHVtaQ==","LWdhcA==","dW5pYWNpZA==","CWhvbGRlcg==","LmFkZEZpZWxk","IHRyaXBsZXM=","IEp1ZGdtZW50","IENlbmE=","cGFyc2Vycw==","LmRyYXdUZXh0","INC60LDQttC0","IGFjY3Q=","aGl2ZQ==","IG11c2lxdWU=","IFlheg==","LXBvc3Rz","IGZpbHM=","IC8vew0K","X3B1dHM=","IFN0YXR1ZQ==","ZGlhbW9uZA==","U3RvcmFnZVN5bmM=","IHNodXRz","IGdldHRpbWVvZmRheQ==","IEFBQkI=","aWNoZXJu","Z2V0TG9jYWxl","aW50cmVl","IGZydWl0ZnVs","QmVhcg==","IHBsdW1iZXI=","cWlk","Q0hJUA==","IG1vdGl2YXRpbmc=","IGVzY2FsYXRl","LmJ1bGs=","IFBsYXlncm91bmQ=","X21pcnJvcg==","IFBlZWw=","IGRhbmU=","aW52b2ljZXM=","SGFzQmVlblNldA==","LXZlcnRpY2Fs","IEZyYW5jZXNjbw==","IEFTQQ==","INC60L7Qu9C40YfQtdGB0YLQstC+","w6Bu","Rm91cnRo","IENyZWF0ZVRhYmxl","Y2N0b3I=","IGZyYW50aWM=","YWFi","IEthcmFjaGk=","X2ltYWc=
","IG5hdHV1cg==","RWF0","IHN0dW1w","IHJvbGxlcnM=","IHRyYWl0ZW1lbnQ=","INC/0YDQvtC0","IHJlYWxpc3RpY2FsbHk=","IGVQdWI=","IFphZw==","ZGFtbg==","IEFubmV4","cGVjaWVz","KGV4aXQ=","IHNwZWN0YXRvcg==","IEJ1bGdhcmlhbg==","IG1lZ2V0","IG1hdHVyZXM=","IGRldGVjdGlvbnM=","IHphaGw=","ZW5lZml0","YWtvdg==","IGFkdWx0b3M=","bWlkZGxld2FyZXM=","aXNPYmplY3Q=","S2Vubg==","IHVuZXRoaWNhbA==","c3VibmV0","R3JhcGhRTA==","IEdhZWw=","LkRyb3BvdXQ=","IGJ1cmVhdWNyYXRz","IFJlZGVtcHRpb24=","LkR0bw==","LkV2YWx1YXRl","IG9nZ2k=","IHRyYXRhbWllbnRv","IHJlY2FsbGluZw==","aXN0aW5ndWlzaA==","L3JlbGVhc2U=","X1dST05MWQ==","CW1rZGly","VHlwZUVudW0=","IERBUks=","5rWB","IFZhcG9y","IGF0b2w=","CWluc3Q=","LmApOwo=","L2Vs","IHJlY2xhaW1lZA==","w59lcmRlbQ==","X2xvc3Q=","IEFsYQ==","INC+0YjQuNCx","IEJhcnRo","Q29sb24=","b3Bvcg==","X3Bhc3N3ZA==","X2V4Y2x1ZGU=","QVBB","Zmxvd2Vycw==","IEVib29r","IFNUQQ==","VU5T","X0RJU1BBVENI","QUNJw5NO","dGVybWluYXRpb24=","IG5lc3RsZWQ=","YWRyYXRpYw==","Um93QW5pbWF0aW9u","X2tt","IHJvbmQ=","XV0+PC8=","5L2Z","IGNvc3BsYXk=","IG1pbGxlbm5pdW0=","X3NlcmlhbGl6ZQ==","IHZlcnNjaGllZGVuZW4=","YW50dA==","IEFtaWQ=","Y3JldGlvbg==","KT8k","IHRvd2luZw==","LmZpbA==","LkZpbGVXcml0ZXI=","IGFpcw==","IGVTcG9ydHM=","cHJ0","SVBB","LkZBTFNF","IHByaWNr","RW5kaW5n","IHByw6lzaWRlbnQ=","X2dseXBo","IHN1cHBsZW1lbnRlZA==","IGNvbnRhcg==","Ii4kXw==","IEJ1eWVycw==","dWph","IFRpbWVab25l","ZW5uZW50","SW5Qcm9ncmVzcw==","IFN1c3RhaW5hYmlsaXR5","IFByb3NwZXI=","Q29udG91cnM=","IHN0YXJ0bGVk","X2xlYXN0","IENvdmVudA==","Y2huaXR0","IE1pbGt5","ICItPg==","ZXRhaw==","IHR1c3Nlbg==","LXBheWluZw==","X2FjY2Vzc2libGU=","QmF0bWFu","KGl0cg==","SUFMSVpFRA==","IFRleHRBcmVh","YW5rZQ==","X0pVTVA=","IGJlaGF2ZWQ=","LG9wdGlvbnM=","eGl2","LlBMTA==","cXg=","Lm9uTmV4dA==","IHZlcmlmaWVy","IGR1xbw=","IEZ1a3VzaGltYQ==","IENPUlBPUkFUSU9O","X3RE","IE1lYWRvdw==","IHByb3llY3Rvcw==","ICgnXA==","IEJhcmNsYXlz","IGxlZ2FsaXR5","IGhhbWJ1cmdlcg==","IGVpbnM=","SW5kaWFuYQ==","IFRLZXk=","Y2xvYWs=","PGFsZ29yaXRobQ==","IHByZWFjaGVy","e2xuZw==","LmFydGljbGVz","c2V0SW1hZ2U=","UmVu
YW1l","IGJsb3Nzb20=","IEJsb3Nz","IHV1cg==","IGRhZHM=","IFRpdGFuaWM=","ICAgICAgICANCg0K","IG9yZGluYW5jZXM=","IG3DpG5u","IGVyaw==","IGRpc3RpbGxlZA==","IMOkbA==","IHJ1cHR1cmU=","IENhbWVyYXM=","w7luZw==","IGhhaXJzdHlsZXM=","IGVtYnJ5b3M=","4oCdCg==","Lk5hdg==","IHN0cm0=","CXVzYWdl","LkFJ","IFRPVUNI","IElsbGVnYWxBY2Nlc3NFeGNlcHRpb24=","6rKw","a29uZWtzaQ==","ISIp","IGVzY2Fw","dWRpb3M=","c3RhcnR0aW1l","IG1laW5lbQ==","IFNwaXJhbA==","IEVyZWN0aWxl","aXZhbGVuY2U=","IGl0ZW1UeXBl","IGFiYWl4bw==","VmVydHM=","dGFraW5n","cHN0","IE9zY2Fycw==","IER4","ZXR0eQ==","TUFM","IE5lZWRsZQ==","IENPTVBVVEVS","5Lu75Yqh","IG5ld1g=","ICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAK","cGxldmVs","QUNFTUVOVA==","IEpvaGFu","UG9pbnRG","IHJlc3Ryb29t","dmVybw==","IGVsxZE=","cHJvZHVr","IFlFQVJT","CWFjdHVhbA==","VVBMRQ==","Q29udmVydGlibGU=","IHBvcnJm","SW5qZWN0ZWQ=","X2JvdGg=","L0dhdGU=","Y2FsY3VsYXRvcg==","ZW1haWxlcg==","LlBvZA==","IFpvdA==","X3NtYXJ0","YmFzaXM=","PENvbG9y","IGNyYXZpbmdz","RHJpdmVycw==","KGNvcw==","ZGF0YWJsZQ==","LW1ldGFs","IFBj","LmNvcHlPZg==","IG9yaWVudGF0aW9ucw==","CWFzdA==","IFpvbWJpZXM=","IGJvbWJlZA==","SG9zdG5hbWU=","X3JhaXNlcw==","bWVuc2FnZW0=","IGNvcnRpc29s","IEZpb25h","bGljb3M=","aGVhdnk=","IOqwgOyguA==","b21lbmNs","IGN1bHR1cmVk","IGFydGlrZWw=","xaHDrQ==","amRr","IHZhbmRhbGlzbQ==","IH1dKTsK","U3RyYWlnaHQ=","IHJlaGVhcnNhbA==","RWRpdGlvbg==","IEluc3Bpcg==","CXdj","IGZvcm11bGF0ZQ==","YW56ZWlnZW4=","IHBhdGhvbG9naWNhbA==","IGtlbm5lbmxlcm5lbg==","Pnsi","IGRpY2Vk","IGJyYWNlbGV0cw==","CQkgICAgCg==","Kj4q","L3RhcmdldA==","LkFnZW50","Lm1hZ2lj","IGlkZW9sb2dpZXM=","VFJBQ0s=","X2luZGl2aWR1YWw=","PGRlY2x0eXBl","IFJFQ0VJVkU=","L2Jvb3Q=","OkB7","UU0=","IE1hbmRhbA==","TkFNRVNQQUNF","IHRlcmNlcg==","IFJlZ2dpZQ==","IE5pY2hvbHNvbg==","IEZ1bHRvbg==","c3Rha2luZw==","IHJlc29uYXRl","bHBhcnI=","IGNvbnZlcnRlcnM=","ICgiLw==","IE1hcmxpbnM=","SW5mb3JtZQ==","Jz0+Wyc=","IHJvYmVydA==","IEhJTQ==","d2Vicw==","LnRyYWlsaW5nQW5jaG9y","LmFzY2lp","IE1hc2M=","IHRlY2hubw==","ZXR4dA==","CSAgICAgICAgCg==","zrHOuQ=="
,"KFNlcQ==","ID8+Ojwv","IFBlYg==","W3NlbGVjdGVk","SkVDVEVE","Q2FzdEV4Y2VwdGlvbg==","P2Y=","IGV5ZXdpdG5lc3M=","IG1lbm8=","IERhbWllbg==","X0lFbnVtZXJhdG9y","IC4uLi4uLi4uLi4uLi4uLi4=","LlNFTEVDVA==","IGNyYXk=","X3BhcGVy","LlJvbGxiYWNr","SURFT1M=","cnBhcnI=","aW5lYXI=","X1JlbA==","IFdpbGRl","IFdvbmRlcmxhbmQ=","IFNodWZmbGU=","IHN0cmlrZW91dHM=","c2lnbW9pZA==","ISgiew==","ZXBhbQ==","IHJpY2huZXNz","IGVuZGVhdm91cg==","bWVudUl0ZW0=","INCf0L7Qu9GD0Yc=","IGZydXN0cmF0aW9ucw==","X3N1YnNjcmliZQ==","IGJvb3pl","IExpY2h0","IHBlYXNhbnQ=","IHdlaWdodGluZw==","IOW/","QWN0aW9uQ29kZQ==","LnRyYWNrcw==","IMOY","IG1pbGxpb25haXJl","KHVy","J10pCgoK","ICIuJF8=","X0VERUZBVUxU","IGN1cmxz","X0NvbUNhbGxhYmxlV3JhcHBlcg==","LnNldFZpZXdwb3J0","IGRlbmQ=","IGF1dG91cg==","IEZvdXJpZXI=","IGJvaWxz","IEpQRw==","IGRpZ3M=","IGNvbXBsYWlucw==","LWxpbmVk","IEJsYWRlcw==","X2RpY3Rz","IElwcw==","cmVmZXJlcg==","IGFueWhvdw==","YW50YXI=","LXNoZWV0","CXBsYXk=","aWVyY2U=","Lk1lc3NhZ2luZw==","6KeB","CXByb2dyZXNz","LkRhdGFWaXN1YWxpemF0aW9u","IFN0b3Bz","SW50ZXJ2YWxTaW5jZQ==","QGJyaWVm","LndpbmQ=","IGdldElucHV0","IEtB","IFJFU1BPTlM=","IHRhcmc=","dmlzdWFsaXphdGlvbg==","IEVzcGHDsQ==","bmllcg==","IERvdmU=","X2lzcg==","IEFQUExZ","YmVkbw==","W117Cg==","IGV2YWN1YXRl","IG1pY3Jvc2NvcGlj","5q2j56Gu","ZXJvdA==","LW9wZXJhdGl2ZQ==","aWt1dA==","IGRibA==","IGFqb3V0","Lml4","ICAgICAgICAKICAgIAo=","dGVzdGU=","bml2ZWw=","LnNuYXA=","dXR6dA==","LmlzQWRtaW4=","KElD","IG9iZW4=","IEVmZmljaWVudA==","RERldmljZQ==","IGluZGVtbg==","IGZyb3pl","LHJw","IGRlY2VtYmVy","57uZ","IG1lbG9kaWVz","IEVUQQ==","44GT44KT44Gr44Gh44Gv","IHF1YWxjaGU=","IHNldERlZmF1bHRDbG9zZU9wZXJhdGlvbg==","T1JJQQ==","IHphZw==","IGFsbG93YW5jZXM=","L3Bo","LVRva2Vu","IFBvdQ==","IG1pbmlzdHJpZXM=","LkxPR0lO","IHNlYXJjaFRlcm0=","IGh1cnJpY2FuZXM=","IEZsb3Vy","IFNVUw==","VGhlbWVz","cmVlY2U=","IGVudHJldg==","RFhWRUNUT1I=","IEJyZW5kYQ==","RXJyb3JNc2c=","OildOwo=","IGRvbWluYQ==","IEludmlzaWJsZQ==","PD4oIg==","cHV0Yw==","SEFWRQ==","RXZhbHVhdG9y","bWF0Y2hpbmc=","LW5hbWVz","IGxhaA==","X1lVVg==","5p
yN5Yqh5Zmo","LldSSVRF","KTpc","LWRlZmluaXRpb24=","IGNoaW1uZXk=","LmNscw==","a25vd2xlZGdl","IEFsZXhhbmRyZQ==","IGNvbGVn","b8WbY2k=","LkNobw==","IHNvZnRlbmVk","IHJvdGF0ZXM=","LXN0YXRlcw==","6rc=","dmlvbGVudA==","IDopCg==","IGFjY2nDs24=","bmlrYQ==","IExhdHRlcg==","X0Zsb2F0","IGVncmVnaW91cw==","b2RpYWw=","U3lub3BzaXM=","KHhp","IH0sew==","Y3h4","RW1tYQ==","IENvbmN1cnJlbnRIYXNoTWFw","X0NhbWVyYQ==","IHBlYW51dHM=","44Kz44Oh44Oz44OI","X2JlZA==","IGVycm9yQ2FsbGJhY2s=","IFBhcHVh","LFRydWU=","tpo=","IHN0YWRpdW1z","IGtub2Jz","aWZpY2FjaW9uZXM=","IHB1cnBvc2VseQ==","IFB1cmVDb21wb25lbnQ=","INC60LvQuA==","LlRyYWNr","c3Nj","KEpvYg==","KEh0dHBDb250ZXh0","IGNob2lzaXI=","IOy7","IGF1c3A=","dXBwZW4=","QWR2ZW50dXJl","IEZMQUM=","IGFwcGVsbGFudA==","ICgoIg==","z4c=","IHRyaWY=","IGR1cmF0aW9ucw==","IE5HWA==","LmJw","YWN0aW9uRGF0ZQ==","Lmluc3RhbnQ=","LVJlcXVlc3RlZA==","JyYm","INGH0LXRgA==","PWJvb2w=","IGxvcmRz","bGljaW5n","IG1hcmlu","IGJsaW5kZWQ=","L2xheW91dHM=","ZmVpdG8=","aXp6bGluZw==","RXZ0","IGJ1bGxpc2g=","ZXhjbHVzaXZl","4oCZZXM=","LmdldE93blByb3BlcnR5RGVzY3JpcHRvcg==","IGJhcHRpemVk","INGB0LvRg9GH","IENlY2ls","LmVmZmVjdHM=","IGNyeXB0b2dyYXBoaWM=","IFZpbGxl","dWZ0","IEFudGhlbQ==","IHNlZWtlcg==","IG5pY2tuYW1lZA==","IGNhbXBncm91bmQ=","IGFjdGlvbkJhcg==","IEVwaXNvZGVz","IC0tLS0tLS0tCg==","QnVpbGRlckZhY3Rvcnk=","X1VOU1VQUE9SVEVE","VklMTEU=","LlJlZ2lzdHJ5","VG9uaWdodA==","IG1ha3M=","IGFkZG9ucw==","IERlY3J5cHQ=","LnNraWxscw==","KGZo","IGp1Z2c=","IENvdXBsZXM=","IEFtaXI=","ID09PT09PT09PT0=","IGVuZGVyZWNv","LlN0cmluZ3M=","IGhhcm1pbmc=","IGJ1c3RsaW5n","KGZpcnN0TmFtZQ==","LnNwYXJzZQ==","SVRP","ICAgICAgICAgICAgICANCg==","5p2l5rqQ","b2RlZ2E=","YW5hZ2Fu","LkhhbmRsZXJGdW5j","IHRpbmRlcg==","ICMo","IGltYWdpbmFibGU=","IGF1bg==","UHJlc2VuY2U=","UGFja2FnZU1hbmFnZXI=","IGx1ZGljcm91cw==","acOobWU=","IGdldE9iamVjdA==","Ym94aW5n","IHNxdWlk","w6p0ZXM=","RGFlbW9u","X2xpa2Vz","hrU=","Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0
tLS0tLS0tLS0t","Lnd3dw==","c3NlbA==","ZXRlY3Rpb25z","ZGFl","L2Rvd25sb2Fkcw==","IENsYXNzaWZpZXI=","X1NVQkpFQ1Q=","emVnbw==","X0dST1VQUw==","YWN0aWNlcw==","X2xpdGU=","IGRhbm1hcms=","L2Js","YXB5cnVz","VElNRVI=","IFNjcmlwdHVyZXM=","0Y/Rgg==","c3Bh","Ikc=","IHBlbmV0cmF0aW5n","IGNvbmZvcm1pdHk=","bmV3bGluZQ==","IGx5bg==","IE1NUA==","IElOVEVSRkFDRQ==","IEFjdGlvblR5cGVz","LmNyaXRlcmlh","4buRbmc=","IHJlc3RpdHV0aW9u","CUZPUg==","PHBhdGg=","PT8iOwo=","KHBlcmNlbnQ=","bmRv","IEFDTQ==","CWN0","QGE=","IHTDug==","IHNwb3R0aW5n","w7xybg==","IEdFUg==","LndyaXRlVmFsdWU=","X2Jsb2NrZWQ=","WW1k","IGluZWZm","IFJhZGlhdGlvbg==","IE9pbGVycw==","QmVlcg==","cm90cw==","IFRyb3Q=","cm5h","cG9ydGVy","ZW5lcnk=","IHBvcm5vZmlsbQ==","65SU","X2Nr","LkNvbXB1dGU=","IFtdCgoK","Z2l1bQ==","IFRFTEU=","IEluc3RhbmNlcw==","Kkk=","IHdpcmVUeXBl","b25pdW0=","ZXNoaXJl","IHB1dGNoYXI=","IGF3YWtlbmVk","LmRlZ3JlZQ==","aGVpdGVu","LWF3YWl0ZWQ=","IG5ldXJvdHJhbnM=","LXRlc3RpZA==","CgogICAgCg==","IOe7kw==","IGtpbm8=","X0RBWVM=","IFZhbGVyaWU=","bnRpdHk=","QEJlYW4=","ZXRDb2Rl","PFJlbmRlcmVy","IiIK","IGJlcm4=","IHRvdGFsaXRhcmlhbg==","Y2xpbmlj","IE3DvG5jaGVu","bm9pbnNwZWN0aW9u","aXNjZQ==","X3R1cGxlcw==","LlBvaW50cw==","IHBhc3RvcmFs","SmFr","a2VuaW5n","L2NvbHVtbg==","LXByb2R1Y2luZw==","IGFib2xpc2g=","ZmVhcw==","cmVzcG9uc2VEYXRh","cmVkaXJlY3RUb1JvdXRl","IG9ic2VydmF0aW9uYWw=","cE5leHQ=","enRl","Q2hvaWNlcw==","CUxDRA==","JlM=","IGJpbGxpb25haXJlcw==","X0VPRg==","IGNvaG9ydHM=","YW5rZW4=","LmNvbWJpbmU=","KE9wdGlvbmFs","X0NPTlNPTEU=","QWN0aXZpdHlJbmRpY2F0b3JWaWV3","IHBoYXJtYWNpc3Q=","IERvdWdo","IE9wZXJhdGlvbmFs","57I=","IGphbXM=","U29sbw==","CWR1cmF0aW9u","LnJt","IFRvbmk=","LmxlYXZl","IHB1ZWRh","IEZheQ==","RGV0YWNo","Lk1heGltaXplQm94","IG1hcnR5cg==","IGhhemU=","L25l","IG1hbW1h","c2VsZWN0b3JNZXRob2Q=","IHBpbGdyaW1hZ2U=","IEFzcGhhbHQ=","IHZhbGlkbw==","RW5kRWxlbWVudA==","IGxhcHNl","ID09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT0K","aWxvcw==","ZXJuYWxz","Q29ubmVjdGlvbkZhY3Rvcnk=","IExvd
mluZw==","LkNvbXBpbGU=","IGNvcms=","IEJ5ZQ==","aWJOYW1lT3JOaWw=","ZXN0YXI=","XEdlbmVyYXRlZFZhbHVl","KExM","IFJhaXNlUHJvcGVydHlDaGFuZ2Vk","IElyYW5pYW5z","IGdldFByaWNl","bWFyaWVz","anVtYm90cm9u","IFJlYmVscw==","RElGRg==","IE1vag==","b3J0aWM=","CWNvbnN0ZXhwcg==","bnRw","IG1hZ2ljaWFu","IHBhdHJpb3Rpc20=","LmNl","LlNpbXBsZUJ1dHRvbg==","IFBSSVY=","aGlzdG9pcmU=","aGlnaGVy","cmVmaXhlcg==","Q0pL","IE9zd2FsZA==","LnNwcml0ZXM=","Lkls","IGFyY2FuZQ==","IENodW4=","X09m","IGV2ZXJ5dGltZQ==","0Y7RiQ==","IGxldHJhcw==","aWxhbg==","YmFydQ==","LWJvdA==","IFNpZ25pZmljYW50","iOyKteuLiOuLpA==","4oCM","LWlzc3Vl","IGluc2FuZWx5","YXRlZ2lj","X1ZF","OkNHUG9pbnQ=","TWFya3M=","LnByb2JsZW0=","J10uJy8=","IHJlZHVuZGFuY3k=","IGRlY3J5cHRpb24=","SHVuZw==","LXZhbGlkYXRl","IEFuZ2Vsbw==","Sk0=","IHBvcG92ZXI=","ZGViaXQ=","Q29tcHV0ZWRTdHlsZQ==","KV9f","KHNpbg==","ICcpLA==","KGRlZnZhcg==","w7R0ZQ==","VGhhbk9yRXF1YWxUbw==","Lnpo","KE5vdGU=","aWJCdW5kbGVPck5pbA==","IFNvbmlh","eW1vdXM=","44CCPA==","IGZpbG15","IGVhcnRobHk=","IExlYXJuZWQ=","W3NlY3Rpb24=","Lmpzb3Vw","c3RydXA=","IFBhdHJvbg==","ICkq","c2V0Rm9udA==","IGhlZw==","IGRlbHRhWQ==","X1NDUg==","LmN1dA==","IHZiQ3JMZg==","Lk9iamVjdE1hcHBlcg==","IHLDqXBvbnNl","WXU=","KCl7fQoK","LXBhcmFtZXRlcg==","xLFzxLE=","aWF6emE=","SVpFUw==","X1NVUFBMWQ==","a2l0cw==","IHJlaW5z","KGRvY3M=","JSE=","IHN5c3RlbWN0bA==","IFBzcg==","IFdlcms=","UGhpbGFkZWxwaGlh","QlJFQUs=","LmFwcGVuZFRv","KGxvbg==","QWJy","L3JlbmRlcmVy","IEVsZWFub3I=","Q0VSVA==","UGFyYW1ldGVyVmFsdWU=","JGdldA==","IOCy","IEpM","IGlnbml0ZQ==","IGLhuqFu","IENhdWw=","IGhhc3Rl","IGRvbWluZ28=","VGVzbGE=","L2NvbmZpZ3VyYXRpb24=","KGV4cGVjdA==","dXNyYQ==","IHByZWZlY3Q=","IGZyb2dz","IGFzc2lnbmFibGU=","IGludGVydmVuZWQ=","LmNob2ljZXM=","VUlTdG9yeWJvYXJkU2VndWU=","IGLDqQ==","IEzDtnM=","YWxwaGFiZXQ=","IHByZWFtYmxl","ZGJh","IGVtaXR0aW5n","Lm1vcmU=","IEJhc2Vs","KGRhdGVUaW1l","KCl9KTsK","IG5vZGVMaXN0","IEZQR0E=","d2Vs","IGxvZGFzaA==","X2F1dGhlbnRpY2F0aW9u","w7NyaW8=","KHJ1bnRpbWU=","X1NDRU5F","IGN1ZmZz","IEFkcmVzc2U=","Ojw/","X2NtZHM
=","VMOqbg==","IGVqZWN0","CUVSUg==","PE8=","IEtyYW1lcg==","4oCmCg==","c29tZW9uZQ==","IENQTA==","77yN","bG9ja2luZw==","LkZvb3Rlcg==","IGFsbQ==","IEFkb2xm","KS4v","IE1hdHRoaWFz","ICIsIgo=","ZW51aXR5","IExvdmVy","IGFsaW1lbnRvcw==","cGxldHM=","w6R0emU=","KHJlY3Y=","dXJhYQ==","U1RET1VU","YW50eg==","LkZsb2F0VGVuc29y","IFJhZQ==","cGln","IHRlcnVn","IHRoZW9sb2c=","IHRheGlz","Y29tcG9zaXRl","c2hlcg==","bGVEYg==","IFJhaG1lbg==","IDst","SW5kZW50ZWQ=","IHRyb2xsaW5n","RVJJQ0FO","Z2V0RW1haWw=","X0VOQ09ERQ==","Z2V0Q2VsbA==","IFdyYXRo","KHN1aXRl","bm90RW1wdHk=","LmdldFJpZ2h0","IGJyZWF0aGFibGU=","44Gf44Gg","IHNldFRpbWU=","J29wdGlvbnM=","IHBheWxvYWRz","YXVnYQ==","ZWRt","KHdlYXRoZXI=","CXNlbQ==","KGZyb250","IHBheW91dHM=","LnNldFRleHR1cmU=","LFtdLA==","IFBhY2tz","IGNhenpv","V2l0aFBhdGg=","UHJvZw==","bW1hcw==","IGtvaw==","LkNzcw==","IGRlbGE=","QXdhcmQ=","w7xsdA==","c291cA==","KFsoJw==","b2xsaXBvcA==","LFNMT1Q=","Y2hpYQ==","IGJsYW5jbw==","T0xVVEU=","LXBsYW5l","LExpc3Q=","eGluZw==","SU1BVEU=","LW1vcnQ=","IGdyYXZpZA==","IEhhbmdpbmc=","IHNjb2Zm","Lml0ZW1JZA==","VEhFTg==","aW5mZXI=","IG1pc3BsYWNlZA==","CU1vbm8=","d2F5bmU=","IGVkZ2Vk","X25pY2s=","IE1BUlQ=","CXN0YXRlbWVudA==","IEV2ZW50QnVz","PkFib3V0","IGJ1cmdlb25pbmc=","IGNpY2xv","TE9PUA==","IGRlZnk=","IGVsZW1lbnRUeXBl","IGNvbnNlcnZhdGlzbQ==","V2ViSG9zdA==","LkRpc2FibGVk","IGNsYXA=","IEFsZWtz","cm9yaW5n","aXNzaW9uYWw=","LUJvbGQ=","SVJUSA==","Lml0ZW1WaWV3","cWluZw==","P2tleQ==","IFZlbm9t","IGFudGlk","IEZvcm1hdHRpbmc=","UVB1c2hCdXR0b24=","IEFzc2VtYmx5VGl0bGU=","X3Jlc2VydmU=","LkRpcmVjdA==","QW5pbWU=","IG1hdGVyaWFsbHk=","IGFkanVuY3Q=","LnNldFRvb2xUaXBUZXh0","bGFzc2lhbg==","KG5y","IG5pbmfDum4=","IG1pc3VuZGVyc3RhbmQ=","IEFwcGx5aW5n","X2NvbXBhdA==","IG1peGlu","IGplb3BhcmR5","0YvQstCw0LXQvA==","IGNvY2luYQ==","X1dST05H","QVRBUg==","S0Q=","IGNhdGVnb3J5TmFtZQ==","SHR0cENvbnRleHQ=","IGJ1YmI=","IGFua2xlcw==","b3dlcmluZw==","RnJhbWV3b3Jrcw==","IHNlZ3VuZG9z","LkFzc2VtYmx5","X0VudGl0eQ==","SFE=","IGZvdXJz","IGZvcmZlaXR1cmU=","dmxhbg==","LWRvbWluYXRlZA==","LWF
3YXk=","SUNJRU5U","LlJlYWRCeXRl","YW1heA==","Lj0iPA==","X3Nwcml0ZXM=","IFJlbWFpbmluZw==","TE9PRA==","X3JlcXVpcmVtZW50cw==","J2FydGljbGU=","IFBvbXBlbw==","IHTDqXI=","IERyb3Bz","SG9tZUFz","SG9tZUFzVXA=","w7ph","Lm5hc2E=","X2Jpbw==","IFlvc2hp","RWxlY3Ryb25pYw==","IGpvc2U=","IGludGVsaWc=","ID8+Pjw/","PnshIQ==","X3Byb3Y=","PURC","PCEtLQo=","LWZsb2F0aW5n","eXVt","LkpNZW51SXRlbQ==","IE5hdGlvbndpZGU=","SW1wb3NzaWJsZQ==","6K+m5oOF","SmVycnk=","IGRlc2Nhcmdhcg==","7JW8","RGVjcnlwdA==","IHRlbXBlcmVk","IGVrcw==","w61jaWE=","Lmxhcmdl","IHVuZm9sZHM=","IGh2ZXI=","IEFWTA==","LnR0","4oKA","PSUu","IHRvcHBpbmdz","IHN0b3V0","IHNlbWluYWw=","eGVz","IE9VVEVS","YWRybw==","IHlvaw==","IERlcmU=","CWZyZW9wZW4=","X2xuZw==","Q2h1bmtz","LmdldE9yRWxzZQ==","KGVsbQ==","ICgpKTsKCg==","Q2VsZWJy","X2NhcGFiaWxpdHk=","IHNvY2llZGFk","IGludGltaWRhdGU=","IEJsYXplcnM=","aWd0aA==","ZW5kY29kZQ==","VUlMREVS","IEhhbm5pdHk=","IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0K","INC40YHQv9C+0LvRjNC3","IFRvb2s=","IE1vdmVk","IHByb250bw==","IE1hcnRpbnM=","RGF0YUV4Y2hhbmdl","LlBvb2w=","ZXVz","IGpvYklk","IEF4ZXM=","IGhhbXN0cmluZw==","LnJtaQ==","RGF0YVRhc2s=","IE1hZ2ljTW9jaw==","IEdBUw==","IE5hdw==","IHNuZWw=","X3NjZW5hcmlv","IGVtYWlsQWRkcmVzcw==","IE11c3M=","IHBob2VuaXg=","IGRlbnNpdGllcw==","IE1hY09T","cmVtYQ==","IHRlc3RlcnM=","KT87Cgo=","IHB1cHM=","bGFwcw==","ZGRi","L1BlYWs=","IGJhY2tzdGFnZQ==","IGJhY2tCdXR0b24=","KG5hdg==","eEFF","c3RyY3B5","aWNodGV0","IFJpZg==","4LiB4Lij","IGhvbm91cmVk","IGdyYXBwbGluZw==","VmVydGV4QnVmZmVy","LmdldEFjY291bnQ=","LU5ldw==","IG9wcHJlc3M=","IHV0dGVyZWQ=","IFVTQUdF","X0xFQVZF","X2NvbGxlY3Rpb25z","X1V0aWw=","KCIiKSk7Cg==","IHF1aWV0ZXI=","YCksCg==","IHR5cGVJZA==","IHNlcmlm","c3RhbGs=","IHByaW1hcnlTdGFnZQ==","eEVB","Ok5TTGF5b3V0","X1JC","X0FQUFM=","U0tV","KnNjYWxl","IENvdWdhcg==","CVJFVFVSTg==","aWZpw6k=","dGltaW5n","IGlkb2xz","656Y7Iqk","4oCUaWY=","KGZvcm1hdHRlcg==","IGFtYWxn","c2V0V2lkdGg=","LG1pZA==","b3JlYWw=","LlJvbGVz","IGRldmVs","IGdldEluZG
V4","IHN0b29scw==","IHNub3d5","IGdyYW5kaQ==","0Y/QtdC8","aWd1aWVudGU=","0LrQvtCy","IEN1dHRlcg==","cm9zY29wZQ==","YWlyYQ==","0YPRgNGB","IHRhYmVs","IGRlZmlhbmNl","LlRvQm9vbGVhbg==","IHBlcmc=","LWNvbW11bml0eQ==","IHB1cnN1aXRz","KG1ldHJpY3M=","TXVzbGlt","IFJpeWFkaA==","IOKCuQ==","LldlYkVsZW1lbnQ=","IEhhcmRlbg==","IENvcnJ1cHRpb24=","IEFl","IFRhbm5lcg==","IGluZGVi","IENoYXJnaW5n","X1BST0Q=","IOKTmA==","IGNlbnRlclg=","dHlwaW5n","IHV4","IFRvZQ==","CWxvb3A=","Zmxv","UmVnaW9uYWw=","X2Fh","IHZpZXdwb2ludHM=","PnRoaXM=","LXJlc291cmNlcw==","IEltYW0=","IFNoaXY=","IGFuZHJh","UkVRVUlSRUQ=","IHNlZWRlZA==","dW1vbnQ=","IHRvYXN0ZXI=","IGhvbWVzY2hvb2w=","24zYsQ==","X2V4dHJhY3Rvcg==","bW9kZXM=","IE11bmRv","X2ZpcmVzdG9yZQ==","IHB1bmlzaG1lbnRz","IGJvcmVkb20=","anVyaWVz","LlNhZmU=","YW1iaXF1ZQ==","IGFkdmVyc2l0eQ==","VUxFUg==","IGFuYWxzZXg=","bW9ycGg=","IE9tbg==","KCkiPgo=","IEdJVkVO","U3o=","IG5vdW5z","IHF1YW0=","IFdpa2ltZWRpYQ==","IGR6aWV3Y3o=","LmNvbW11bmlj","Q291cmllcg==","Qm9uZA==","LmNvbW11bmljYXRpb24=","LlByZWZlcmVuY2U=","c2xpZGVEb3du","L2djYw==","IHZpYmVz","QVBJVmlldw==","IE92ZXJzaWdodA==","X3Zr","IGVtcHJlcw==","IGFyaXNlbg==","ICovKQ==","KCcoJw==","IGJ0dw==","IGNvbmV4acOzbg==","IFV6YmVr","IOyEnA==","IGltYWdlVVJM","44Kq","c3RvcHBlZA==","IFdvdWxkbg==","IENoZXc=","Z3LDqQ==","IHRydXRoZnVs","IFRyYW5zcGFyZW50","KHNlcnY=","IE1jS2F5","PXJlYWQ=","IFNhbw==","CUdyaWQ=","IGluZHVjZXM=","Lmxpc3RGaWxlcw==","IGNhcnJlcmE=","IGljb25OYW1l","IENhcmx0b24=","LkV2ZW50VHlwZQ==","IGRyYXBlZA==","X1NBTVBMRVM=","KGVzdA==","IFJ1aXo=","IGNhcHRhaW5z","IG1hZmlh","IFJhcGhhZWw=","IEdBUA==","aW1wYW4=","Y29taWM=","IG1hbnRlbg==","JEw=","IGFmdGVybWFya2V0","15c=","IENm","CXRpbGU=","QXBwU3RhdGU=","IHdob2xlc2FsZXJz","bG93ZXN0","RGVtb2NyYXRpYw==","IHBvd2VyaW5n","YXBvdA==","IENvcnRleA==","KHNpbmdsZQ==","b3BoeXNpY2Fs","LnV0Zg==","77yf44CN","IHRhcmVh","RXF1aXA=","IGtsaWs=","IHJ1YQ==","IGFWYWx1ZQ==","IE1pbmVy","IFZlZw==","YW55bA==","Q293","QGM=","X0xPQURFRA==","IEFITA==","d2FrZQ==","LkxvZ0luZm9ybWF0aW9u","KGNhdGVnb3JpZXM=","IFFVR
VNUSU9O","LnVtbA==","IENyZWF0ZU1hcA==","bWVlcg==","IHJlbmNvbnRyZXI=","X3N1","IGF0bGVhc3Q=","KFByb3BlcnR5TmFtZQ==","IFlhbw==","IEhhdXB0","QmxvY2tTaXpl","IFNBQw==","IExlZ3M=","Yml0ZQ==","IGxvZ2FyaXRo","IElNZXNzYWdl","QmFja2Ryb3A=","IGdkaw==","7Jy866m0","LmV4Y2x1ZGU=","QURPUw==","LXNoaWZ0","YXRobGV0ZQ==","X2NvbWJpbmVk","IHJlYmF0ZQ==","IHBhcmQ=","IGltcGVkYW5jZQ==","cmVhdQ==","Xw0KDQo=","IGRhZ2Vu","a2VsYXM=","IGluZ3Jlc2Fy","IEJSQU5E","Lm1rZGlycw==","IHJlaWduaW5n","VGFsa2luZw==","LyoqCgo=","X1JFU09VUkNFUw==","IFBST0dNRU0=","IGRhdGFTaXpl","44Og","ZGVueQ==","SVJT","IHRlbGV2aXM=","PV8oJw==","ZWdpcw==","PD8s","IHVwc2V0dGluZw==","IHNhdWNlcw==","IHB1ZXJ0bw==","IFZvZ3Vl","aWRpbmU=","IEdyZWVud29vZA==","emlvbg==","L3F0","5bGA","Lmxhbmd1YWdlcw==","IFBsYXlib3k=","b25uZW1lbnQ=","IFBvc2l0aW9uZWQ=","IOS4uw==","IEZyaXR6","SW5pdGlhbGx5","bm9kZVZhbHVl","X1RSSUFOR0xFUw==","LWJhY2tlbmQ=","dG9JU09TdHJpbmc=","IEdvdmVybm9ycw==","WUxPTg==","Lk9SREVS","RE9J","IENoZXZyb24=","IGRlY2tpbmc=","IFNoYXJpYQ==","b3RoZXJtYWw=","RW1wdHlFbnRyaWVz","KEluaXRpYWxpemVk","ZG9yZg==","Lmx1","KFJvb20=","LlllbGxvdw==","IEFicmFt","X2xt","INC90LDQvw==","IFRIQU4=","fi1+LX4tfi0=","Lk92ZXJyaWRl","IFNWTQ==","IFN1c3BlbnNpb24=","IGFic29yYnM=","X3RyYWZmaWM=","ICI+Ig==","LmZpdHM=","IHJlaW5mb3JjaW5n","IG1veWVu","ZXJlcg==","IFJvc2Vuc3RlaW4=","IFdlc3Rvbg==","IGNvbmZpbmVz","T0xB","b3JyYWluZQ==","X0dSUA==","IHN0cmFwcGVk","IG1pbmdsZQ==","CVZr","IG5vc3RyYQ==","IGFjdHJlc3Nlcw==","IFNhbW15","bGlnbmU=","SUdITElHSFQ=","IHN0dXA=","aWN0b3J5","IGNvbnZpY3Q=","IHN1cHA=","cGVvbg==","dnJpZXI=","IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyM=","IHRyb3R6","IG1lbHRkb3du","YXJrZXJz","LlNlbGVjdENvbW1hbmQ=","IExpYWJpbGl0eQ==","IEJlY2FtZQ==","IGx1Y2tpbHk=","INC/0L7RgA==","IHJlYXNzdXJl","IENvbnRyYXN0","IEF1ZHJleQ==","IENvbnN1bHRhbnRz","IFF1ZW50aW4=","LU93bmVk","b2NyaW4=","X1NUUklQ","IHJldGFsaQ==","IHJhbGx5aW5n","IFJlcXVlc3RDb250ZXh0","IG1hc3NhYw==","CWdy","TEVF","IGNhxYI=","IEpvYW5uYQ==","4butYQ==","aGho","IHNxbFNlc3Npb24=
","xLFrbA==","Q29tcG9zZXI=","IGN1cnJlbnRQbGF5ZXI=","YWdpbmk=","IEJhcmJhcg==","IEhlbGxvV29ybGQ=","bG9vbWJlcmc=","LkhlcmU=","IGRpc2d1c3RlZA==","CQkJCQkJICAgIA==","b2t1cw==","VmV0ZXI=","IGNob3Bz","IEZPUldBUkQ=","IEVpZw==","IFBhcnRpYWxWaWV3","IGltcG9zcw==","IGNvbnNlcXVlbnRpYWw=","IFsnIw==","CWxvZ2dpbmc=","IEVsaXM=","cHJvY3M=","LDwv","X3BpbnM=","XERvY3RyaW5l","VXZz","IEdJVA==","IHRhaA==","KHJ1bGVz","Y3JlYXRlRnJvbQ==","ICctJykK","aGFuZGxpbmc=","ZXh0ZXJuYWxBY3Rpb25Db2Rl","Uk9EVUNUSU9O","Rm9yUmVzb3VyY2U=","c2J1cmc=","PFRleHRWaWV3","dGhpbmthYmxl","YW5nbGluZw==","ICJ9XA==","UFJT","QXBwcm92YWw=","IGtsaWVudA==","bm91bg==","IERpYW1vbmRz","SEc=","IFRyaWJhbA==","LnB4","IHByb3BOYW1l","IGhlbHk=","0LvQuNGH","IEJvdXRpcXVl","Iik7fQo=","L2hvc3Q=","IHN0YXR1c0Jhcg==","PkRhdGE=","IGRpc2NvbnRlbnQ=","IGZyYWls","LmVsZW1lbnRBdA==","IGVtYW5j","CWZ1bg==","YXR0bGVz","IHByb3B1bHNpb24=","IGludGVyY2hhbmdlYWJsZQ==","IFRhbWJpw6lu","IHZlbmVy","X0xPV0VS","IHBkbw==","IGRldGVyZ2VudA==","IHRhdmVybg==","VmVudWU=","Lmphc3Blcg==","eXR0","IEppaGFk","4oCZw6A=","IG1lZGlhUGxheWVy","P3A=","cGNm","YW5kb25lZA==","IHJlY2ViZXI=","T1RQ","KGlPUw==","KCckew==","UHRz","IG1hbmFnZXJpYWw=","IFR1ZA==","IFdFTEw=","b3pl","IEFudG9pbmU=","IFxcCg==","IFZlY3Q=","IFdpbWJsZWRvbg==","aXNtZXQ=","IGJvdGhlcmluZw==","aW9zaXM=","Z2V0TWV0aG9k","IGlucHV0RGF0YQ==","IEJpbmRlcg==","IGRjdA==","w6Fsbg==","X0JPTEQ=","IEp1Z2VuZA==","IEJlZ2lubmVycw==","aW9tcw==","IHJlbGVudGxlc3NseQ==","IE1vbmRheXM=","5LyY","VG9tb3Jyb3c=","IFNhbXA=","XFBlcnNpc3RlbmNl","TUFTVEVS","KHByZWRpY3Rpb25z","KG51bWVybw==","LnR3aXRjaA==","LlJlc3RyaWN0","IFpa","IE1MTQ==","LlNtYWxs","XWJ5dGU=","IFZpZXdQYWdlcg==","IEFnZW5jaWVz","IHBhcnRpY2lwYXRlcw==","IGluaXRXaXRoU3R5bGU=","JVg=","IGAs","Lk9iag==","ID8iKTsK","Q2FyZWVy","IDwlPQ==","a3Vs","Q3BwSQ==","IE11c2hyb29t","dXJhdA==","bWlh","Q2Q=","YXJkdWlubw==","IGNvdW50cnlDb2Rl","X3BsYWNlbWVudA==","KCI9PT09PT09PT09PT09PT09","LWJlbA==","QXNzZXJ0aW9ucw==","IHByw7N4aW1h","KCkiKQo=","X2Vn","U1NJUA==","dXpl","cGxhY2Vy","YW1iaWd1b3Vz","X0lOSVRJQ
UxJWkVS","IEhhdHM=","IEdPT0dMRQ==","IGFnaXRhdGlvbg==","KG11dGV4","SElHSA==","OiIp","IGludmFkZXJz","ICl9Cgo=","Lm1hbnVhbA==","IFNpZW1lbnM=","CUpQYW5lbA==","YmluZHVuZw==","ZWNlcmE=","L21ldA==","IMOpYw==","KHN0YXRpb24=","IHBvc2ljacOzbg==","X2lzc3Vlcw==","X2FsaWFzZXM=","X3RvcG9sb2d5","IEF1dG9kZXNr","QWNrbm93bGVk","ISpcCg==","IEZyZWlnaHQ=","IEZYTUxMb2FkZXI=","aWNoZWw=","KENoYXRDb2xvcg==","IGRpc3NvY2k=","IGFuYWxvZ3Vl","PHVzaXpl","LWV2","IHRlbmRy","PkFsbA==","IFVTRVJT","LnJlc3A=","X2ludGVncmF0aW9u","RGlzcGxheVN0eWxl","RkFJTFVSRQ==","0YfQuNGC","aWxkZWQ=","X3NlbWFwaG9yZQ==","YWNhZGVtaWM=","IHNjbGVyb3Npcw==","RmFs","LHN0","YD0=","aWZ0b24=","IHN1YnN0aXR1dGVz","IFN1cHBvcnRlcnM=","YXBwbGljYW50","KGt2","IEJlcm11ZGE=","IGRpc2NyZXBhbmNpZXM=","LlNvbGlk","d2VlbmV5","IGd1bA==","IGZpbGV0eXBl","IHJlc3VsdGF0","U2VuZGVySWQ=","IGdlem9jaHQ=","IEJlcmtzaGlyZQ==","ICgiPA==","KG1s","KHNoaWZ0","X1JFRElSRUNU","T0xPTg==","L2Jyb3dzZQ==","Ok5TTWFrZVJhbmdl","IHdhaXZl","IGV4Y2U=","IGNhdGFsb2dz","5Lmm","aWxsaW9ucw==","LkdldEN1cnJlbnRNZXRob2Q=","IGJpbGluZ3VhbA==","IENhc2NhZGVUeXBl","CVRyYW5zZm9ybQ==","X0NVU1RPTUVS","aXNpZnk=","INCx0Ls=","IFdob2V2ZXI=","IEVBUg==","IFs9Ww==","INC80L7QttC90L4=","IGphcmRpbg==","QHNob3c=","IGhlaXJz","IGFiYW5kb25tZW50","IFRyYW5zY3JpcHQ=","XV4=","OlNldFBvaW50","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAo=","IEZhY3Rpb24=","KGVudGl0aWVz","ZmFjdGlvbg==","bXR4","X3JlY2FsbA==","Lk5VTEw=","Lm9wdGlvbmFs","KHByZWRpY3Rpb24=","QUdFTlQ=","IPCfmIA=","4oCZeQ==","4oCZdXRpbA==","IGFuZ3N0","LkV4cGVyaW1lbnRhbA==","aG9vdA==","YXN5YXJhaw==","YXV0b3BsYXk=","IFNwbGFzaFNjcmVlbg==","IGhlY3RpYw==","IG1ldGljdWxvdXNseQ==","IGNvbWVy","S2VpdGg=","IGZyYXNl","X1VOSVFVRQ==","Lk1hZ2VudGE=","KE1heA==","IHNjYWxlWQ==","IHB1dHQ=","KElG","IEFQUExF","UG9ybm8=","LmFkZENlbGw=","IG1vbHQ=","Y2hpbXA=","IGxlZ2dpbmdz","IGZsb3A=","4oCZaHVp","UlRPUw==","L3NwYW4=","LmJlZA==","LkxvZ2lj","IHVudHJhbnNsYXRlZA==","Q0xFQVI=","O2xlZnQ=","IEJGUw==","LWdyb3Vwcw==","dG9vaw==","X2FjY2VwdGVk","IGNhc2hpZXI=","ZXZlbnRJZA==","IGRvd25ncmFk
ZQ==","CQkJCQkJCQkJCQkK","0LDQvdC40Y4=","w6RuZGU=","IGNvdW5jaWxsb3I=","IGRyZWQ=","ZFQ=","V1JBUFBFUg==","Lm9s","5LiA6aG1","TUVB","IGtpbmV0aWNz","IGptcA==","X2ZsaWdodA==","RmVhcg==","IENoYW5lbA==","X21pZ3JhdGlvbg==","aGRs","ZXJlcXVpc2l0ZQ==","LnJhcg==","LU9uZQ==","IHNoZXBoZXJk","LmVhc2luZw==","KGRlc2NyaXB0b3I=","IHN1YnRvdGFs","44OT","Q29tcGlsZWQ=","IENvbHQ=","ZGxl","L21vY2s=","KXJvdw==","IHJlc2V0dA==","dGVybw==","IGFlcm9iaWM=","LmludHJv","IGNoZWNrYm94ZXM=","IE1jQ2FydG5leQ==","IENseWRl","77yM5bm2","Y29vbGRvd24=","LWluc3RhZ3JhbQ==","IE1QRw==","IExlaXN1cmU=","IG5hd2V0","IE5YVA==","UmVndWxhckV4cHJlc3Npb24=","IHJhdmU=","QklMTA==","IGJhcnRlbmRlcg==","RW5sYXJnZQ==","IHZhaXM=","IDoKCgoK","LkVuZHBvaW50","ICIsDQo=","fX0iPnt7JA==","dHJlZXM=","LmVuZw==","KmxvZw==","OltdLAo=","IGJhdHRhbGlvbg==","U3ViamVjdHM=","IGV4cG9zaXRpb24=","IFRvYXN0cg==","IHRvcExldmVs","IENFTA==","IGd1YmVybg==","dW5zdWJzY3JpYmU=","Y29uYQ==","X2FwcHJveA==","VFo=","IFRyZWVTZXQ=","LmNvbW11bml0eQ==","IG5hcnJvd2Vy","KEV4cGVjdGVk","Q2xy","IGdvcmU=","IGFjcXVpdHRlZA==","IEVVUk8=","G1s=","IHJlcHVibGljYW4=","IGF1dG9iaW9ncmFwaHk=","X2Zkcw==","Q29sbGFwc2Vk","IA0KIA0K","LXBpbGxz","TUJFRA==","IGlOZEV4","IHJlc3BvbnNlVHlwZQ==","Z2xmdw==","LXR1cm5lZA==","5Y+R5biD","CUJvb2xlYW4=","Lk9y","aW5pYQ==","IGhvdmVyZWQ=","IHNvcnRlcg==","IE5o","IEV4ZXJjaXNlcw==","bGVtZW50cw==","aWRvbg==","VG9l","IHLDqWbDqQ==","U1NGV29ya2Jvb2s=","IG9yZ2FuaXNlcnM=","IHJlc3VsdE1hcA==","X0hPUg==","RG9k","TG9jYWxTdG9yYWdl","IGpzb25SZXNwb25zZQ==","QXV0aFNlcnZpY2U=","IHNtZQ==","ZW1icm9z","IGxvYmJ5aXN0","b2d1aQ==","LnNwaW4=","IENvcnJlY3Rpb25z","X1JBRA==","IExTTQ==","KGN1cnJlbmN5","IOaA","IHByZWZldGNo","LkhlYWQ=","LXJlYWRlcg==","IFJveg==","CW1vdXNl","IFRMQw==","IFFUYWJsZVdpZGdldEl0ZW0=","IFNUT1JBR0U=","YW5uZWVy","IOyXkA==","YWNlbg==","U1g=","SW1hZ2VSZWxhdGlvbg==","IHJlc3VyZ2VuY2U=","aXp6eQ==","aWxvZ3Vl","SVZBTA==","IHNtYWNr","cnJoYQ==","KFBBUkFN","IUk=","IE1lY2g=","IElNYXBwZXI=","IGdpc3Q=","IFBPRA==","dm9yZQ==","dWxhw6fDo28=","ICwt","IGludm9sdW50YXJ5","UVJT","PX
RpdGxl","IEJpb20=","IFNoZWxsZXk=","IENTUA==","UGVz","ZHJvcHM=","INGD0YHQv9C10Yg=","ZGl2ZXM=","IVsK","IExlYXN0","IGtha28=","IE1vZGVsbw==","IGZ1bmN0aW9uTmFtZQ==","IGNob2tpbmc=","IGRlZm9ybWF0aW9u","JywnJyk7Cg==","Y2HDp8Ojbw==","IHNxdWlycmVs","c2V0QmFja2dyb3VuZA==","QnJva2Vu","cG9saXQ=","Tm9uY2U=","IGtleWVk","TWVzaFBybw==","LnVzZXJJbnRlcmFjdGlvbkVuYWJsZWQ=","IGZsdXNoaW5n","IGJwcA==","IEFuZ2xpYw==","VHJvdQ==","IFdhbHRlcnM=","IHN0dXR0ZXI=","SGlw","X3dhcg==","aXZlbWVudA==","Q29ybg==","IHVuZHVl","YXBhdGthbg==","IG1pbmRlbg==","c2lnbmlmaWNhbnQ=","KHF1YW50aXR5","JGluc2VydA==","IEFMRVJU","LlVuaWNvZGU=","aWhu","XTo9","IHBpbk1vZGU=","IGZyYWlz","aW50ZXJwcmV0ZXI=","J2FjdGlvbg==","IGJsZWliZW4=","obQ=","cm93c2Vycw==","R0lU","X0RJUlM=","Rm9yZXZlcg==","IFBkZlBDZWxs","fG0=","LnNldEhlaWdodA==","IGZvcmVhcm0=","IGJhdHRsZWdyb3VuZA==","INC/0L7RgdC70LXQtA==","IEhhdGg=","IEF1dGhvcml6ZWQ=","IGNvbmZlcnJlZA==","IEJPVFRPTQ==","LmdldEZsb2F0","b2dyYXBoZWQ=","YXJkeQ==","IHNlcnZpw6dv","b3RveGlj","L2F1dGhlbnRpY2F0aW9u","IHJlcHLDqXNlbnQ=","IGNvbXBsZXhpb24=","CUNvbW1vbg==","X2Jo","V2hvbGU=","SW1hZ2VEYXRh","IHRpbms=","ZXF1YWxUbw==","IFRIUg==","IGRlbHRhcw==","IEFHRQ==","aXphZG9y","YWRtaW5pc3RyYXRpb24=","cXVldHM=","X2ZpbGxlZA==","IEjDpA==","YWxsb2Nh","IEJvb25l","CWxjZA==","Rm9sZGVyUGF0aA==","LlJhaXNl","XyN7","ZXJ0aW5v","IFRocm9uZQ==","4K6/","b3hldGluZQ==","cHJheQ==","IGRpbGlnZW50bHk=","IEFyY2hpZQ==","Lm11bHRpcGFydA==","IHNlbw==","LmdldFByb2plY3Q=","IHBhag==","Y2xlcm9zaXM=","YW1lcm9u","IHRvdXJlZA==","IG5pa2U=","IEJha2VyeQ==","LHBhcmVudA==","X1RFTQ==","U3BhdGlhbA==","bGFwcGluZw==","UHJvZHVjZXNSZXNwb25zZVR5cGU=","KGJhbGFuY2U=","SHVuZHJlZHM=","LXRlcm1pbmFs","IkRv","Q29udGVudFNpemU=","IGJiYw==","IGTDqWNvdXZyaXI=","dXRpbHVz","LnVuZG8=","LG91dHB1dA==","Z3JvdXBOYW1l","JG1heA==","IEFsbGE=","INC60LDRgNGC","Lk9ORQ==","X2RlY2lzaW9u","RUVFRQ==","IHhPZmZzZXQ=","56o=","IHJ1bmF3YXk=","IGhhbmRqb2I=","IGdlbml0YWxz","KGpUZXh0RmllbGQ=","LnJhZGlhbnM=","IFBhZHJlcw==","ZGVwZW5kZW5jZQ==","IHN3YWxsb3dpbmc=","cm90ZWlu","IGZsZWV0cw==
","IGNhcmF0dGVy","KGNhbg==","IEZsb3JhbA==","X01zZw==","IGRlY2xhcmFjacOzbg==","bHNydQ==","c2Nob29scw==","IGRlbGVnYXRlZA==","IFBlbmFs","IENoZXJu","U21hcnRQb2ludGVy","c3Rvcnlib29r","IE55bG9u","5oCd","X0xFU1M=","L2FkZHJlc3M=","IENPUlM=","IOydtOuvuA==","IG1vZGE=","bWRw","IGRlcmJ5","IFBoYXJtYWNldXRpY2Fscw==","IGV5ZWQ=","X2NwdXM=","6KaL","fHwK","Lm1hZw==","KFFM","IENpdmlsaXphdGlvbg==","6Yw=","X0RlcA==","IHN3ZWFyaW5n","IFNob3J0cw==","dWViYXM=","IGRlbGluZQ==","IEFkdmlzb3Jz","IOyeiOuLpA==","X0ZJTkU=","fSk6","LGFzc2lnbg==","IFBDSWU=","e3t7","U2Np","IGFtYm9z","aWxlZW4=","IHR1bmVy","IHBhcmFtTmFtZQ==","LHRvdGFs","KExvY2FsRGF0ZQ==","IHNwcA==","IGVycm9yZXM=","IEhlbHBpbmc=","X21lcmdlZA==","LnRpbWVTY2FsZQ==","X0VMRU0=","X1NPTA==","IGF2ZW50","PGQ=","SnVuaW9y","CWJhcg==","Lmx2","IOy5","PXd4","IG1pcmFjdWxvdXM=","IFJhbmRvbUZvcmVzdA==","IEZyYW5rZW4=","YGAs","KEluaXRpYWxpemVkVHlwZUluZm8=","IHN1cGVyaGVyb2Vz","IGFuc2libGU=","X1R5cGVEZWY=","IFBlcm0=","T0xFUg==","R3Jhbg==","LW5vdGlmaWNhdGlvbg==","IGtheg==","IGV4aGlsYXI=","c2VydGVy","IHN0b3JlZnJvbnQ=","X2VuZHM=","IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMK","CWdpdA==","RFNQ","Q0hBSU4=","rLQ=","SW52YWxpZE9wZXJhdGlvbkV4Y2VwdGlvbg==","IFNseQ==","77yaPA==","QnJpdGFpbg==","L3NsaWRlcg==","IHptcQ==","IGJhag==","YnJlZA==","LlZBTFVF","IGdyaWV2aW5n","IHBvcm7DtHM=","aWd1YQ==","SU5DTFVERUQ=","V2FrZQ==","Y2Jk","IE1vbmdvbGlh","aW52aXNpYmxl","IGNvcnJlY3RpdmU=","IGNlbnRlcnBpZWNl","Q2F1Z2h0","IGthcmFrdGVy","YWxtw7Y=","IGJlbHVt","IGFkam9pbmluZw==","Pygi","IFZpc3VhbGl6YXRpb24=","a2tl","aWZpY2Fkb3M=","c3Bk","X0NCQw==","LUxhbmd1YWdl","IHN0aWw=","b3JldGljYWw=","KGNvbXBsZXRpb24=","IFZlcmbDvGd1bmc=","X1RyZWU=","cmlwcGxpbmc=","LlJlbW92ZUVtcHR5RW50cmllcw==","IFRBWA==","CUNvZGU=","5YuV","dXJnYQ==","INGD0LbQtQ==","IGFpZGVy","IFByZXNjb3R0","IGZpbGFtZW50","IC0tLS0tLS0tLS0tLS0tLS0tLS0t","dGhlcm9z","0LXRgNCw","ZGViaWFu","w6RobA==","b2xhaA==","X1VOSVRT","QXJr","TW91bnRlZA==","LlRyaW1TcGFjZQ==","LmdldE51bWJlcg==",
"X2VvZg==","Lm5y","IFNIQVJFUw==","aWxhdGVy","IHdpY2h0","X2NvbXBhcmlzb24=","ICki","Y2xpbmljYWw=","IFRFbnRpdHk=","dmVuZXM=","LmdldFByb3BlcnRpZXM=","IHJlbGF0","IGFubm95YW5jZQ==","YmVi","IGFuZXN0aGVzaWE=","X2ludGVydmFscw==","X2Zo","IHN1ZG9rdQ==","IGRpc2Vu","Y29ubmVjdGluZw==","IG9h","IOKWkQ==","WkY=","IGN1eg==","U09FVkVS","IE3DtmdsaWNoa2VpdA==","Y2hhcnRlZA==","IGhhc2hlcg==","IEtlZXBz","QUVB","CWxvZ3J1cw==","CU5hbWVzcGFjZQ==","b3J0aG8=","JGFjdGlvbg==","IFJvYw==","Jyk7Pz4i","IFBST1Q=","QGFwaQ==","Y2hzZWw=","L2dpZg==","KEhhbmRsZQ==","IGFudW5jaQ==","L3B5","aW52YWxpZGF0ZQ==","IE1FUA==","dGVtcw==","O10v","6IM=","6L+Q","IHRhY28=","QURW","aHBw","QnV0dG9uQ2xpY2s=","IGJyaW5nZW4=","IFRJTUVPVVQ=","IGFzdHJvbG9neQ==","ZGF0ZUZvcm1hdA==","T0dSQVBI","RmlsZVN0cmVhbQ==","5a6h5qC4","LkNvbW0=","J2I=","IEdFVEdMT0JBTA==","ZWF0aW5n","YW5kZXN0","IFNFVFVQ","IEFkdmFuY2Vz","LnNjcm9sbEhlaWdodA==","QVpF","ZW5kdGltZQ==","d2VhdGhlcm1hcA==","IE1hbmdv","IFJJUA==","IGl0ZXJhdG9ycw==","IGNvYXg=","IOWbvg==","PG1haW4=","cm1z","cGNi","IHZhY2NpbmF0aW9ucw==","IGRpc2FncmVlbWVudHM=","CWV2ZW50cw==","PExvY2F0aW9u","Lk1lYXN1cmU=","IHF1ZWRh","IHNpZ25hbGxpbmc=","IGRlZ3JhZGVk","IEFtZWxpYQ==","LWNvbmZpZGVuY2U=","ZGJOYW1l","X2luYWN0aXZl","b25hdGlvbg==","IHBlcmlwaGVyYWxz","5qC3","U1VQRVI=","J1I=","LndheQ==","UExBSU4=","IEVuZ2Vs","cmVsYXk=","IGRlYmlkbw==","IFRyb3Rza3k=","6Iw=","INCw0LTRgNC10YE=","CXVzZXJz","ZXRjaHVw","dGVw","IG5ld1Bvc2l0aW9u","IHdhaXZlcnM=","ZWRpY2luZQ==","IHRhbmdnYWw=","IGFtbW9uaWE=","LWRldA==","L2V4ZWM=","KHBhZGRpbmc=","IFNob3BwaW5nQ2FydA==","IFByaW50Zg==","SGFuZGxlZA==","IE5BTUVT","KGNsb2Nr","IHt9Og==","IHNpbXM=","IFRlYXJz","IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0=","X0NBTk5PVA==","TEVHUk8=","LlNldFBhcmVudA==","5YW25Lit","IGVycmV1cg==","aXBp","PEV4cHJlc3Npb24=","LnRpbWVsaW5l","ICdfJyw=","IGNvYXRpbmdz","IHVzZUZvcm0=","LnRr","IEZlYXN0","LlNL","w6RzZW50","Y2h3aXR6","IGludmVudGl2ZQ==","IE1laQ==","IHZlc3RpYg==","IG7DpGNoc3Rlbg==","L2JpZw==","IHJldHJl
YXRlZA==","IHByb3BhbmU=","dmljdGlt","QWt0","IFByZXNlcnZhdGlvbg==","IFBpcw==","X1NIQURPVw==","IHByaWNlbGVzcw==","csOzZA==","b2JibGVk","IHJvbGVOYW1l","IEdEUFI=","ICciLA==","Q2VudHJl","QXJjaGl0ZWN0dXJl","Q3BwQ2xhc3M=","IG1hdHRyZXNzZXM=","IGJlZXA=","IERhbWlhbg==","5p2D6ZmQ","YmV0dA==","X2Flcw==","KGNlbGxz","IOuwsOyXtA==","IGJpdG1hc2s=","Y291bGRu","LW5vdw==","IGlubm92YXRl","IGhhY2Vu","IEx5b25z","dGhpY2tuZXNz","IHdoaXN0bGVibG93ZXI=","JGZpbHRlcg==","IGV1bGVy","IEhhcm0=","IGxlZHM=","IEtlbHZpbg==","LnF1aWNr","IEzDs3Bleg==","cmV2ZQ==","IG5pZ2VyaWE=","IGp5bGxhbmQ=","LmVtcHR5TGlzdA==","IHVuc2V0dGxpbmc=","dXNiYW5k","IHRyYWNrZXJz","PVwiIjsK","IGNvbnRpbnVh","IE51bWVybw==","ZW5kb24=","IEdlcnJ5","LlRPRE8=","UmVwZWF0ZWQ=","IFNlcmVuYQ==","0LjQvNCw0LvRjA==","cHJvZmls","INCy0YHQtdGF","QGFkbWlu","LkxpbmVz","IHRyYW5zbWlzc2lvbnM=","IGNq","YW7Dp2E=","5Yig6Zmk5oiQ5Yqf","IGdldE1lbnVJbmZsYXRlcg==","dWZyZXE=","IE1hdGhlbWF0aWNhbA==","TmF2aWdhdG9yTW92ZQ==","IGZ3ZA==","dW5pdHRlc3Q=","IHN5bnRoZXNpemVk","IGNyZWVk","KEZyYW1l","cHN5Y2g=","dm9k","dUM=","4bqndQ==","IOKAnOKApg==","IGtyYXQ=","ZHJhd2FibGU=","w6ZyZQ==","PXRvcA==","KExvZ2dlcg==","RXJyb3JFeGNlcHRpb24=","YWlzYWw=","L3dz","dWxsZWQ=","QVJJTkc=","IG5JbmRleA==","IGludGVybmFscw==","IGVmZmljaWVuY2llcw==","ICNA","X2JyaWdodG5lc3M=","X25vcm1hbHM=","IFN0b3V0","IHVudmVpbA==","IFNob3Rz","LWNvbXBhbnk=","X2VsdA==","KGRsbGV4cG9ydA==","IHByb2R1Y2Npw7Nu","Q2lzY28=","Qmxha2U=","LW1vdXRo","UGVhcg==","INC00L7RgdGC0YPQvw==","IEpBQ0s=","IO2YuA==","IHN0b3B3b3Jkcw==","IFRlc3M=","IHBvc3Rl","cmF6aWVy","6K0=","TWVzc2FnaW5n","t+aWsA==","VGFtYmFo","IG5hcmNvdGljcw==","IGNhbXBlcg==","IHRyaXBvZA==","IGdsRW5k","IGdpb2M=","Y29tYmU=","VXNlclJvbGU=","VWw=","RXF1aXZhbGVudA==","IGdub21l","IEZ1w58=","cGFja2FnZU5hbWU=","X3Vl","RGlzY2xvc3VyZQ==","YW1hdGU=","X3RlbnNvcnM=","IEthdGhyeW4=","X0Jhcg==","VGhyZWFkSWQ=","IHZlcmlmaWNh","LmFzc2VydE51bGw=","IE9kaW4=","YsOp","INGB0L7RgdGC","IGp0","LlNlbGVjdGVkSXRlbXM=","IGFjdGlvbmFibGU=","IFJlZ2FyZHM=","aGVr","Om51bWVs","LEdM","IFBIT05F","CURlZmF
1bHQ=","IGVsYXN0","IGJlY2s=","PWNyZWF0ZQ==","OicK","YXJodXM=","bW9kaWZpZXJz","aW50cHRy","IHByb3Bpbw==","77yI56yR","IHJlcXVlc3RPcHRpb25z","IGltcGxpYw==","IGR1cm8=","IFBDUw==","RGVsaW1pdGVy","KGxvZ2l0cw==","LkVWVA==","V2l0aENvbnRleHQ=","IG9sdHJl","X0VYRUNVVEU=","b2xpY2l0ZWQ=","X0VudGVy","L2Zyb20=","INGB0LvQvtCy","IEhvcm0=","dWliTW9kYWw=","X0lORklOSVRZ","77yM44CK","VUdJTlM=","T05HTA==","LGJ1Zg==","IHBvdXJyYWl0","cGo=","KGN1YmU=","IHVnbA==","IFNhd3llcg==","SUZFU1Q=","QXBpcw==","IENvcmVEYXRh","IHNlc2FtZQ==","LnB0aA==","LmdldFVzZXJOYW1l","Y2FzZWQ=","IHZhbmlzaA==","X0FwaQ==","Ly86","L25vbg==","LmRvY2tlcg==","LnNp","YWxlcnRz","IGludGVzdGluZQ==","cGFydGljaXBhbnRz","LXZpc2libGU=","ZW1zcA==","bXVl","X3B2","IENyaQ==","b2dyYQ==","X2V4cGVyaWVuY2U=","IElOVEVSVkFM","X3JlZ3Jlc3Npb24=","7ZWY7IS47JqU","ZW5kZXJlY28=","bGF0YWJsZQ==","LmxvY2FsdGltZQ==","IEJJVFM=","IEZvbGRpbmc=","CSAJCQ==","w6lzZQ==","LWJlYXJpbmc=","IFhQQVI=","T1BTSVM=","J14kJyw=","aW5jbA==","IE9wcmFo","IGJvb3Rocw==","IFJvaGluZw==","LkJvcmRlclNpZGU=","YXRhdHlwZQ==","Q3JlYXRlZEJ5","LOKAmeKAnQ==","ZG9jdHJpbmU=","IGJyZWF0aGVk","X2JlZw==","IGFmZmxpY3RlZA==","TW91bnRhaW4=","QmxvYw==","IHJ1aW5pbmc=","LkFubm90YXRpb25z","CWludGVudA==","IHN0YXRpY2FsbHk=","X1V0aWxz","TGF1bmNoZXI=","Om5vcm1hbA==","IHVzZXJpbmZv","LUp1bA==","S3lsZQ==","LlJlYWRVSW50","KHVybHM=","L2lm","bWl0dGVs","YmNt","QE1vZHVsZQ==","IENvbnN0YW50aW4=","IGJq","ZXJuYXV0","PHI=","IE1lbnRvcg==","IGVncmV0","X29hdXRo","LkRhdGFDb250ZXh0","X0NMSQ==","KENvbnN0cnVjdG9y","IHNldFBvc2l0aW9u","cmVzYXI=","ZW50aW5n","4Li54Lil","VHJhbnNtaXNzaW9u","IG5vdGlmeURhdGFTZXRDaGFuZ2Vk","IE1vdXNlQnV0dG9u","ICoi","ICAgICAgICAgICAgICAgDQo=","IEx5ZGlh","IHN3b3Jl","IHBsYXRhZm9ybWE=","CWJ1dHRvbnM=","IHNwcnVuZw==","KFRva2VuVHlwZQ==","Q3g=","QXF1","CQkJCQkJCQkJICA=","CUFERA==","dWlkcw==","IOCkrg==","IOaXtumXtA==","LkFjdGlvbkJhcg==","IG9jdXI=","IGlsbWE=","LW5ldXRyYWw=","ICIuIjsK","CVNpemU=","UGllY2Vz","IHN0aWY=","ICI9Iiw=","IEVxdWl2YWxlbnQ=","IGlnZW4=","ZGZk","X3RoaWNrbmVzcw==","X3JlYWRhYmxl","L2ZhbHNl
","IHRvb2x0aXBz","b3BsYXN0","aHVh","aGFuZGxlUmVxdWVzdA==","LkxBWlk=","PFVGdW5jdGlvbg==","aW1tdXRhYmxl","aWhpbGF0aW9u","IG9ydGhvZG94","LnBvcHVsYXRl","IHZlcmE=","IG9iZXI=","c2FuZA==","dmln","Q29uZmVyZW5jZQ==","KENvbGxpc2lvbg==","L2F1dG8=","IFNvbGlkQ29sb3JCcnVzaA==","Kic=","LGFkZHJlc3M=","IHN3ZWV0aGVhcnQ=","w6F0aWNhcw==","YW5pbmU=","X3BheW1lbnRz","IHVubWlzdA==","IHRydW1wZXQ=","QkFM","IGZpbGVJZA==","bmllanM=","QURG","IG1uaXN0","IEZlaGxlcg==","44CRLA==","Q2hhcmFjdGVyU2V0","IFZhbmNl","SW5zZXJ0ZWQ=","IGRvd253YXJkcw==","IHJvdGF0aW9uYWw=","IGVuY291bnRlcmluZw==","TUJQcm9ncmVzc0hVRA==","L1N5c3RlbQ==","L3BvcA==","IH0pDQoNCg==","IC4nPC8=","77yJDQo=","IGRjYw==","YXN5YXJha2F0","IHByaW5jaXBhbGx5","5a6a5LmJ","KGNob2ljZXM=","LnBhZ2luYXRvcg==","IHVwYnJpbmdpbmc=","IGRvdGVudg==","KCkpLw==","IFRBUw==","Z2Nk","X2ludGY=","Lm11dGV4","cHJlc3Rhc2hvcA==","IGLDtnI=","ZGFw","X2RlbWFuZA==","XERlc2t0b3A=","dG9GbG9hdA==","IHNlZ3JlZ2F0ZWQ=","IGNsaW1hdGVz","Lk9yZGVyQnlEZXNjZW5kaW5n","KCcsJyk=","UHVsbFBhcnNlcg==","QXRvbXM=","IGJlbsO2dA==","IGhvbWVy","YW50dQ==","SXNFbXB0eQ==","IEJlZ2lucw==","PlNob3c=","IFN1cHBsZW1lbnRz","b2NjdXM=","IGRvcGU=","LmJvb2tpbmc=","IEFsbWlnaHR5","W2VkZ2U=","IEViYXk=","X3JhY2U=","RnJvemVu","X3RyYXZlbA==","IHBhc3RvcnM=","X1NVUkZBQ0U=","X2dlbnJl","X0hPVA==","LGRpbQ==","VGJs","bXRz","cHJlZGljdGlvbnM=","X2N1bQ==","IGRldGFsbGVz","LXRyYW5zaXRpb25hbA==","IHdha2V1cA==","UGVyc29ucw==","LmNvbG9yYmFy","U3RyYW5nZQ==","2K/Zhw==","Jlc=","IEFSUA==","X1NPRlQ=","X2RyYWZ0","SVZB","IGdyb3A=","IGxpZWJl","IGlpZA==","2KfYsw==","Y2FuZGlkYXRlcw==","Z2V0QXM=","PV8oIg==","LkdldE9yZGluYWw=","KSk9PQ==","YW5ub3RhdGU=","IEx1bWlh","SVJNV0FSRQ==","X09QRU5HTA==","KGZvcm1EYXRh","ZW50aW1lcw==","IHdhdGVyc2hlZA==","INCx0LXQtw==","IGZsb3BweQ==","VG93YXJkcw==","KGNvbXBhY3Q=","RERE","e24=","IHBva2luZw==","QG0=","IHJlY3ljbA==","c3RydWN0b3Jz","a2V5Q29kZQ==","IHZlaGVtZW50","IGxpdHJl","IEJJTkQ=","IEZyYW5jb2lz","IG51ZGl0eQ==","IGlzaXpl","CW9uQ2xpY2s=","eXN0YWxz","IGdldFN5c3RlbVNlcnZpY2U=","V2ViUmVzcG9uc2U=","ZmlsZXNpemU="
,"IENobG9y","Y29saQ==","X3NlYXQ=","LkFkZEluUGFyYW1ldGVy","KXRlc3Q=","IHF1ZXM=","IGNhdXRpb3VzbHk=","ImRpc3BsYXk=","LnNodG1s","IEdVSURBVEE=","KCIqKg==","IGdyYW5kZGF1Z2h0ZXI=","IEFzc2VtYmx5RGVzY3JpcHRpb24=","Rm9yRWFjaA==","V2lsc29u","LGVn","IGJlbGlldmFibGU=","IGNyb3Nzd29yZA==","bG9iYmVy","IFN0YXBsZXM=","KHNoaXA=","IHdhZ2Vk","IEJvbHNoZXZpaw==","LkFkZEl0ZW0=","KEZpbHRlcg==","X0FCQw==","IGBc","0L7RiQ==","IG1ib3g=","IE5lcw==","IEFWQ2FwdHVyZQ==","IGNvbmhl","IElOVEVSTkFUSU9OQUw=","b3Nn","IF0pLT4=","U0tUT1A=","IGtpZGQ=","IFNTVA==","IOWFsw==","IEV0aG5pYw==","RVJTSEVZ","IG11bHRpYw==","X01VTA==","IEZpbmRPYmplY3RPZlR5cGU=","IEV4cGVuc2Vz","Z2V0TW9ja0J1aWxkZXI=","LWd1aWRl","J0w=","IOeZuw==","IHJhag==","IEJsYW5jaA==","IEFkZHJlc3Nlcw==","Tng=","IElzbGFtYWJhZA==","0L7QutGD0LzQtdC90YI=","IEJlYXZlcg==","LnN0dWRlbnRz","IEFzeW5jQ2FsbGJhY2s=","c2hlZXRz","ZWNhc3Q=","IEZ1bmRhbWVudGFs","IHZlcmRpZW5lbg==","IGV4YWNlcmJhdGVk","IE1vZGVyYXRvcg==","Q0NDQ0ND","IHRpbWVvdXRz","IHN1YmRpdmlzaW9ucw==","IGNvbXByb21pc2Vz","dXp6ZXI=","fSwkew==","X2Jsb2NraW5n","ZXJtYW5u","IE1pa2hhaWw=","IFNlbGJzdA==","6ZSA","LnNob3dz","5LiH5YWD","IFRm","IElIdHRwQWN0aW9uUmVzdWx0","IElFbnRpdHk=","IGlx","Rk1M","b2RlbQ==","c3Rw","dWN0aW9ucw==","LmZhdm9yaXRl","LkdldERpcmVjdG9yeU5hbWU=","IGdyYWM=","IHhtbERvYw==","X3B1c2hCdXR0b24=","Y29sbGVjdG9y","PWV4cGxvZGU=","IGRlc3RpbmF0aW9uVmlld0NvbnRyb2xsZXI=","IFNlcmlhbGl6ZWQ=","Om1lc3NhZ2U=","IENDQw==","X3JlY292ZXJ5","LWtpdA==","c2hpbWE=","cm90Y2g=","IGB9Cg==","X3N1cHA=","VGFibGE=","0YDQtdC00LXQuw==","R3RrV2lkZ2V0","IFNJTVBMRQ==","LnBoaQ==","IExpYmVydGllcw==","LS1b","IHVudmVpbGluZw==","IGV4dGVudHM=","YmNk","IGh2YWQ=","CWNy","LnJlYWRkaXI=","IHJlYWRhYmlsaXR5","IGRpc21pc3Npbmc=","Q2FtYg==","IGNhc3VhbHR5","IElQVg==","bWl0ZXM=","IHB1cmlmaWVk","Lk9yaWVudGF0aW9u","IGxq","aW11bGF0b3I=","ZnJhbQ==","L2xvY2F0aW9u","IGNvbW11bmljYXRlcw==","OlVJQWxlcnQ=","L3NvY2lhbA==","ZWx5bg==","REVO","INee","IGJlZm9yZVNlbmQ=","IFVudGVycw==","JykuIg==","ICcnKTs=","LndyaXRlT2JqZWN0","KGdyYW1tYXJBY2Nlc3M=","IEFwcGxpY2F0a
W9uQ29udGV4dA==","QnlVc2VybmFtZQ==","IHNraXBz","IGZpbGhv","IHZpZXV4","IG1SZWN5Y2xlclZpZXc=","IGFyb3VzZWQ=","Lm93bA==","IGN1cmxlZA==","L2NhbGxiYWNr","KCc6Jylb","IGludW5k","IGJyZWFrcG9pbnRz","LWV2ZW4=","LnN0ZW0=","IGRlcm9n","IG5lcA==","IENvbXBsZXRhYmxlRnV0dXJl","LUxpbmU=","Lyov","LkhleA==","IHJ1c3Nl","IGJpZg==","IEZvbmQ=","aWVjdA==","IGFsbG90dGVk","ZGV0ZWN0b3I=","IC8KCg==","ZW1vZGU=","dWhl","dWlzc2U=","IEZJWEVE","bWF0aHJt","IHVuc3Vz","IEF1dG9z","IC4uLi4uLi4uLi4=","LnRyYXZlbA==","TkFW","IGxlc2Jpc2s=","IMO8emVy","IGNsZXJpYw==","IGxpbWl0bGVzcw==","b2x1Y2lvbg==","IG5lY2tsaW5l","IGRyaWZ0ZWQ=","IFJlbGlhYmxl","IENhcnk=","IHRlbsOtYQ==","ID8+Jw==","L2NvbW1vbnM=","IEdNQw==","X05QQw==","IEJsaXNz","IEJ1cm1h","5ZCM5pe2","KGRlcGVuZA==","LXN1aXRl","CXN0YWdl","RG91Zw==","aWRlbnRpZmljYXRpb24=","X3Jlc29sdmVy","QmVnYW4=","W3RocmVhZA==","IDsKCgo=","TlRTVEFUVVM=","IGRpc29iZWQ=","fGg=","IGFjY3VtdWxhdGluZw==","ICIsIik7Cg==","dVBhcmFt","LmJpbGw=","cml0Y2g=","Q3JpbWU=","0LXRgdGM","IFJlbWFpbg==","54Sh5paZ","X1RIQVQ=","YCJdCg==","LnN0YW1w","IHBhcmFub3JtYWw=","IE1QQw==","InVybHM=","IEVzdGF0ZXM=","VG9Gcm9udA==","VGhpcnR5","QmV0aA==","J3U=","IOy9lOuTnA==","VUZBQ1Q=","IENyb20=","IE1pc3Rlcg==","IEVRVUFM","ZW5oZWlt","IC8vew==","X3dhcw==","IGJvdXF1ZXQ=","IE1pZGRsZXRvbg==","aXp1","X2hhc2hlcw==","IGhlbm5l","IExJTlVY","CVNlcnZpY2U=","IFRBTQ==","IGBf","IEFUQQ==","IGRhbmdsaW5n","cGFpbg==","X0JPVU5EUw==","cHJvZ3JhbW1pbmc=","IGN1cnJlbnRJdGVt","IGJlc2ll","ZW1ibGU=","KGNhbGM=","LlNraW4=","IHBlYXJscw==","IEJ1cmI=","LW1vbml0b3I=","L2Nz","Zmly","KHZlcg==","W2FyZ3M=","w7xja2Vu","ZXBhcmF0b3I=","RG91","LkVudA==","IEVTQQ==","KGZt","dG9uZXM=","IFphYw==","a3NhbQ==","4oCZYWxs","IE1TUw==","IkRvbg==","IHNpbXBsZXg=","IENvbnNjaW91cw==","IEFwcGxpY2FudA==","cGVsbGllcg==","IHBlZGVzdGFs","JGh0dHA=","IEF2YQ==","LkNH","IGludMOpcmVzcw==","IEludGVncmFs","cmVkZQ==","PWZvcm1hdA==","LlBhdGhz","X1BBUlRJVElPTg==","IHNlaA==","IFF1YW5kbw==","WW91dHViZQ==","LnB1dFRleHQ=","7KO87IS47JqU","LkFXUw==","IENzdg==","Q3Vyc29yUG9zaXRpb24=","LWJlZ2lu
","X2NvdW50cmllcw==","LXJhbmRvbQ==","5Y2z","UGhpbGw=","IHBhbm9yYW1h","IHRoZXJlcw==","5Y+q","IHNpbGVuY2Vk","IEN1bWJlcmxhbmQ=","LlZpc2libGVJbmRleA==","LnN0YXRpc3RpY3M=","IHByb3BlbGxlZA==","QW1lcmljYW5z","IHZhbGlkYQ==","IEd1YW0=","IEZFTUE=","LnN5bnRheA==","ZGdl","IGRlZXBlbg==","ICAgICAgICAJCQkJ","IFNwZWNpYWxpc3Rz","IFNhbnRhbmE=","IEJlZXRsZQ==","ICUKCg==","VXNlclByb2ZpbGU=","KCIkLg==","IGVtcGxvaQ==","IGVtYWlsaW5n","Z2V0T3JFbHNl","X1VQUEVS","LmRyaXZl","IHJlZGhlYWQ=","Rk9VTkRBVElPTg==","IG11bHRpcGxpYw==","L2VmZmVjdHM=","IGhhbmR3cml0aW5n","X3Rh","IEJheg==","w7ZmZmVudA==","cHJpeA==","IGNoaXBzZXQ=","IGlwQWRkcmVzcw==","w61kYQ==","IFVuZw==","IFNjaGE=","LkZMT0FU","IHF1aWVybw==","b2Nocm9tZQ==","IHJlZWZz","YnNvbg==","IG3Dug==","IHRyYXlz","Qm9tYg==","IG15TGlzdA==","eGltaXR5","IERlbmc=","VW5p","LVNlcmllcw==","b2dhbnk=","bMSxaw==","L2NhbA==","IHJlYWxpemE=","IEhpYg==","CQoJCgo=","IGh1bWlsaWF0aW5n","WyR7","IHByZXRlbmRlZA==","IERhdGVuc2No","YW5zaWJsZQ==","CXJlbG9hZA==","IG1pZ2xpb3I=","X2JldA==","IHRvdGFsVGltZQ==","IEJheHRlcg==","IGVuYW1lbA==","L0ltYWdlcw==","IFNFUw==","IFNwcmluZ0FwcGxpY2F0aW9u","KWluaXRXaXRoRnJhbWU=","CWNhbA==","RUxFTUVOVA==","IEd1dGg=","KEJpZ0ludGVnZXI=","IE1lZGk=","Lk1lbWJlcnM=","IHJlam9pY2U=","IGRvZg==","UEVuZFBvaW50","IGNsaXQ=","X1JFVVNF","TWFrZXM=","IHN6eQ==","IHNoYWRlZA==","IGZhdm91cmVk","aXN0b2w=","ZGV4","IGZsZXhHcm93","hac=","X3ByaW50ZXI=","LmZuYW1l","cGVyYXRpb24=","IG7Ds3M=","Z2dlcg==","6ICB","INCy0YDQtdC80Y8=","KGVmZmVjdA==","QnlVcmw=","IEFQUw==","dHV0b3JpYWw=","ZWpz","U3FsUGFyYW1ldGVy","IHNjcmFwcw==","R3JlZXRpbmdz","RmVk","IFJFTkRFUg==","IGJsb29tcw==","IGRlYmlsaXRhdGluZw==","b21ldHJpY3M=","IHNpbWls","LWhlcm8=","IHJlYWxwYXRo","ZGVwYXJ0bWVudHM=","QklORA==","IENhc3NpZHk=","bGlhbg==","U0tJUA==","LWNsZWFu","IHNpbGRlbmFmaWw=","X211bHRpcA==","anNvbkRhdGE=","QWdlbnRz","LmZoaXI=","IHRyaXVt","IGFzdG9yZQ==","IG5leA==","OnVwZGF0ZQ==","INC00LA=","4KSy","OyIpCg==","LlRleHRJbWFnZVJlbGF0aW9u","IG1pY3Jvc2NvcHk=","U1VS","YW5reQ==","IFBldGl0","bWFya2V0aW5n","IHZlcmlmaWNhcg==",
"YW1hZ2Vk","Y3Ro","IGluY29uc2lzdGVuY2llcw==","IG1hasSF","IGdldEluZm8=","IHBhc3Npb25hdGVseQ==","IGljbXA=","W10+Cg==","U2luZ2Fwb3Jl","IE5ld3Rvd24=","IHJhaWxpbmc=","IEVubGlnaHRlbm1lbnQ=","dXRoZXJsYW5k","bGVpbmU=","X3JlZ2lzdHJv","IEVyaWNh","X3RpY2tldHM=","L21ldGhvZA==","aXp6YXRv","R2F0dA==","LWZlYXR1cmU=","IDotKQ==","IHNlcnBlbnQ=","IEdyb3VwTGF5b3V0","TmlrZQ==","dW5nYQ==","IE1pbQ==","IGluY2Vzcw==","IGRlcGxldGlvbg==","X2xvdA==","IGJpcnRoZGF5cw==","IHJlbnRlcnM=","IGVxdWlwb3M=","IExlaHI=","X1BsYXk=","IHNwaWVsZQ==","IExBTkQ=","IEVuY291bnRlcg==","aXphbmRv","IHBlcnU=","IHNsYW1taW5n","IHJlaW5zdGFsbA==","IGFuZ2k=","SW5UaGVEb2N1bWVudA==","IHZlcnNjaGlsbA==","IHZlcnNv","LnN0YWZm","KHZw","KGFjY291bnRz","Z2V0QXBwbGljYXRpb24=","IG1hbnRlbmVy","LlNP","LkFE","IE1vcm1vbnM=","CXJlYWw=","IGhvdGxpbmU=","IENhcmRpbw==","cGFnZUluZGV4","Ymplcmc=","Rm8=","IGNvbnNlaWxz","IG1pZ3JhaW5l","IGxhdGlubw==","IHRvcnBlZG8=","amFiaQ==","L3Jz","dWJiZXI=","IENsYXNzZQ==","4Lw=","KC9eXA==","X2RlcGxveQ==","R1JFUw==","IFdIQVRTT0VWRVI=","IGFyY3B5","IG1pZWpzYw==","QXJteQ==","IHNjaMO2bmU=","IGJtaQ==","IDoiOwo=","IENydWlzZXI=","cWg=","LnByZXBlbmQ=","IHZpdmU=","b3JpYXNpcw==","ICE9Cg==","dGVnYQ==","YW1lZGk=","UHJvamVjdGVk","LWJyZQ==","LHJlYWRvbmx5","IHN1YlRpdGxl","IG1pc3Ry","IEluaGFs","Y292ZXJpbmc=","IHppag==","IEFSVElDTEU=","UlVMRQ==","IGFsdHJv","IHNldHRsZXM=","aWRlbGJlcmc=","OiIuJA==","KGZl","X2Jt","IHByb3ByaWV0b3I=","IGtlZXI=","U2VwYXJhdGVk","X05FQVJFU1Q=","KHN0cnBvcw==","IENvbXB1dGF0aW9uYWw=","IGVybg==","SW5WaWV3","QWNyb3Nz","IGZydWl0eQ==","X21hcHBlZA==","IGdyYXR1aXRlbWVudA==","IHt9CgoK","cG90ZW50aWFs","cGFudHM=","IHNlbnRpbWVudGFs","IExpbmtlZGlu","KHBhdGNo","IGFkYXB0b3I=","IFVJU3Rvcnlib2FyZA==","IHNsYXNoaW5n","KCIvOg==","IHRleHREZWNvcmF0aW9u","LmRpYWc=","XFJlZGlyZWN0","IG5ldXJvc2NpZW5jZQ==","IEFkanVzdG1lbnQ=","IFNjb3RjaA==","IENvc2J5","U0VB","PXZpZXc=","IGV2b2x2ZXM=","IFNhbGlzYnVyeQ==","44CB4oCc","ZXZlcnlvbmU=","KGFyYw==","IGFwYXJ0aGVpZA==","IGF6aW11dGg=","IFNoYW1hbg==","2KU=","w7NuaWNh","OmNsYXNz","IEluamVjdG9y","Y
Whhcw==","YWJsZXI=","X2VzdGltYXRvcg==","X0NVQkU=","IEtyYW5r","IHVuZmF2b3JhYmxl","IHJlcHV0ZWQ=","IENvbmRpdGlvbmFs","IG1pbGZz","IFJlc3RyaWN0aW9ucw==","KGhyZWY=","SnVhbg==","PEVudHJ5","CXRlbXBsYXRlVXJs","X3Byb2R1Y3Rpb24=","VHlwZUlE","IGJhbGs=","IG5ld0Fycg==","IGxpY2VuY2Vz","LnNvbHV0aW9u","LnNhbQ==","IEh2","IHRyZW1ibGluZw==","WWF3","IGZsZWVjZQ==","IHNob3ZlbA==","V2Vy","IHBhdHRlcg==","PVk=","IEZybQ==","U2NyZWVucw==","JCI=","IEJsb25k","INGB0LjRgdGC0LXQvA==","KG9k","IG5vY3Q=","b3VudGVycw==","dXNlcHBl","fGludA==","LnJlbWFpbmluZw==","IHVsdGltbw==","IG1hc3R1cmJhdGluZw==","bW1j","PUc=","Il19Cg==","IGZlYXJsZXNz","IGFsZ3VtYXM=","Y3VsdA==","QWx0ZXJuYXRpdmVseQ==","5bKB","T0RFVg==","IEFkb3B0aW9u","IHdlYWx0aGllc3Q=","IG1lbnRyZQ==","L2dvdG8=","IGluZm9ybWFudA==","IFJvdXQ=","b2Zp","IGhhbW1lcmVk","IEVzdG8=","4oCZQnJpZW4=","IMWa","IGRlbWk=","INGB0LvQtdC0","IENsaW50b25z","7IWY","5aSn5bCP","RUNI","IGFuYXJjaGlzdHM=","IEJldmVyYWdl","IGdvdQ==","IGJyaWJlcnk=","IHBpY2t1cHM=","IHViZXI=","IHN5bmVyZ3k=","ZmNu","IEhlbnRhaQ==","IEJhc2VtZW50","IG1vcmI=","X2N1","amFkaQ==","KHByb2o=","IEJpbmdv","X2NhdGU=","W2VtYWls","Klg=","X1NFUA==","IHByaW5jaXBpbw==","dXBkYXRpbmc=","Ly99fQ==","Li4uKA==","IERPRQ==","IHpn","c2hhcGVz","PXRtcA==","Q3J1ZA==","IHdvcmtwbGFjZXM=","IHN0YWJpbGl6ZWQ=","IHRlbnRhbmc=","LnByb2R1Y3RJZA==","IFRyaWRlbnQ=","IG9yY2hlc3RyYXRlZA==","IEJ1Y2NhbmVlcnM=","X3RvbGVyYW5jZQ==","aWdyYXBoeQ==","w7xsZXI=","INi1","QVE=","IGF0aGxldGljaXNt","CVNlcnZlcg==","ZXdlZA==","RGlkRW50ZXI=","UmVnaXN0ZXJz","X2VtbHJ0","IGZ1bmN0aW9uYWxpdGllcw==","KGhkYw==","X21hcmtlcnM=","T3JlZ29u","KFN0cg==","IEdldEJ5SWQ=","IHp3YXJ0ZQ==","IE9DSQ==","IEphbWU=","X2NyaXQ=","IHN0b2NraG9sbQ==","CURpY3Rpb25hcnk=","X2NhcGFiaWxpdGllcw==","Q1RS","IG51bWE=","X2ZpcnN0bmFtZQ==","IE5TUmFuZ2U=","IG1vc3RyYQ==","IEFycml2YWw=","KElTZXJ2aWNlQ29sbGVjdGlvbg==","IHRlYXNwb29ucw==","IFNldFVw","CQkNCg0K","KGd1aWxk","LiJd","IG3hu5tp","YmZm","REFURVM=","KCldCgo=","IGh1bWFub2lk","dGhybw==","KGtsYXNz","IFZhZA==","ZnNw","LVNhaA==","IFVTRVJOQU1F","IFByb3BlcnR
5Q2hhbmdlZEV2ZW50QXJncw==","IGxlc2lvbg==","X0RFTklFRA==","IFRISU5L","gqQ=","bWVudGFs","IHByZWNhcmlvdXM=","IE5vc2U=","IGNvbmNs","IHdpbGRmaXJl","IFRCcmFuY2g=","IEJBTQ==","L2Nzdg==","IE5BTg==","IENsZWFyYW5jZQ==","XEJsb2Nr","LmFubm90YXRl","5om+","IFdISUxF","Z2VidW5n","Pkxpc3Q=","c2ht","Um9zcw==","YWZk","W3RpZA==","UGVyUGl4ZWw=","Kyhc","IEN5YW4=","IEtub3Q=","X3Zsb2c=","L3Zhcg==","W19f","IGhhc2htYXA=","KCk7DQ0K","IGFtYXNzZWQ=","IGRhdGVQaWNrZXI=","IFNhdG9zaGk=","X0NBUEFDSVRZ","IGJ1eg==","IE1pbmg=","U2V0Q29sb3I=","Kz0nPA==","IEludmVudA==","b3JjYQ==","aWdudW0=","IEFtcGg=","IHJlZmx1eA==","CiAgICAgICAgICAgICAgICAgICAgICAgIAo=","dWhu","KFRN","YWxsZXk=","IGxlZnRvdmVycw==","ZmRj","4oCcVGhlc2U=","IGNyYXdsZWQ=","KFZvaWQ=","aWd0ZQ==","8J+S","c2V0RGVmYXVsdA==","IEJlZ2lubmVy","UG9r","IEhMUw==","IGdhbWVJZA==","IEFtYmllbnQ=","X1BSRUQ=","LiJ9LAo=","w7xocnVuZw==","LlN5bmM=","IGludmU=","IE51cnNlcnk=","IGdsYXplZA==","q+yekA==","X2ZhdGFs","X2Rpc3BhdGNoZXI=","W10pDQo=","IGRldXRzY2hlbg==","6rGw","U2hhcGVz","IGlycmV2ZXJzaWJsZQ==","X3Blcw==","X2VzYw==","IHRoZXJtb21ldGVy","44OU44O8","X3NxcnQ=","Il09PSI=","IGN1bG1pbmF0aW9u","V29yZFByZXNz","IGxldmVu","VmVydGV4VXZz","IEhheXdhcmQ=","IEFzc2V0SW1hZ2U=","IG1haXpl","IGNoaWNhZ28=","IHRhdg==","ZXhwZW5zZXM=","0K0=","K2Y=","LiInIjsK","LVNB","IEtvdGE=","TWFpbkZyYW1l","LnNhbGU=","X0JV","IHN0cmVu","X2ZpbHQ=","L3ByaW50","KFBhY2tldA==","INC30LDQsg==","QWN0cw==","0LXQu9C10YQ=","IHJlbWF0Y2g=","IHJpZGRlbg==","IH0pKCk7Cg==","IGVuZG90aA==","IGNlcnRpZnk=","IFVJUGlja2VyVmlldw==","XE5vdGlmaWNhdGlvbnM=","CVRpdGxl","IGluZXF1YWxpdGllcw==","IE1vcmFu","IERhZW1vbg==","bGVzaWE=","IGhvcHBpbmc=","IGd1c3Rv","IEZpcmViYXNlRmlyZXN0b3Jl","IHBvbHlsaW5l","IHNwaWtlZA==","JSIpOwo=","IExBVElO","TGFiZWxUZXh0","IHN0cmFwb24=","X2ZpZA==","LXNwZWNpYWw=","YXJnZWQ=","IFNUSUxM","UXVhbGlmaWVkTmFtZQ==","LlJFUw==","I2M=","LndyaXRlbG4=","IEltbXV0YWJsZUxpc3Q=","IFRodW1i","IHNpbWQ=","RGVzY3JpY2Fv","LlNldFRleHQ=","IG5vbnByb2ZpdHM=","V2l0aGRyYXc=","LWVuY29kZWQ=","c2Jpbg==","IGFtb3J0","CWRk","cmlm","IHBhdG
VybmFs","Lk1hcEZyb20=","X2Fzaw==","IHJlY291cnNl","IGJhY2tzdG9yeQ==","CW1hbmFnZXI=","X0RHUkFN","IEJpaGFy","aW50ZWxsaWdlbmNl","IHNraW1hZ2U=","KGVuY29kZXI=","IHN3aXJsaW5n","IEFwcGV0","X3NhbHQ=","IGF0dGU=","IFNRVUFSRQ==","IE5ldHo=","X3BhaW50","YXPEsQ==","aXNjaQ==","Rmxv","LWdvYWw=","LnNldFN0cm9rZQ==","IEF1c2Nod2l0eg==","IEFiZGVs","IGFuZXc=","IOWung==","IHRvdGFsUGFnZXM=","IHJlZmFjdG9y","IGNyZWF0aXZlbHk=","ZW1heA==","b2RveHk=","X3R4bg==","LlNvY2tldHM=","IFJpZGxleQ==","4buxYw==","c2FtcA==","TWluTWF4","IHdvcnNlbmluZw==","b3VudGFpbnM=","YXJ0bmVy","LXByb2Y=","c2luZ3VsYXI=","PWlz","IEZFQw==","X0ZN","IOaIlg==","IENhdWdodA==","X1NDTA==","IGV4cG8=","aW5mcmE=","IE1FUw==","Y2hhcA==","YWx0ZQ==","YXJraW4=","L21M","IHNlbmREYXRh","IGZyYW7Dp2Fpc2U=","IHPDpg==","X0RFRklOSVRJT04=","KioqKioqCgo=","XEN1c3RvbWVy","IOKWiOKWiOKWiOKWiOKWiA==","IHBlcnBldHJhdGVk","IEZ1cmlvdXM=","IHRlbmdh","bGVhcmVk","VUxMRVQ=","aW5pYw==","ZWFyY2hCYXI=","PENhcg==","IFJlbmV3YWJsZQ==","IGNvbnRlbXBsYXRlZA==","L2Zvcm1hdA==","IGZvcmdpdmluZw==","LlN1YkVsZW1lbnQ=","UFVURQ==","LmNvbnRlbnRTaXpl","IHJlc3BlY3RmdWxseQ==","4oCcCgo=","IHBvaWduYW50","dXJpbGU=","fSkiCg==","c2VxdWVudGlhbA==","L2Zhc3Q=","cHJ1bmc=","IFN0dW5uaW5n","IEJZVQ==","IGNvbXBhcmVy","CXJk","dW5pY29ybg==","xrBh","LkdldEl0ZW0=","IHNlY3Rpb25hbA==","anVkZ2U=","dXh0YXA=","IHN1bmRheQ==","IHDDpA==","TWlubmVzb3Rh","Ik4=","IGFwcGxpY2F0aW9uV2lsbA==","QU5HRVI=","IHJlYXNvbmVk","IFpFTkQ=","emFw","PWJhY2s=","b3NwaGF0ZQ==","6IqC54K5","IHRpdHRlbg==","IEFzc29j","QWN0aXZpdHlDcmVhdGVk","KVst","PyIKCgoK","IGpvdA==","2Lg=","IHVuY29tcHJlc3NlZA==","LklzREJOdWxs","IHZhc2U=","IGxvcmVt","IGVudHJlcHJpc2U=","IENvbnNlbnQ=","44Op44Oz","QnlWZXJzaW9u","IHF1aWVuZXM=","CWNvbnQ=","IEJsYWNraGF3a3M=","IEJsYXNpbw==","IHRhbmtlcg==","IHN0YXJ0dGltZQ==","IFNlYXM=","cGlvcw==","LlNwbGl0Q29udGFpbmVy","Y29tcGV0aXRpdmU=","IHBCdWZmZXI=","IGNvbnNlbnRpbmc=","LmFkZE9ic2VydmVy","aXRjaGVk","IG1pc2NlbGxhbmVvdXM=","IFRvcHM=","CWxw","Y21kcw==","LmRlcGFydA==","IGZOYW1l","CWJlc3Q=","OlA=","IHN3YXRo","IHZva3M=","YWxsb
24=","IEh0bWxXZWJwYWNrUGx1Z2lu","LmxvZ2dlZElu","YnVja2V0cw==","IGhvbW9waG9iaWM=","IHN1YmR1ZWQ=","IG1lc3NhZ2Vib3g=","V2hhdHNBcHA=","IGRpc3NpcA==","IE1BTlVBTA==","TElLRUxZ","dGVzdGRhdGE=","LU9jdA==","RXhpdGVk","IFRhc21hbmlh","bGFj","IHRow7RuZw==","U3Rvcmllcw==","IGJpb2NoZW1pY2Fs","b3JyZQ==","IGVjbGlwcw==","IEFzc2VtYmx5UHJvZHVjdA==","cnRsZQ==","IFdpbGhlbG0=","cGl6emE=","X0RI","Y29uag==","IHB1ZWJsbw==","IGxpcXVl","IGN1cGlk","IEFjdGl2aXR5Q29tcGF0","LlNt","Il19","bWFpbGJveA==","Lm9wdFN0cmluZw==","LW9i","IE1hdWk=","YXRhaXJlcw==","IG1lcnJ5","Um5k","IGNhcmFjdGVyw61zdGljYXM=","VHJv","KGNu","Lmxk","LXBvaW50cw==","LnNi","IHZlag==","IGNhcmVnaXZlcg==","IG5hdQ==","RElSRUNUT1JZ","KGFuZw==","KC4p","IGV4cGxhbmF0b3J5","ZWxzZXk=","IE92ZXJuaWdodA==","IGxhaXNzZQ==","IFJBVEU=","IEdvdw==","UmVjb2duaXRpb25FeGNlcHRpb24=","aWNoZXJ0","IHJldm9sdXRpb25z","JGNhdGVnb3J5","IHVuZGVmZWF0ZWQ=","L2NvbW11bml0eQ==","LXBhcnRz","LWFwcGxpY2F0aW9u","K0E=","L3N3ZWV0YWxlcnQ=","IEtt","aWxhdGVk","YXRhdA==","UEFU","xI1l","IFRlYw==","Lm9uQWN0aXZpdHlSZXN1bHQ=","XFdlYg==","IEx1Zw==","b3ZvbHRh","IGFsdHJ1","aWd5","IGLEmWTEhQ==","IGFjdGl2YXRpb25z","IGF1ZGl0aW5n","RVJHRQ==","IOiLpQ==","Q2FybG9z","IGtJbnN0cnVjdGlvbg==","bWluZXI=","IH19Lw==","QW5kSGFzaENvZGU=","IEJvdXJib24=","LnByb2Y=","IGltcHJpbWly","IEZlcmRpbmFuZA==","0LzQtdC90YI=","L3t9Lw==","IENsYWly","IE9uQ29sbGlzaW9u","c2FsZG8=","cmFpc2Vk","IEFCT1ZF","KCk9Pg==","IGRldXRzY2hsYW5k","aGliaXRlZA==","RXh0cmVtZQ==","L2hvb2tz","IGRvdXQ=","IFZPQw==","ZXRob3Zlbg==","UE1D","IHJlc3RhcnRpbmc=","IFNDTg==","IEVP","IERKcw==","UGFzc3dvcmRGaWVsZA==","LkFjY2Vzc2libGU=","CWJ1cw==","U1RSVUNUSU9OUw==","IGxhdGVu","IFNOQVA=","X0hFUlNIRVk=","IG9uc3RhZ2U=","5bCP5pe2","IHNhaWxvcg==","IEN1cnNv","IGltcHJvdmlzZWQ=","IGdlbmVyYWxpemU=","IGJ1ZW5v","IGNlcmVtb25pYWw=","IENOUw==","IHBpZ2Vvbg==","bXNw","L0FJRFM=","bGluZUVkaXQ=","IEZpbmFuY2luZw==","IGpUYWJsZQ==","IGJvdHRvbXM=","IFRleHRJbnB1dFR5cGU=","IG1laXNqZQ==","LXNpZ25lZA==","IEdyZWVudmlsbGU=","b3BoaWxpYQ==","SWNvbk1vZHVsZQ==","IGNsYW5kZXN0","ZW1ha
W4=","U0NBTg==","X1RJTUVT","IGxlY2tlbg==","KGNhbmNlbA==","IGVjc3Rhc3k=","Lk1VTFQ=","IG1vZXRlbg==","IGFwcHJvcHJpYXRpb25z","IFFMRA==","IEd1aWw=","IHRyYXBwaW5n","eERB","IGvDtmxu","ZW51bXM=","4oCcVG8=","cG9ydG8=","bmluZ2Fy","IFRPTw==","LVNU","IE1hdGhz","IGt1cnM=","IFJFUEw=","X2NvbnRyaWI=","IFBoeQ==","cmFuZw==","Lm1hdmVu","LWZvbGxvdw==","IC0tLS0tLS0tLS0t","xLHEnw==","X3dpbm5lcg==","LkNyaXRlcmlh","KGRhdGFTb3VyY2U=","IHNldElucHV0","IFRJTUVTVEFNUA==","b3BlcmFuZHM=","Z2V0V2luZG93","LmZhY2VWZXJ0ZXhVdnM=","IEludmVzdGluZw==","Vnk=","IHBlcnNlY3V0ZWQ=","4bq/dQ==","IFBsdW1iaW5n","T05HT0RC","RXZpZGVuY2U=","IFN0cm9t","cXVvdGE=","TGl2ZXJwb29s","CWF0dGFjaw==","bWluaW1hbA==","IG9uS2V5RG93bg==","IG1vZHVsZUlk","IFZlcmFuc3Q=","bW9ydA==","YWNpc3Rz","IE1BU1M=","X1VOREVS","LmdldFJ1bnRpbWU=","RU5USUNBVElPTg==","Uk9LRQ==","IHNjYWxlWA==","IHNlcnRh","IEZyZXF1ZW50bHk=","X1RSQU5TRk9STQ==","IHR3aWxpZ2h0","IE1jS2Vuemll","bGVkZ2Vk","IEB7QCI=","X0FDVElW","IGhvb2tlcnM=","PWRlZmF1bHQ=","IHdhbG51dA==","IHVzZU5ld1VybFBhcnNlcg==","IENoZWVy","IHdyb25nZnVs","bmlv","YnRj","LnN0cmlkZQ==","IHN1Y2Nlc2Z1bGx5","IFRyb2xs","aWZpY2lv","LmNvbmQ=","IGhlYXBz","X1BIT1RP","PEFkZHJlc3M=","IFN0aWNreQ==","IG5pZ2h0dGltZQ==","IGRhbmRv","IEJJTEw=","INC+0YLQstC10YI=","RGV0ZXJtaW4=","IGZ6","KHNpZ25hdHVyZQ==","IHZpbmRlbg==","LkNPTk5FQ1Q=","cnVpc2U=","IHh1","cHJldmVudA==","Rk9Y","VUlBcHBsaWNhdGlvbkRlbGVnYXRl","U3BsYXNo","IGVtYnJvaWRlcmVk","IEhpbGZl","LnNoYWRlcg==","IGRvdWJ0ZWQ=","UmVzcG9uc2VTdGF0dXM=","IHVuc3RvcHBhYmxl","dW5sb2Fk","KyJd","ImxhYmVs","IGZyZWVsYW5jZXI=","RGlyZWN0ZWQ=","IHZvcmhhbmQ=","IFNubw==","ZXhpc3RlbmNl","b3JkaWFs","emFn","LkFnZQ==","IHNwYXducw==","IFBTRw==","c3RpdHV0aW9ucw==","IHNpZ2h0aW5n","LXRhbGs=","INGB0L7RhdGA0LDQvQ==","ZW5lcmltYQ==","IEJlbnRvbg==","X1N0b3Jl","VHJhbnNwYXJlbnRDb2xvcg==","IEV4cGxvc2lvbg==","X0lTUw==","Q2hlY2twb2ludA==","IGRlZmxhdGU=","0JLRi9Cx","LXRyYW5zZmVy","IEJhYmllcw==","IGltYQ==","LnVzYWdl","IG5lZ2F0aXZpdHk=","IEV4dHJlbWVseQ==","a2o=","RG93bmxvYWRlcg==","CWFjdA==","W2NoYXI=","Tm9ybWFscw==
","X3JlZmVyZW5jZXM=","IGRyYWNvbg==","4bulYw==","X1RSTlM=","Y29tcGFueUlk","IFZlcmQ=","YW5pbw==","IE1hdGNoZXJz","KHJlbGF0aXZl","IHJlZWxlY3Rpb24=","LkhF","VGF1","INGB0YLRgNC+0LrQuA==","IE1ldGFscw==","IENvY2t0YWls","IGFwcmVuZGVy","X3ByZWZlcmVuY2U=","LlNjaGVtZQ==","IGdsR2V0VW5pZm9ybUxvY2F0aW9u","VXNpbmdFbmNvZGluZw==","0YDQsw==","ICJdIik7Cg==","TGVhZGVycw==","J8OqdHJl","X0RlbGF5","UHJvY2Vzc2Vz","aWN1bHR1cmU=","XCI6e1wi","4oCUIg==","RW1vamk=","LWdyb3c=","IENDRA==","Y29tcG9zZWQ=","TWFpbnRlbmFuY2U=","IFJ5emVu","KGFn","LnByb2I=","IFNpbmF0cmE=","IGhvcnJlbmQ=","IE1vdW50ZWQ=","X1BFRVI=","IGN1aw==","IHPDuGtlcg==","IFF1YXI=","X1JFU09MVVRJT04=","J2VhdQ==","IGJvdXJib24=","IGF0SW5kZXg=","L3BvbA==","IOq0gA==","CXB3","fSl9Cg==","LmZvcm1EYXRh","IHVkZW4=","IHJvYXJpbmc=","Tm90aWZpY2F0aW9uQ2VudGVy","IGNsdXN0ZXJlZA==","IHBhaXJ3aXNl","bXVsdGlsaW5l","R2FtZURhdGE=","Lkxhcmdl","KSc6","INGB0LXRgNCy0LXRgA==","IFVJTWFuYWdlcg==","U3Zj","IFBsYXlzdGF0aW9u","Lk1vcmU=","LnF1YWxpdHk=","IGNvbmZpZ0ZpbGU=","LWNvbnRhaW5pbmc=","IEdvYXQ=","ZW5jaW9u","IGxpa2VuZXNz","LXVzaW5n","IHNlYXNpZGU=","4bqpdQ==","YW50aWNpcGF0ZWQ=","Rm9sZGVycw==","LUxldmVs","b3BjaW9u","KXByZXBhcmVGb3JTZWd1ZQ==","PigpKQ==","PWFkZA==","XGdyaWQ=","IHln","X0RSSVZF","IEdldE5hbWU=","LkRBTw==","IGhhbm4=","CWNhdA==","IHZpZ24=","IEhlbGxlcg==","IENSRUFURUQ=","YmVyb3M=","YnV0dA==","IGJlbmRz","IExlZXI=","0KY=","IFNNUA==","VmVjdA==","IG9iamVjdFR5cGU=","OmFzeW5j","IGNvbXBldGVuY3k=","IFF0QXdz","TG91","L2NhdA==","UHJvc3RpdA==","LXZlcw==","CXR2","IEVJ","QW5kV2FpdA==","IFRPT0w=","fSo=","X1Jlcw==","IGFsaWdubWVudHM=","7KGw","IENsYW1w","LXBhZA==","IHdyaXRlRmlsZQ==","IEFwcHJlYw==","4oCZYXV0cmVz","dWRhZGVz","IGx1Z2FyZXM=","c3BlbmRlcg==","W2ltYWdl","RVhJU1Q=","IGRlY2VpdmU=","IGh1bnRz","X1ZPSUNF","X0RY","Q0FD","ICgoJw==","aXNrcw==","LGZpbGVuYW1l","IGxlYW5z","SW5wdXREaWFsb2c=","RGF0YUNvbnRyYWN0","IHNtb290aGVk","IHJlY3J1aXRlcnM=","IHRhbmdsZWQ=","X1RhYg==","IEZpbGVBY2Nlc3M=","WUM=","IHZY","PGR5bg==","TGV4ZXI=","IOKYhg==","IGdsR2Vu","VGVtcG9yYWw=","IEFURg==","YW5rbw=
=","VXNlckNvZGU=","IEtvdGxpbg==","Li4KCgoK","RU5DRUQ=","LnVudHJhY2tlZA==","X21y","IHdhdmVsZW5ndGhz","IGRpY2hv","IGltdQ==","X2NyZQ==","W0o=","X0RG","IGF0dGFpbm1lbnQ=","IGxpdGVycw==","W2tleXM=","IGxpc3Rhcg==","SHR0cHM=","IGJyZXdlcnM=","IGFjb21wYcOx","IHRvYXN0ZWQ=","LmZyaWVuZA==","IHJlbHU=","IFBzeWNoaWM=","TWFuaXA=","ZG5h","UHJp","LWZsYXNo","KGFydGlzdA==","IEtvdg==","cHJlc2VydmU=","X3BlbWI=","LnNldFByb2dyZXNz","IGR1c2s=","IGNhbm5hYmlub2lkcw==","IEt1bmQ=","IENvdW50aWVz","IO2OmOydtOyngA==","IHJlbmFtaW5n","IFJ1c3Nv","TlNTZXQ=","KEVYUFI=","5YW25LuW","RGlhZ3JhbQ==","LGxhc3Q=","KHdpdGhEdXJhdGlvbg==","IGluZGVidGVk","IERpY2tlbnM=","IEFscHM=","IERlZ3JlZXM=","aWRhcg==","LWJsb29k","K29mZnNldA==","IEh1ZA==","b3VuZGVy","dWxuZXJhYmxl","IHByaW8=","YmxpbmQ=","KHBhY2s=","IG5pZ2h0bGlmZQ==","IGlsbHVzdHJhdGluZw==","IG51dHNoZWxs","IGJyb2FkY2FzdGVycw==","IGNvbXBhbnlOYW1l","aXRvcmU=","LnJpZ2h0QmFyQnV0dG9uSXRlbQ==","Ym90ZQ==","IFBJVA==","LXNjcm9sbGJhcg==","IHdpbmR5","IFFNYWluV2luZG93","aHVl","LmVwb2No","IGNhbWVy","IENMVUI=","aWZhcg==","VW5hdmFpbGFibGU=","LXF1b3Rl","IEdyYXo=","IHZhbHU=","X01BVEVSSUFM","IHBlbnk=","IHRyYXR0","IGxpY2tlZA==","CWNhbg==","IFRhaXdhbmVzZQ==","UGFnZUluZGV4","LlRpcG8=","X1JlZA==","IHZmcw==","X3RyYW1wb2xpbmU=","IE1QUw==","IFBlYW51dA==","IExvY2tlZA==","CUFU","anNwYg==","X05PREVT","J1dl","IENvbnZlbmllbnQ=","X3N1Y2Nlc3NmdWw=","K3o=","WUxlYWY=","IHBlZGlncmVl","eHo=","IHNhbHZhcg==","X0Rlc2M=","IG5lc3Rh","IGhhcmRjb2RlZA==","LmdvbGQ=","LkltYWdlRmllbGQ=","X0JT","TEs=","Q2hvY29sYXRl","LlN0YXJ0dXA=","IGFuZWNkb3Rlcw==","Lk1h","P10=","L3RvcGlj","LlNjcm9sbEJhcnM=","0YHRgtCy0LA=","IE1PTQ==","IHFvcw==","YXJ5YW5h","w6RjaHN0","IE1jR2lsbA==","IEVEVUM=","KHBvc3Rz","IEVudHdpY2tsdW5n","X3NraWxscw==","LWd1YXJk","IHRleHRpbGVz","fHVuaXF1ZQ==","IEFyaXRobWV0aWM=","TG9hZElkZW50aXR5","KTt9Cgo=","IGFzc3VyZXM=","V2lsZGNhcmQ=","IGRlZmF1bHRlZA==","IE5vdFN1cHBvcnRlZEV4Y2VwdGlvbg==","IFRvbWF0bw==","LlN1bW1hcnk=","ISIu","dXRoZXJmb3Jk","IGxvb3Bob2xl","IGNtYWtl","LWRhdA==","IHJhZ2F6em8=","IGNhcGl0YWxz","IEl
tcG9ydGFuY2U=","IER1bmdlb25z","X3pvbmVz","LnNhdA==","ICAgICAgCiAgICAgIAo=","Y2F0ZWdvcmlhcw==","IGRhdGF0YWJsZQ==","IG5hamxl","KGdw","LXJlbg==","IHBhbmlja2Vk","IFNreWw=","IFFVSUNL","dmFsdWVPZg==","U3RhdGlzdGlj","IGRlbWVhbm9y","bmRlcm4=","IEFwcGVhcnM=","UHJhZ21h","X3Bhc3Q=","SGFzaHRhYmxl","IHRoYW5raW5n","LmNzcmY=","IHBhdmU=","IFZpY3RpbQ==","IFDDpQ==","Rmlyc3RuYW1l","Q0FURUdPUlk=","aWxlc3RvbmU=","JyktPl9fKCc=","IGluY2FwYWM=","U3RyZWFtV3JpdGVy","IGNvbW11bmlvbg==","X3N0ZGVycg==","6Ieq5rK7","IGh1bWFuaXRpZXM=","INC70Y4=","IFBhcmFz","bG9mZg==","SGVhZGVyVGV4dA==","Z3JlZ2F0ZWQ=","LlhSVGFibGVDZWxs","IGVudGl0eUlk","IE1hc3Rlcnk=","b2xkdA==","JykpKTsKCg==","aHVtaWRpdHk=","Li4uIik7Cgo=","RGVsdGFUaW1l","IG1rdGltZQ==","UGhvdG9u","IHBlbnNhcg==","c2NhbGluZw==","X3llbGxvdw==","X211bHRpcGx5","IFZ1bGNhbg==","IFBlYXJjZQ==","X2xj","LWV4Y2x1c2l2ZQ==","SXNVbmljb2Rl","IHBhZHI=","X1BDSUU=","IGdsaW1wcw==","IHJhbXBhZ2U=","IFBhZ2luYXRvcg==","IGNvbnZleWluZw==","bm9yZQ==","X2RldGFjaA==","J10hPSc=","IGJvbmE=","CUNvbg==","TmF6","IHNlZ3VpbnQ=","IG1pZXN6","IGVzb3M=","ICcvJykK","IGZhaXRoZnVsbHk=","IGJla29t","0LDQutGB","d2hlbG1pbmc=","LnR3bw==","IFNDRQ==","LW5h","ICgpew==","IERhbWVu","X3RndA==","YWRhbGFmaWw=","IE1NSQ==","VGhpbg==","IGRlcHJlY2lhdGlvbg==","IGFic2VudGVl","IHNhbGFyaW8=","IFNvbWVib2R5","IFNsb2Fu","IGVyZm9sZ3JlaWNo","Ok5TTG9jYWxpemVkU3RyaW5n","IGdlaMO2cnQ=","IGVtbw==","IExhZ3VuYQ==","w6FzYQ==","aXN0cmF0ZXM=","UmFpc2U=","IEFzdHJvcGg=","ICdcXCc=","X3BlZA==","IFRIUk9VR0g=","IE5pZXR6c2NoZQ==","ZW5lcmF0aW5n","b3BsYXllcg==","IHJvZGVudHM=","w7xobA==","R2FtZU1hbmFnZXI=","IEhlYWRlckNvbXBvbmVudA==","IG1pbGFu","cXVlZW4=","IFBPTEw=","IEx5bWU=","IEJyaWdncw==","ZWNlcg==","d2Fnb24=","LkRFU0M=","IGdsQmVnaW4=","U3RhdGVtZW50cw==","ZXRyaQ==","IG1vY2tlcg==","IEJsdWVwcmludFJlYWRPbmx5","L2NvbnRlbnRhc3Npc3Q=","ZW1hYWt0","L2xvYWRlcg==","X2xvd2VyY2FzZQ==","Y2l2aWw=","X3ZhbG9y","X0dsb2JhbA==","IGFkcg==","aXRpemVu","LlNpZGU=","IEVtYmxlbQ==","IHRoaXJkcw==","X1NIQVBF","UmVncmVzc29y","UFlUSE9O","IHBzeWNob3RpYw==","IGN2cw=="
,"IEFwcGxpY2F0aW9uVXNlcg==","IGFsdW5vcw==","VG9nZ2xlQnV0dG9u","IG5nYQ==","IG3Do2U=","YWR2ZXJ0aXNlbWVudA==","5YiG5Lqr","Lm92","IEFPTA==","UkVX","INin2LPYqg==","IEdpbm55","IC8vLy8vLy8vLy8=","U29uZ3M=","YWNpYw==","Q01Q","IHJlY29nbml6ZXI=","IHDDq3I=","RElD","O1wiPg==","IGNsb3Q=","OkV2ZW50","LlRP","IEN1cnNvcnM=","XFN0b3JhZ2U=","IElvbmljUGFnZQ==","X2pldA==","KEJpdENvbnZlcnRlcg==","IGNoaWxkaXNo","VHJhZGVy","PEhUTUxJbnB1dEVsZW1lbnQ=","X0ZSRVFVRU5DWQ==","PSI7Cg==","eXN0YWNr","SnVy","IOmU","IHRjYg==","IHJlY2liaXI=","LnN6","IO2BtOuemOyKpA==","UEVSU09O","bm92YQ==","IGNvZXI=","IE1haG1vdWQ=","IFdvcmtwbGFjZQ==","IiIiKSwK","LlBhZ2VTaXpl","Z2V0Um9vdA==","KGJhc2VVcmw=","W1U=","IE1DUw==","IENsYXJrc29u","LnZvbA==","ICIifQo=","IHBldXg=","IFByb2R1Y3RTZXJ2aWNl","IG1vbmRheQ==","IFRlc3REYXRh","IE1hdWw=","IHN0cm5jbXA=","IHNob3BwZXI=","dGhlb3J5","IGV0aXF1ZXR0ZQ==","bGljZW5jZQ==","c2NhbA==","LWNsdXN0ZXI=","IGhpc3TDs3JpYQ==","IFN1YnRyYWN0","IGZpYmVyZ2xhc3M=","X2xhc3RuYW1l","IFJld3JpdGU=","L3RvZG8=","IG92ZXJmbG93aW5n","IEdhdXNz","b2theQ==","IGNsdW1zeQ==","KHh5","IGV4ZW1w","YW5hbHl6ZQ==","LXRpY2tldA==","bmluZQ==","IERlYWRwb29s","IGNvbHVt","IEpL","IFtdLA0K","IEFzcGVu","IG1hbGlnbmFudA==","aMO1ZXM=","U2NhbGE=","aW5uZQ==","IENPTlNUQU5UUw==","X1ByaWNl","IyUl","IGFyc2No","IE5TQXR0cmlidXRlZFN0cmluZw==","IEZpbGVUeXBl","YWxsb2NhdGlvbg==","X3Npbmd1bGFy","KFBvaW50ZXI=","YW5uaWVz","U3RvcmVk","ICc7Cgo=","4oCZZXg=","ZHJz","QnJpZ2h0bmVzcw==","L09S","VGV4dGJveA==","IGtuYWNr","IGplbmlz","IG9jYXM=","ZGF0YXA=","IGdhbWVUaW1l","IOCw","bmR4","IEVWVA==","QnlUZXh0","IGF0dHJpYnV0ZU5hbWU=","IGp1Z2Fy","X3NlcXM=","IEZFQVRVUkVT","OmRhdGU=","ZmJl","cmlwcGVy","56iN","LkV4cHI=","VXJiYW4=","aWRvdA==","IG9ibGl2aW91cw==","KERiQ29udGV4dA==","Q2Fyb2w=","KCcsJywk","IEJyaWxsaWFudA==","a2Fk","Y2VudHJhdGlvbg==","IGt1aw==","IE1BTkFHRU1FTlQ=","X1dFQVBPTg==","IGppaGFkaXN0cw==","IGVudHJlZw==","IGRvxJ8=","IGFwcGVuZGluZw==","IFpp","X2N0eHQ=","IHF1YWRyYW50","ZWxlbWVudFR5cGU=","PWltZw==","YnJ1YXI=","SUNBU1Q=","IGludGVsbGVjdHVhbGx5","LkFubm90YXR
pb24=","IGNhbXBhaWduZXJz","LkRhdGFHcmlkVmlld0F1dG9TaXpl","IMWfZWs=","IC9eKA==","LkRhdGFUYWJsZQ==","IHdlYmxvZw==","KGxpYnJhcnk=","IEZ1cw==","IE9TVA==","X1Bhc3N3b3Jk","IEJ1Y2tsZXk=","aG9mZg==","QWxpZ25lZA==","X1JlYWw=","RU5USUM=","L2dyYXBocWw=","IFdlZWQ=","IExTQg==","b2NjYXNpb24=","YWRkYWZp","TGV0cw==","KCJg","IHdpZGVu","KHZpc2l0b3I=","ICJcCg==","QU5URQ==","LWNhbXB1cw==","LUJhcg==","Y2FtZWw=","Rm10","OmRlc2NyaXB0aW9u","LmFyZQ==","IEFuYXN0","IExvbmdlcg==","c2VyaW91cw==","IGRhaGVy","aXp6ZXI=","TXVsdGlwbGljaXR5","IEhvbGxhbmRl","IEFubm90YXRpb25z","KCk/","IHByb3Rlc3Rlcg==","IFVyZHU=","IHNwZWNpYWx0aWVz","X2x5","Q2Fk","YW5udA==","anNw","IGpvZQ==","KXI=","IFBlcnNpc3Q=","IG9ibA==","IGRlYWRsb2Nr","IHNlcmk=","UmVsYXRpdmVUbw==","IFl1cw==","KFByaW50","YWJpbGlh","IHVucHJvdGVjdGVk","IEFTSUM=","Lk5vbWU=","IFdlYkNsaWVudA==","IElUVg==","w7xybmJlcmc=","aXRvcmk=","U2lnbmluZw==","IFJlYWRvbmx5","IGVsZHJl","IENoZWNrZWQ=","YWxudW0=","U291cmNlVHlwZQ==","bGV4aWNhbA==","IGlsbHVzdHJhdG9y","IERpcmVjdG9yYXRl","IFRyb20=","bXBw","bG9nZw==","Lmluc3RydW1lbnQ=","IHdvb2RlZA==","IFVzZXJUeXBl","IFJlbmNvbnRyZXM=","bW9kZWxOYW1l","QlRUYWdDb21wb3VuZA==","PlRv","IGZyZWV6ZXM=","IENvbnRl","IENyZWRlbnRpYWw=","Y2FsYQ==","L3dvcmtzcGFjZQ==","IGxpYmlkbw==","Y2hsdXNz","b2xsZXlFcnJvcg==","IGFjY2lvbmVz","IEppbnBpbmc=","YXTDqWc=","SW50ZXJzdGl0aWFs","KSkpKSk7DQo=","eWJyaWQ=","IFJvbGxlZA==","TW9kZWxDcmVhdGluZw==","IFJlZmxleA==","IEx1Y2lmZXI=","IGVoZXI=","IGNhcm5pdmFs","ISI7DQo=","X0xPT0tVUA==","IHN1Y2PDqHM=","IHJlb3BlbmluZw==","IGNyZWFkbw==","IFNteQ==","IEVudHM=","LlNpbmNl","IEZpc2hlcmllcw==","L2Nvbm5lY3Rpb24=","IENTQQ==","INC/0YDQvtCz0YDQsNC80Lw=","bHNydWhl","CWFjdG9y","IFN0cmF1c3M=","SnNvblZhbHVl","CWV2YWw=","bG9ja2Vy","IFhJVg==","X2h5cGVy","IFBvbGx5","4oCmdGhl","IEdVUkw=","0LXRgdGB","IGRpdmVz","dWdlb3Q=","aW5lbWE=","YmVyc29tZQ==","Q29tcHJh","LWN1bHR1cmFs","IGdyYW5kcw==","U2Fj","IEJhcm5leQ==","X1FVRVNUSU9O","IG1hbWFu","IGhhc3RpbHk=","IGNsdWJob3VzZQ==","IGdydW5k","X1dBTEw=","IHB1cmlmaWNhdGlvbg==","hOS7tg==","0LLQsA==","dm
VzdG1lbnQ=","LkRpc3BsYXlTdHlsZQ==","X2NvcmVz","JVM=","IG9zw7Ni","IGRpc2I=","IEZyYW5raWU=","IGluZGlzY3JpbQ==","X0JlZ2lu","KGVy","O28=","44Oz44Kw","bm9kZU5hbWU=","IHJlZnVuZGVk","IGRpc21hbA==","IEh1ZmZQb3N0","IHVuZGVjaWRlZA==","d3JpdGVsbg==","a8Ozdw==","IEJvc2U=","CWxpYg==","b3BsYW4=","aW50ZXJwcmV0ZWQ=","IE1PTkVZ","dXZv","IG50b2hz","aXNldW0=","Pmo=","IHVuZml0","IGh1Z2dlZA==","IEplc3Q=","bXBz","IGJyb20=","J28=","IGZvdg==","IFNocmluZQ==","IEVJVEhFUg==","eWNhc3RsZQ==","IHNhdHVy","cmVxdWVzdERhdGE=","W2Rpcg==","T1VDSA==","X0Rv","IHlvbA==","IGluaXRpYWxWYWx1ZXM=","W3ZlcnRleA==","c2VydmljZU5hbWU=","LnNhbGFyeQ==","IEF1dGhlbnRpY2F0ZQ==","6L6+","X1ZMQU4=","KFtdKTsKCg==","IFNlcnVt","UGF0aFBhcmFt","Zm9ybXVsYXJpbw==","IHN1bW1hcml6ZXM=","T0NS","b3JhbQ==","TERBUA==","Ymlj","cGlja2Vk","LXRoYXQ=","IGNkcw==","CWFuaW0=","IGludHJpYw==","IFdvcnQ=","IFZMQw==","IFNoaWl0ZQ==","U3R1ZGllcw==","LmRpc3BhdGNoZXI=","KGVuYWJsZQ==","Lm1peGlu","IFNleW1vdXI=","IGJpb21lZGljYWw=","IFNwb29u","IE5vcnNl","IGludGVudHM=","IMOpcXVpcA==","IERyZXNzZXM=","TFBBUkFN","LnNldFJlc3VsdA==","LmRlbGV0ZUJ5SWQ=","IG5ld2ZvdW5k","IE9TRA==","b3VzeQ==","IGVzdGFkb3M=","W0J5dGU=","Q2h1Y2s=","Lm9uVmlld0NyZWF0ZWQ=","IENvbnRyaWJ1dGlvbg==","X0VuYw==","SU5FVA==","IGZsYXZvcmZ1bA==","IOOCog==","dmlzYQ==","IEhlcmN1bGVz","LmdldEFwcA==","IFlvaw==","Lk1haW5BY3Rpdml0eQ==","KS5b","IGxhdXQ=","SW52aXRl","IENodXJjaGVz","LCcj","2YrYsQ==","KFNT","IHZlbmRh","YXNqb24=","LklOVEVS","aXBoZXJ5","KFN5bnRheA==","b25kcm91cw==","CWNlbnRlcg==","QnJhY2tldEFjY2Vzcw==","IENhcGNvbQ==","LmdldEZvbnQ=","IFZhdWx0cw==","IGRpc2XDsWFkb3I=","Om8=","KHNoZWxs","IGVDb21tZXJjZQ==","IGFsdHJl","X2F0dGFjaGVk","IGlzcg==","IG9idGFpbnM=","LkNvbnRleHRDb21wYXQ=","IGF0dGVuZGVl","IFR3aWNl","IE1vb2Q=","6YKu566x","bm9kb2M=","IFBJWEk=","c29mYXI=","IEJsb29keQ==","LkNvbXBsZXRl","IEJFUg==","IGdldENhdGVnb3J5","IGRpc3F1YWxpZmllZA==","X1RydWU=","J2Vy","LXRvbw==","IGh5cGVybGluaw==","X21heGltdW0=","TmVhbA==","IHBJbmZv","LmdldEVsZW1lbnRzQnlOYW1l","c2NoZWR1bGVk","cGF5ZXI=","CXZlcmlmeQ==","LWVudGl
0eQ==","bWV0YXRhYmxl","YmlsZHVuZw==","IGRlbHRhWA==","ZW1wbGFjZQ==","IHJldmVydGVk","cmVwaWQ=","bGVhcm5lcg==","fSkpCgo=","dWNvc2U=","IHJpY28=","IGJhbmdlZA==","IEFmcm8=","KGluZXJ0aWE=","YW5zYQ==","IMOkdmVu","S2FyZW4=","IHN1cGVyc3Q=","IGZydWl0aW9u","b3RjaA==","IFBheXM=","UmVzaWRlbnRz","IHByaXNt","Jik7Cgo=","Lmptcw==","IFNsdWc=","PScnKQ==","IGd1dGVu","IFNwaWVsYmVyZw==","IFRGb3Jt","KGJlZm9yZQ==","IEZpbml0ZQ==","5paw5aKe","IG1laWxsZXVyZQ==","0L/QuNGB0LDQvdC40LU=","X0Vycg==","LWZ0","bmFubw==","LkFkZHI=","IC8vDQoNCg==","IEpvbmFo","IERpc2Nv","IGx1bmNoZXM=","IERGQQ==","ZXhwbGljaXQ=","XSc7Cg==","IHJlZmluZXJ5","IFN0cmluZ1R5cGU=","dW5zcXVlZXpl","IExpa2VseQ==","V3JpdGVz","LmJwbQ==","IHBJdGVt","b3Vuc2Vs","U3RhbmRpbmc=","IGNob2tlZA==","IGFuc2No","dXBpbA==","IERlYnVnZ2Vy","4qCA4qCA","PEdyb3Vw","IFNjYWxpYQ==","IHN1YnN0aXR1dGlvbnM=","IGNsaW1iZXJz","ICopIg==","IG5hbm9wYXJ0aWNsZXM=","IEFQUFJP","IHB1cmNoYXNlcnM=","IFFUZXN0","IEF3YWtlbmluZw==","CVNlcmlhbA==","LnJlcGFpbnQ=","IHNhdm9yeQ==","IHBvcm91cw==","IGFWYXI=","IFN1YXJleg==","LUVhc3Q=","Qm94ZXM=","IFdlaW5lcg==","IENSQQ==","IOqwkuydhA==","IHhsaW0=","Ij8KCg==","IHdhc2hpbmd0b24=","7Jq0","IHRvdGFsZW1lbnQ=","X210aW1l","LnNldFNjZW5l","IGxsYW1h","IGNibw==","ZWZk","IHVuZGVycmF0ZWQ=","cmFpc2luZw==","IE5BVElPTkFM","ICoqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKi8KCg==","b3B0aWM=","aWRlYXM=","IOaPkA==","IGxhaw==","ISEs","IGtvbW0=","cGFyYWd1cw==","U2l0ZXM=","IHN0cmVzc2luZw==","IE1hdEJ1dHRvbk1vZHVsZQ==","IENvbnZlcnRlZA==","YW5hbWU=","X1JFQURPTkxZ","XT0+","IGJvcmRlbA==","IGJpYmxpb2dyYXBoeQ==","IGdyaWRDb2x1bW4=","IGpvdXJuYWxpc3RpYw==","7J6E","IHJhc3BiZXJyeQ==","c3RpY2U=","IGFicmFzaXZl","IERCSGVscGVy","IGludGY=","IFJUQlU=","fSciLA==","IEhhbw==","c3dhbmE=","IGphbnZpZXI=","IGluc3RpdHV0ZXM=","IFNlYmFzdA==","X0NPTFM=","IGZpZ3VyYQ==","IFp1c3Q=","Zm95","PigpKTsKCg==","IExpZWJl","QWdlbmN5","IOyLnOyekQ==","IFRodW1ibmFpbHM=","dGV4dFRoZW1l","IGVjaG9pbmc=","ZW1wZXJhdHVyZQ==","IGZpcmVwb3dlcg==","ZWRi","Oic
pOwo=","w6lnb3I=","L2ZlZWQ=","IGh1cmw=","LWF2YWlsYWJsZQ==","IFJlbmRlcnM=","IGZkcw==","IEpTR2xvYmFs","IENpdGl6ZW5zaGlw","a2llZ28=","U3RhbmRhcmRJdGVt","LnBsYWNlcw==","IHNjYWxhYmlsaXR5","IFRyYWlscw==","Zm9sbG93ZXI=","IHNlcnZpw6dvcw==","ID8+Ii8+Cg==","W21ldGhvZA==","KGli","IHJpZGljdWxl","IGFkYXB0YWJsZQ==","ZmlsdHJv","IGtldG9nZW5pYw==","LkltYWdlVHJhbnNwYXJlbnRDb2xvcg==","IENGTw==","IFBFRA==","ICIiKTs=","b2dsb2Jpbg==","W3NpemVvZg==","QnJhbmRvbg==","LlRvU2hvcnQ=","IG5pxbw=","IFRFUk1JTg==","LmdldFN0YXR1c0NvZGU=","IGRlYnRvcg==","IENPTlNUUkFJTlQ=","CXNpZGU=","IERvbWlubw==","0YLQvtC8","IGdsYWNpZXI=","IGdyb3U=","enA=","IENhcmxh","LUZlYg==","UGVs","LnJlYWRWYWx1ZQ==","Y2xpbWF0ZQ==","IHRpbGVTaXpl","LnRyaXA=","RU5URQ==","IGNodWJieQ==","IGltcG9zaXRpb24=","TE9XRVI=","LmJ5SWQ=","Lkxvb2tBbmRGZWVs","YXJpaA==","LmZpbmRCeUlkQW5kVXBkYXRl","IFN0b3JlZA==","IGJvdXJnZW9pc2ll","SFRUUFJlcXVlc3RPcGVyYXRpb24=","IHN1Y2tlcg==","LmRlcXVldWU=","bGlja2Vu","IHN1YnJhbmdl","X01FRElVTQ==","SXNsYW0=","IFNwYXJrcw==","77yaJQ==","aW1wb3J0ZQ==","IGAt","IGpveXM=","Z3JvdXBpZA==","Rmx5aW5n","CWJz","Z3Jvc3M=","IEZpZXN0YQ==","IGNzdA==","IGFmaWNpb24=","b3Bob24=","X0NJ","am4=","QmVhdXR5","IHNjZQ==","IGNyYWNrZXJz","YXBr","IGdvcmQ=","IHByZXRleHQ=","IFtc","IENhbmRpZA==","R29hbHM=","QWN0aW9uVHlwZXM=","LG51bWJlcg==","IHBvcHVsYWNl","IGVudHJlbg==","IEF1dG9m","6Zmi","QmFzZUNvbnRleHQ=","QmFsYW5jZXI=","KEJvcmRlcg==","IG1pbmNlZA==","cmVjYWxs","Y2Jh","IGFwcHJvdmVz","IEtsb3Bw","ZXJtaW50","X2Zyb250ZW5k","ZXNjbw==","IG5pbmV0ZWVu","RHJpdmluZw==","IFhWSQ==","IFRhY3RpY3M=","IHByb2dyYW1hcw==","aWVzZW4=","TW92","ZGlldA==","YXV0w6k=","KCIuIik=","IGdvdmVybm8=","X0FuZA==","L21pdA==","IGNhZmV0ZXJpYQ==","LXRyYWNraW5n","IGNvbW11dGluZw==","LnVua25vd24=","X3R5cGVvZg==","IFNTQQ==","UFJPVE8=","Lk1lcmdl","IGZvckNlbGxSZXVzZUlkZW50aWZpZXI=","IFNhdGlzZmFjdGlvbg==","ICMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIw==","SU1QTElFRA==","IFJlc3RyaWN0ZWQ=","IE1hZ251bQ==","0L3QvtC8","S2Fuc2Fz","YXlsaWdodA==","
IFRvd2FyZHM=","IFRvbWU=","IFRlbmRlcg==","X2RlcHQ=","LmNydA==","dHJlY2h0","U1RPTkU=","IGVtcHRpZWQ=","ICcpOwoK","4LiB4Liy4Lij","0Y/RgtGM","bGVjaw==","IFt+LA==","LmV4cGlyZXM=","IFRpZw==","IElyb25pY2FsbHk=","CUxM","Lk5vdE5pbA==","IOWKoA==","IEdvdmVy","IFBlcnNwZWN0aXZlcw==","IERWUg==","IGxva2FsZQ==","IHJlc2VuZA==","IGRvdWJseQ==","IGNvbXVuaWRhZA==","IEFzc2VtYmx5Q29tcGFueQ==","KHR1cm4=","IHN1Ymxpc3Q=","IGVuZG9yc2VtZW50cw==","X1JFR0lTVFJZ","ISIpDQo=","KTs7Cg==","IGdhbnpl","IEhhcm5lc3M=","X21hdGNoZWQ=","5L6h","4oCiCgo=","Q2hlZg==","CUluaXRpYWxpemU=","KTsiPgo=","IEZhcmFnZQ==","cmlzaA==","YWx0ZXQ=","RGVhbGVy","LkxvZ1dhcm5pbmc=","KGFmdGVy","IEdhcnRlbg==","IGV4cGxvZGVz","LkNMQVNT","IHVzZVJvdXRlcg==","LUxh","IHNhZGRlbmVk","YXJvdg==","VG9VcGRhdGU=","IOae","cGlp","JwoKCgo=","IFRSQU5TQUNUSU9O","b25nYQ==","bG9nYW4=","Q3Jvdw==","IGJyaXRpc2g=","IENvbnRlbnRWaWV3","X0JC","b2x2ZW5jeQ==","bG9hZE1vZGVs","VE9PTFM=","aGV0ZW4=","X25o","QUJM","LXZlcnM=","QXJlbmE=","LnNpbmdsZXRvbkxpc3Q=","KHBhdA==","CW5hbWVz","KHNx","IHZhbG9yZQ==","JHJlcQ==","IGFudGhyb3BvbG9neQ==","VGhpbmtpbmc=","IG1pc2NoaWVm","IGFyY2hpdmFs","4KS5","LlNldFRvb2xUaXA=","cHJhcg==","YW5qYQ==","IGZpcnN0bHk=","CWxpZ2h0","LS0s","IFNwZWFycw==","IG9nbA==","c3RlZW4=","aW1wbGVtZW50cw==","cmlzdHM=","K0U=","IEJhbnM=","IGZhc3RiYWxs","IEhlcm1lcw==","dmVsZWQ=","dHdlbnR5","IG5lY2VzaXRh","IE1vcm9jY2Fu","aXNMb2dnZWRJbg==","Q0xPQ0tT","LkFic3RyYWN0aW9ucw==","LlBhY2tldA==","IG1lbmFjaW5n","LXZlc20=","IExpdmluZ3N0b24=","IG9jaQ==","IGV4dHJhZGl0aW9u","ICQoJA==","IExvY2tlcg==","IFJlYmVsbGlvbg==","IG1peGlucw==","Y3RhbA==","L3JmYw==","IFNHRA==","LGlkeA==","IGJsZWlidA==","KFwk","IHBldGVy","IGJhcnJlbg==","IHBob3NwaG9yeQ==","IGdvZ2dsZXM=","LmhvbQ==","QGQ=","PSct","LmlzVXNlcg==","YWthc2g=","X2h1Yg==","aXBlbGluZXM=","IEB9","LnN1cm5hbWU=","SW50ZXJvcA==","IGluRmlsZQ==","IGVzcGVjaWFsbWVudGU=","IGF1dG9ub20=","IFphbWJpYQ==","X0NPVU5UUlk=","PENvdXJzZQ==","aWRlb2dyYXBoaWM=","IENhbWVyb29u","ZmluZEJ5SWQ=","KSIu","IERlcGVuZHM=","cml0b3M=","Lk91cg==","IHN1YnNpZGl6ZWQ=","Jy
wnIis=","IGdsZWFu","IEFzc2VtYmx5Q29weXJpZ2h0","cGljYWJsZQ==","IHVud2l0dGluZw==","IG9tZGF0","IEVhc2U=","IGVtYm9kaWVz","KHBEWA==","IFZvdGVy","QXNzaWduZWQ=","cmV2ZWFs","IGZlbmQ=","KHBhcnNlRmxvYXQ=","IGRwcw==","dHBsaWI=","YXNzZXJ0Q291bnQ=","eG1heA==","VW51c2Vk","KGZi","IHN1Ym1pdHM=","IFJlcGxpY2E=","KGR5","IGJhbmRl","LnNlbWFudGlj","IHNlYXJjaFN0cmluZw==","IFNhbmZvcmQ=","CWZ1bGw=","cHJt","X3V0aWxpdGllcw==","VU5VU0VE","IHNjYW5uZXJz","IGJmZA==","Lk9yZ2FuaXphdGlvbg==","LWN1cg==","UmFpbA==","IHhueHg=","JSk7Cg==","IG92ZXJwb3N0aW5n","VmlldA==","IHRhcGVyZWQ=","IGNhbWVv","IFZpZXdpbmc=","IGRpc21hbnRsZQ==","IGZpc3M=","IFNlbnRyeQ==","aGVhdG1hcA==","IMOhcmVhcw==","IEdyw7w=","IGppZw==","LmNsZWFyUmVjdA==","ZXZlbnRUeXBl","IHR1cmJ1bGVuY2U=","Y2tpbGw=","LkZvY3VzZWQ=","IGludGVybWVkaWFyeQ==","IE9iZXNpdHk=","YXRlZ28=","bW9udG8=","IEFsYW1vZmlyZQ==","IFNoZWlsYQ==","IENPTExFQ1RJT04=","Q2FyZEJvZHk=","IEhhYml0","UExBTg==","LnZpc3VhbGl6YXRpb24=","JSkuCgo=","IEludGVsbGlK","IEdsb3Zlcg==","LnNwYXRpYWw=","IGdyZWV0aW5ncw==","IE9wZW5GaWxlRGlhbG9n","ey8q","IFTDqWzDqQ==","IEVm","ICJbJQ==","IG1hZ2lzdHJhdGU=","IExpdGVjb2lu","IFNlbGU=","IGNvbW1lcmM=","cHJpbnR3","bmV4dEludA==","LmdldENoaWxkQXQ=","IEdldEN1cnJlbnQ=","IGV1cm9ww6k=","IEFJUw==","ZXR0ZW4=","LkV2ZW50UXVldWU=","YW5mb3Jk","dW5ha2Fu","LnNldE91dHB1dA==","IGNtZGxpbmU=","LGdldA==","IEhlYXJk","LmNvbnRlbnRUeXBl","ZW1k","IFJldG9ybmE=","YWNk","IFBsYXlvZmY=","YWNtYW4=","LndlYnNvY2tldA==","Q2xpZW50SWQ=","LmV4YW0=","IGF0dGVudWF0aW9u","LnNldENoYXJhY3Rlcg==","CUNvbGxlY3Rpb24=","5rCX","IHByZWRpY3RvcnM=","IFNoZXJpZGFu","cmltaW5hdG9y","KFN0YWNr","X1BLRw==","PScnKToK","KHBhZA==","IE5vZG8=","IGludGVyb3Blcg==","IFRyYW5zcGFyZW5jeQ==","CWR4","emVt","IHByYXRpcXVl","IGZpYnI=","KCk/Owo=","X01PQklMRQ==","LlJFRw==","X1lFTExPVw==","VGl0YW4=","JykKCgoK","IGNvbXBvbmVudE5hbWU=","IENvb2xlcg==","aXNGdW5jdGlvbg==","LmZlZWRiYWNr","IHBlcmZlY3RlZA==","IHBhZWQ=","LXNjcmlwdHM=","U3VzcA==","PE9wdGlvbg==","IER0","7YS0","J1JF","IE5STA==","IE1hbm55","IHJvZw==","IEdhcnI=","X2Nvb2tpZXM=","U3Bs",
"IHByb21vdGVycw==","KmR0","XEFQSQ==","IGV2b2tl","X0VudHJ5","IGZpcmVmaWdodGVy","aXZpZGFk","SmFjb2I=","IGxlZ2lvbg==","KHBvbA==","CWZsYXNo","b29rZWVwZXI=","LmNsaXBzVG9Cb3VuZHM=","IGdyYXBoaXRl","J2h0dHA=","X1RSSUFOR0xF","IERyb3BJbmRleA==","LnNtdHA=","IFVOU0lHTkVE","X1BJQ1RVUkU=","X09SSUVOVEFUSU9O","IE9QUA==","Iyc=","w6FmaWNv","Lmhpc3RvZ3JhbQ==","IEJlbm55","Pldl","IHJlcG9zdA==","IGZpYW5jZQ==","IEJvdW50eQ==","c3RyZXNz","RGF0ZXRpbWU=","Okg=","IFNwaGlueA==","Tm9ybWFsbHk=","YXBpeGVs","IHVzZXJBZ2VudA==","IE1vcmk=","L2xhYg==","Lk1PREVM","IEVtb3Rpb25hbA==","U2NhbGVk","ZGV2aWNlSWQ=","IOqzhA==","Y2Vhc2Vk","PElN","Y2VlZGVk","IGxpYnJhcmlhbg==","KW51bGw=","IG1pY3Jvbg==","IEZvdQ==","dWxlbg==","L2xpdmU=","cnNjaGVpbg==","ZmVh","IGhhYmls","IE5hdkxpbms=","bmVjZXNzYXJ5","LmNvZGVz","LW1ha2U=","IHBQYXJlbnQ=","X3JlbGF0aW9ucw==","IHJ1c2hlcw==","IHByb3BlbnNpdHk=","IFNraW5ueQ==","V0VTVA==","X2NvcnB1cw==","KHJlb3JkZXJlZA==","ZmRi","IEdldE1lc3NhZ2U=","QnJ1bg==","LnZz","IHDFgg==","IGNydW5jaHk=","Qm9vbQ==","UEo=","SmFrZQ==","57qm","JGNsaWVudA==","IH1dKQo=","IGNvbnZlcnNl","IEdSQVQ=","IENSUw==","Lkxvdw==","KHZhbGlkYXRl","X0NMSUNLRUQ=","LmJsdWV0b290aA==","CXh0eXBl","IGNsb3NlTW9kYWw=","X2ludGVudA==","IHByb2dub3Npcw==","c2F2","Q3Rs","IGNob29zZXI=","IFN1ZG9rdQ==","PVVzZXI=","LmNsZg==","CWV4cGxpY2l0","IHBvdGVudGlhbHM=","IEdlb3JnZXM=","IGVsaWM=","IHRzbGli","IFJhZ25hcg==","X3JlcHJlc2VudGF0aW9u","LWxlZ2dlZA==","aGFtc3Rlcg==","IEZpcmVzdG9yZQ==","Y29udmVydFZpZXc=","Q29tYmluZWQ=","INC00LXQuw==","IGVzcGVjdA==","IOOCkg==","IFN0YW1pbmE=","bG9va3M=","RU5BUklP","L2ZpeHR1cmVz","LnNtcw==","IHNlbWljbGFzcw==","IHNlbWljbGFzc2ljYWw=","LlBlZWs=","XSQ=","X0RTUA==","X0xWTA==","VklSVFVBTA==","IENhcGl0YWxz","IFNDVA==","LldoaWxl","IFN1YnN0YW5jZQ==","LWRvbmU=","IGVuc2xhdmVk","Y2xhc3NpZnk=","ZW50YW55bA==","IFZlZ2V0YWJsZQ==","X0RFUEVORA==","RGFuaQ==","IHF1aWVyZXM=","IGFiYmlhbW8=","IExpYmVy","YWZj","6YCf","cHJlZGljdGVk","LlBORw==","IFdoaXA=","Ly89PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT09PT
09PT09PT09PT09PT09PQ==","IOKJoA==","IOWM","REVN","Q0NB","L2Nsb3Nl","IC8vLzwv","IG1lc21h","IEJlaXJ1dA==","IEluaXRpYWxpemluZw==","4buZdA==","TU9OVEg=","IO2bhA==","UGFya2luZw==","Q29tZm9ydA==","IEVuZ2luZXM=","d2VycA==","QFJlcXVlc3RQYXJhbQ==","LUtleQ==","IGJhY2tsaWdodA==","cGFzc2Vz","Lm51bWJlck9mTGluZXM=","L0xpbnV4","KEhUVFA=","IEh0dHBVUkxDb25uZWN0aW9u","b3Nvcw==","Lnh4","IGZpbG1wamVz","ID09PT4=","b3B0aW1pemU=","Q2Fub24=","IC4uLiIK","ICciJzsK","IGPDqWxpYg==","IHByaW5jaXBhbG1lbnRl","IFByb3BlcnR5VmFsdWU=","T1VOQ0U=","IGV4Y3Vyc2lvbg==","IEFjY2Vzc1Rva2Vu","cmVxdWV0ZQ==","Vm9sdGFnZQ==","ZXhwbGFpbg==","fSkoKTsKCg==","VVJMT1BU","IGZ1bmdhbA==","R3JlZWs=","LWJsaW5k","IGZldWRhbA==","IFNvbmF0YQ==","IERpYWdub3Npcw==","JHhtbA==","ZWRpdGFyeQ==","IHN0aW11bGF0ZXM=","UG9udA==","Lkhhc1ByZWZpeA==","Ym9hdHM=","IFNjYXR0ZXI=","IEdFTkVSSUM=","IGZpc2hlcw==","PWxlbmd0aA==","IG1lbGhvcmVz","c3BlbnQ=","w7Rt","IEluZ3JhbQ==","Pi4KCg==","cGFyaXR5","LlZpZGVvQ2FwdHVyZQ==","IFR1YmVz","IGNvbWVkaWM=","IHByb2Nlc3NEYXRh","QURC","KG5ld1N0YXRl","5YGc","IFdlYnNlaXRl","X09mZg==","LGJvZHk=","IHN1YmNvbnRyYWN0","IGNodXRl","IGNhcnRlc2lhbg==","dGhyZXNo","LkNhcnQ=","IG1ldG9k","Y3VzdG9taXpl","THRk","CXNvdW5k","V2ViU2VydmljZQ==","IEhpbmRlcmVk","W3Jlcw==","KFRpbGU=","Y2FwYWJpbGl0aWVz","X09WRVJGTE9X","INGB0YHRi9C7","IENvY2g=","IHRlc3ROYW1l","V09SRFM=","XE1vZHVsZXM=","P3VybA==","X2NvbnRpbnVvdXM=","IFFJY29u","IHN0YXJlcw==","IGVqZWN0ZWQ=","IEludmFzaW9u","ZmluYWxpemU=","IGdldg==","PGc=","IEVkaXRvckdVSQ==","QmVybGlu","LmxpbmVFZGl0","LXJlZ2V4cA==","IHNsZWQ=","IEVBQ0g=","dWNv","IHNlZWRpbmc=","IGxvY2FsaXpl","ZXR1","X2FsbW9zdA==","cGFuc2U=","IFNlbnNvcnM=","X1NJ","KnNw","IFByb3BlcnR5SW5mbw==","IGFwcm94aW0=","IGRhdGFHcmlkVmlld1RleHRCb3hDb2x1bW4=","16A=","IGRpZmVyZW5jaWE=","TE9PSw==","IG9tbmlw","IFR1cmluZw==","IHVuaWRhZGVz","77yfCg==","LlJvd0hlYWRlcnM=","X0FDVElPTlM=","IERhbHk=","IGZvcnRpZmllZA==","IFdhZ2U=","LnNpbXBz","KGlzc3Vl","IGxlcHQ=","T3duZXJJZA==","J29yZGVy","5Y+N","56Wo","IHJld3JpdGluZw==","Lkl0YWxpYw==","IEZvcmdvdHRlbg==",
"KElM","IE5vU3VjaEVsZW1lbnRFeGNlcHRpb24=","ZXdu","IHBvcHVsb3Vz","IFNoZWQ=","IyR7","IEFsbw==","RGV2aWNlSW5mbw==","KElOVk9LRQ==","IHBlbmE=","IEJCQg==","LmJi","IHRvcnM=","IGNvbmR1Y2l2ZQ==","LXB1cnBsZQ==","IHNxdWFyZWx5","Ly8tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0KCg==","0LrRgNGL","ZmFzdGE=","IGNwdA==","IEluZ2Vu","IHs/fQ==","0YPQsw==","UGVybA==","LnNreQ==","LWF1dG9tYXRpYw==","aW1wbGVtZW50","b3JubWVudA==","LklNQUdF","LVNwZWVk","CUZpZWxk","IHBvdW5kZWQ=","IExa","IGF1dG9Gb2N1cw==","IOC5gA==","LkNvbXBhbmlvbg==","IFZpbQ==","dW5jaWE=","X3NrYg==","IHVubWFycmllZA==","IFNvdXI=","Z2FhcmQ=","TGVvZA==","IOCq","LkNsb3Vk","IHJlaW5mb3JjZXM=","J10+","IGZlbGl6","IFVBVg==","cmFuY2Vz","5Y2B","VG9MaXN0QXN5bmM=","LkV4ZWN1dG9y","LXRz","ICcuJzsK","IEtpbmVjdA==","44GE44GG","IGJldm9y","IEV4dHJhY3Rpb24=","X2RyYXdlcg==","JHN1Yg==","IHVwbGlmdGluZw==","LmJ0bkV4aXQ=","KCcvLypbQA==","UkVESVM=","c3RkZXhjZXB0","ZGVv","IGdpdmVy","X2JpbmRpbmdz","VG9EZXZpY2U=","Lm1p","IEVzdGltYXRlcw==","YWxsZWxl","Pz8/Cgo=","IFN0cmVhbXM=","IGFmZmxpY3Q=","LnNhcA==","IHF1YWxp","IEdhdWw=","U3BlY2lmaWVz","IHpr","IHNhbml0YXJ5","IG5ld0luZGV4","c3BlY3M=","IGZyYWdtZW50TWFuYWdlcg==","IE5lY2Vzc2FyeQ==","CVNwcmluZw==","PX4=","IE9NQVA=","Y2FyZWVy","KCItIik7Cg==","IERhcmxpbmc=","aXRhZw==","OnBr","IFN0ZWxsYXI=","IGluZmVydGlsaXR5","bGV4aWJsZQ==","VW5hcnk=","IDpdLA==","Lk5FVw==","Z3N1Yg==","X1VGdW5jdGlvbg==","LnNsaWRlcw==","IGRpdmVyc29z","X2xvY2Fscw==","XFwv","IHBjYXA=","IE9vaw==","LkRhdGFHcmlkVmlld0NvbnRlbnRBbGlnbm1lbnQ=","ZXJzb25pYw==","IHRyZWJ1aWU=","IHNlcXVlbnRpYWxseQ==","YWJhcg==","IElQQ0M=","IGRldm91dA==","XEhlbHBlcnM=","RVR3ZWV0","IHRyYWJhamFy","IFdpbGtpbnNvbg==","IGRhw58=","SHVtYW5z","VGVhY2hlcnM=","IERhdGFWaWV3","IFlvZw==","IGplZGU=","IGFtYmlhbmNl","dHJhbmQ=","IGVycmF0aWM=","IHThu6s=","LnJhYmJpdA==","IG5ld2JpZQ==","IGVudHJhbmNlcw==","IG9ydGhvZ29uYWw=","IERJU1BBVENI","IFNjaHJv","X1RVUk4=","Omludm9rZQ==","IHRhbnRhbA==","IFpvbmVz","c3RhdGVtZW50cw==","TGltaXRz","IEfDpA==","a
WHFgmE=","LnByZWRpY2F0ZQ==","LkZS","IENocmlzdG9waA==","LkNvbnM=","IEhvcnRvbg==","X0N1c3RvbWVy","CU1E","IGVsa2Fhcg==","IE1TRQ==","IElzQWN0aXZl","XSop","XFVuaXQ=","IGVv","Rm9yT2JqZWN0","ZWxpYWM=","LWRldmVsb3BtZW50","IHRlYWw=","IHN0aXRjaGVk","IE91dGNvbWU=","b25jw6k=","ZW1iZWRkaW5n","IG9uTmV4dA==","IO2VtOuLuQ==","KGV4aXN0aW5n","LmJpZA==","CWFzc2VydEZhbHNl","e2w=","TEVycm9y","X2J1bGxldA==","KEh0bWw=","IGVCb29rcw==","cGVyUGFnZQ==","L3F1ZXN0aW9u","LmZha2U=","Lm1i","X2RsbA==","IGN1bXNob3Q=","IE1hZGFnYXNjYXI=","SE9MREVS","IHBlc3F1aXNh","X0RFQ0xT","XSxbLQ==","IEFsYmFuaWE=","LXRvYXN0","IHByb3RhZ29uaXN0cw==","IG15b2NhcmQ=","IHdhbGtlcnM=","ID09PT09PT0=","L1BhZ2U=","PTw/PQ==","IGVucXVhbnRv","X1RSVU5D","IHNlcHRlbWJyZQ==","IGxheW91dFBhcmFtcw==","ICcuLi8uLi8uLi8uLi8uLi8=","IFRyYWZmb3Jk","IHBhbGF2cmE=","IHJ1bmRvd24=","IGJyaXR0bGU=","w6RjaGU=","LllFTExPVw==","IENlcmVtb255","IG5ld1RleHQ=","dmVjcw==","IGVzc2Vu","IE1ldG9kbw==","IEdVSURF","IHBvc3Rwb25l","IFZTdGFjaw==","WyIk","IE1pY3Jvc3lzdGVtcw==","XFBhZ2U=","cG1hdA==","X0ZBVUxU","X21C","U3RhdGVNYWNoaW5l","RmFjdWx0eQ==","Lnd4","IE1vemFydA==","YW5pbWU=","IHB5dA==","IEJ1a2tpdA==","LUlORlJJTkdFTUVOVA==","IHNlYXJjaGVy","LWJhc2tldA==","IG9tYXM=","IFR1bmlz","IFBsYXR0","IHsNCg0KDQo=","eWFo","dG9sdWE=","SW50cm9kdWNlZA==","c3VwcGx5","IG1pc29neW4=","IFdhaXN0","IEVI","LW9wZXJhdG9y","IGRhcmtlbg==","IENvc21pYw==","IGdsYWNpZXJz","IA0NCg==","XVtf","Q29tcGFueUlk","IFJlY29uc3RydWN0aW9u","aXp6bGllcw==","IGzDrWRlcg==","IGNvbGxlZ2lhdGU=","IFBldHR5","T1VSTkFM","ZGVjb3JhdG9ycw==","cmFtcw==","KCgK","IEFzdHJvbm9teQ==","IHJpbw==","IEN5cmls","anVhbg==","IHJlaW5j","IFBpc3RvbnM=","IEJ1c3k=","cHRyb24=","IHBvbW9j","CVJUQ0s=","QnV5aW5n","Ly8qKgo=","IFdyYXBwZWQ=","IE1lZXI=","IGltYXA=","IGJlc3RpbW0=","IEFnaWxpdHk=","LlRvVGFibGU=","c3RpbmVuY2U=","XSkqKg==","IEF1dG9tYXRlZA==","ZHNw","IEdhcmxpYw==","aW9kZQ==","ZXhlbHM=","aW50cm9z","IGJlc3Rvd2Vk","KHZpc2libGU=","IGh5ZHJhdGVk","bm94aW91cw==","IEF1dGhlbnRpY2F0aW9uU2VydmljZQ==","IHNob3dNb2RhbA==","IGNvbXBvc2Vycw==","R0VORVJBTA=
=","Q1RT","IFNocg==","Y3JlYXQ=","IGNsb3NldHM=","IGdyb3VuZGluZw==","IENPTU1FTlRT","ICsj","IGdyb3VuZHdvcms=","KGluZGV4UGF0aA==","Z3JhdGlz","dXBwaWVz","IGt2bQ==","IGN1YWxlcw==","LkRlZXBFcXVhbA==","IGFsbG95cw==","LWJ1ZGdldA==","KF9fXw==","IGNvbmVjdGFy","LXJhZA==","IGl0Y2g=","bGFtcA==","LmdycA==","LWFkZG9ucw==","IHNlYWJvcm4=","IG5lZ2xpZ2VudA==","X0RldGFpbA==","IHNlcmVuZQ==","IGJhcnJhY2tz","IGJx","IFNlY3Q=","KGRhdG9z","IHRoZW1hdGlj","IHBvbGx1dGVk","CWFuaW1hdGlvbg==","SHVnaA==","RXhlY3V0YWJsZQ==","KCcvJylb","IGFwb3B0b3Npcw==","IGFiYnJldmlhdGVk","Zm9vbg==","UmFua2Vk","CWhpdA==","CQkgICAgICAgICAgICAgICAgICAgICAgIA==","Q29udGludW91cw==","IG1vdmVUbw==","REJPYmplY3Q=","IGNvbmNlaXZhYmxl","IEd3ZW4=","IMOhbGw=","X18oKQ==","IExhbmE=","IGVpbnplbA==","IHJlY291bnRz","eXN0ZW1z","b3dhbnk=","KTo/Pgo=","IEFrcm9u","b2xpbmk=","Q29ycA==","YXBocmFn","ICInLg==","IGNvbnZlbmVk","IC4uLi4KCg==","IGNhbGxlZQ==","IENsb3Zlcg==","LmRlc2NyaXB0b3I=","Lkl0ZW1TdGFjaw==","IHBlcnZlcnNl","X0NF","PUAi","LS0tDQo=","IGJldg==","c3VtYQ==","YWNjdW11bGF0b3I=","IGxpemFyZA==","INC+0Yc=","Z2V0RGVzY3JpcHRpb24=","IFNhcmFz","Lm5leHRTaWJsaW5n","IGVsYXN0aWNpdHk=","IGNoYWM=","bW92ZWQ=","X1RvcA==","dHJlcg==","KGRvd24=","ZWxlbXM=","b2JpbGk=","LnBvc3RNZXNzYWdl","ICjiiA==","Q3N2","IFlvc2VtaXRl","c3dlZXQ=","TUFUUklY","aWdyYXRlZA==","IGZvcmdpbmc=","IFBhZ2VTaXpl","dHJhbnNmb3Jtcw==","PVlFUw==","IGRpc2Nsb3Npbmc=","IFBlZGlhdHJpYw==","IERlYWRseQ==","UmVzb3VyY2VJZA==","LWJpbmFyeQ==","IFJvd2U=","IENhaXI=","X2V4dHJhY3Rpb24=","RGVjcmU=","IE9ic3Q=","cGxy","IFBoeXNpb2xvZ3k=","bXZj","aHRp","LlRl","IGV4dHJhdmFnYW50","IEFudGli","w7NzdA==","b3V0ZGly","IGNhcm5l","Vmlld1BhZ2Vy","IGltcGxhbnRlZA==","U2VhcmNoUGFyYW1z","w7xyZ2Vy","Y29uZGU=","YWNlbnRl","X0NVREE=","JHZhbA==","IldoaWxl","IHRlbXBMaXN0","IHN5bmFnb2d1ZQ==","Y21j","INGA0LDQsdC+0YLRiw==","IHNlem5hbQ==","IHNlc3N1YWxp","IGNhYmV6YQ==","ZXTDoA==","IGZhw6c=","Z2Vo","Y2VkZQ==","IlNvbWU=","Om9u","LWZvcm1lZA==","YnluYW1l","IOuwmO2ZmA==","IG5hw68=","IEFVRw==","IGVhc2Vk","XSl7","KHB0aHJlYWQ=","IGplZGVt
","KGZpeHR1cmU=","IFBhcmw=","XX0pOwo=","IGV4cHVsc2lvbg==","IEluZXRBZGRyZXNz","IE1MUA==","LicpOw==","IG9ybw==","IFNldmlsbGE=","IGZvcm11bGFpcmU=","LXRlcnJvcmlzbQ==","L1dlYkFQSQ==","KmFuZ3N0cm9t","Y3Jhd2w=","X2xvYW4=","X0RJR0VTVA==","IEtub3h2aWxsZQ==","LmdjYQ==","IERpeQ==","bnRhZw==","YWJsZVZpZXdDb250cm9sbGVy","LkZlZWQ=","LXNoYXJlZA==","IGNvY2Np","X2ludml0ZQ==","IEJ1Y2tpbmdoYW0=","IEdsdXRlbg==","IGVuZGVtaWM=","UmFpc2Vk","IHF1ZXJ5SW50ZXJmYWNl","IG1hcnRpbg==","QuG6oW4=","IGhhcmU=","IGRlaW4=","cmFyaWFu","bXlmaWxl","IGFuZ3Vpc2g=","VGV4dG8=","IEJVRkY=","KGxu","bWFycw==","X3N1YnRpdGxl","X2dpZnQ=","IGJvbGRseQ==","IFNpbmd1bGFy","KExvZ0xldmVs","PEFydGljbGU=","L3N0YXRz","INC/0L7Qsg==","IGl0ZW5z","IGRlbm9taW5hdGlvbg==","LkRhdGFHcmlkVmlld1RyaVN0YXRl","X0xS","IER1Y2hlc3M=","CUJsb2Nr","dHJhY2Vy","LUNO","XEFwcERhdGE=","Lmxpc3Rz","KFJvdXRl","IEdPT0RNQU4=","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgCg==","IHRpbmhh","IGV2ZXJsYXN0aW5n","YURhdGE=","KGNvbXBhcmU=","IHJwdA==","XFBocA==","LkZJTEVT","IHNwYXJpbmc=","U2Nhcg==","INin2YTYqg==","IEJldGhsZWhlbQ==","IGJhY2twYWdl","c3BsaWNl","ZsO2cg==","QGR5bmFtaWM=","4bupYw==","7KY=","LnBhZ2luZw==","IEJlbG1vbnQ=","LkVYUA==","IGludGVybGU=","IENoZWNrbGlzdA==","IFVuaWNvcm4=","QkVTVA==","Z2V0UGxheWVy","LmFyZ3NvcnQ=","IHdpdGhTdHJpbmc=","IE1vZGVyYXRl","fSI+Cg==","LnNldEltYWdlQml0bWFw","IHRyZW5jaGVz","IGdlbmVyYXI=","IGZlcm1lbnRlZA==","IGRlanRpbmc=","Q3RybHM=","IGRpc2FncmVlcw==","UXVpZXQ=","KFNRTEV4Y2VwdGlvbg==","IFRlbnNvckZsb3c=","T05B","UG9ydGxhbmQ=","LlB0cg==","bGx4","YXN0b24=","Q2x1c3RlcnM=","IFVzdWFyaW9z","IGtoaQ==","IGdpYQ==","IERvbHBoaW4=","xZFz","IGx1ZGVy","IGRpc3Bvc2l0aXZv","IFZ5","b21wc29u","IO2VoA==","IGtjYWw=","IENhbGNpdW0=","U2VjdGlvbnNJbg==","IENhc2M=","IGdyYXR1aXRp","b3NvbWFs","IHVuZGVyY3V0","IENhaA==","OnBhcmFtcw==","IHJldHVyblVybA==","IEVyZQ==","w6lyYw==","IGludGw=","fS8jew==","IG91dHB1dFBhdGg=","IGZhbHNlaG9vZA==","IFVzZXJSb2xl","PEhhc2hNYXA=","IENyZWF0ZVVzZXI=","IENvd2JveQ==","CVVzZQ==","XSgK","IFNob3BpZnk=","Vmlld1N0Y
XRl","QWR2YW5jZQ==","LXRhbms=","IlQ=","IEplbnM=","PW9wdGlvbnM=","KCIuLg==","Lm1pbWU=","IENSVA==","IGjDpHR0ZQ==","KHNv","LlVOS05PV04=","IGRhcsO8YmVy","IENPVkVS","R2Vt","Q3Jv","X1JFQ1Y=","X2hpZXJhcmNoeQ==","Q2hvb3Npbmc=","SkVYRUM=","IGRvcnNhbA==","KyI8","IE5leQ==","V29tYW4=","QmV6aWVy","IHJpZ3M=","IG9udHZhbmc=","77yM5YiZ","IEdhdXQ=","Y21i","TmhhcA==","IG1vbm9j","IGVuZXJnaWE=","b2JzZXJ2ZU9u","c3Rha2Vz","LSot","IE5hY2s=","fX0iCg==","ZXJ2YXM=","IEhpbmRlcmVkUm90b3I=","QWRqYWNlbnQ=","IEludGVybmFjaW9uYWw=","CWFyZWE=","IPCflA==","IHNwYXJrbGU=","KCkuXw==","LmlkZWE=","IHV0cmVjaHQ=","IG1hcHBlZEJ5","IENvbG8=","CVRS","UG9zdGVy","IGNvbWJhdGluZw==","IFllbGxvd3N0b25l","aWVycmV6","YWNjdA==","IHPDoWNo","Lk5ld3M=","IGZpZWxkVmFsdWU=","IGNheg==","IEZyZWVt","CQkKCQo=","IHVzdXI=","IHNvbGE=","IGN1bWJlcnNvbWU=","IGNhdGFwdWx0","Ii4v","IEV4ZWN1dG9ycw==","IEFtZXM=","ICc8JT0=","ZmlsbG5h","LOKAlA==","OlNldFRleHQ=","LWNhdGVnb3JpZXM=","LWFyY2hpdmU=","IFBvbGx1dGlvbg==","Lk9m","4oCcQXQ=","X0NIQVJTRVQ=","KENvbHVtbg==","4oCZKQ==","IHVubWlzdGFr","IGVhcm0=","IFBsYXRmb3Jtcw==","IE1vbWVudHVt","VmVjdG9yaXplcg==","cmF3ZXI=","KHBhc3Nwb3J0","KHBsYW5l","IHJlcHJlc2VudGE=","IHB1YmtleQ==","IEphaW4=","IG1lbm5lcw==","IGluc3RhbnRhbmVvdXM=","IGV0aGVycw==","IG5lc3Rz","IFBhdHRvbg==","IEhBQ0s=","cGFja2luZw==","SVNlcnZpY2U=","IHJvY2tlcg==","IGZpY2E=","IEdsYWRpYXRvcg==","IFVQQw==","IExvd2VsbA==","YmVhcmVy","IHZpcGVy","X2dsb2I=","IG1hc2hlZA==","IGhhaXJzdHlsZQ==","IHVuZGVybWluZXM=","cmVzdGF1cmFudHM=","IHJlYWN0aW9uYXJ5","IGJpbGxpZw==","fSIpOw0K","IHZpc3Rhcw==","IG9wZW5kaXI=","CWxhYmVscw==","YWxsaXM=","IFdvbGZm","IENQQw==","IHJhaWx3YXlz","IFZhdWdoYW4=","IEFza2luZw==","Y2Fp","IEdu","X1BST0Y=","LVNlcA==","LmN1cnZl","TXVsdGlwbHk=","0YDQsNC90LjRhg==","IG1lZXR1cA==","Z2V0RGI=","KEdVSQ==","IHJlaW1idXJzZQ==","OnJlc3VsdA==","VHVtYmxy","LkNsb3NlZA==","IGNvbmZvcm1z","IEhvaw==","aWVkYWRl","TmV3TGFiZWw=","IG5hdkN0cmw=","RG9jdG9ycw==","IOyViA==","IGJvdXRz","IGlzYw==","Lyc7Cgo=","dWhs","LlVp","LXNhbWE=","IENhbm9uaWNhbA==","IG1ldGljdWxvdXM="
,"IGdyb3Rlcw==","IC8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8=","ZXRlcw==","IGxhbmd1ZQ==","IGZDaGFpbg==","IFR5cGVmYWNl","IEJyaWdoYW0=","aWFyZQ==","J8OpdGFpdA==","IEVGRg==","IGRlc3Ryb3llcg==","X21hdHJpY2Vz","TsO6bWVybw==","Y2FsbGFibGU=","X3BlcmlvZHM=","c3RydWs=","bWFq","LnJs","LmxpZnQ=","2YrZhA==","w5A=","UmV0VmFs","RGVudmVy","IFRyaWJ1dGU=","a2l5ZQ==","emV3","IFNwYXJl","IGxldWtlbWlh","IHdhaXRyZXNz","IHBsdXTDtHQ=","QWxpYXNlcw==","IExvY2F0ZQ==","5rY=","SWRlbnRpZmljYXRpb24=","LnRlbA==","LWRheXM=","dGVycml0","aW1idXM=","IEJ1dHRlcktuaWZl","64K0","cnVwdGN5","IEdyYWRlcw==","IHVuZGVyc2lkZQ==","IGhhcmRzaGlwcw==","dW5laQ==","LWNvbnRhaW5lZA==","IFsnLg==","T2Jzb2xldGU=","LlJldHJvZml0","IHVyYW51cw==","X3JnYmE=","IHJhcGVz","IEthcmU=","W+KApl0=","IEZpbmNo","LmJ1bmlmdUZsYXRCdXR0b24=","cXVpc2Fy","IE51cnNlcw==","ZWdhZGU=","IGhu","RXhjbHVkZQ==","IHN0b2NoYXN0aWM=","IHNvdHRv","IFBlbmFsdHk=","IHNvbnN0","IHJvc2E=","X0ZpbmQ=","IEludmFsaWRhdGU=","TGlzdEl0ZW1JY29u","JywNDQo=","X3BkdQ==","IE1lYWxz","YWrEhWM=","IE9vcHM=","IE5vdGljZXM=","IGRlcml2YXRpb24=","W10NCg==","6Lqr","eXN0ZXJ5","X2ZpdmU=","RWFybg==","PWV2ZW50","IG9ncg==","LVJFQUw=","IExpcHM=","c2VsZWN0b3Jz","YWRpZXI=","IHNldEJhY2tncm91bmRJbWFnZQ==","KHRoaW5n","IHNvZnRiYWxs","XHhhYQ==","KGlkZW50","IEp1cnk=","IFZveWFnZQ==","IFRBcnJheQ==","KFBhaW50","V2FybQ==","RVhURVJOQUw=","YXN1","ICghKCg=","LkZFVENI","IHNraXJt","T1JFRA==","Y2FuY2VsbGVk","aXR0ZWw=","IHNlZWR1","bGljaGVz","b2hv","LHJldGFpbg==","KFdlYkRyaXZlcg==","aXB0YWJsZXM=","RVJJQ0E=","IGNsZWFubGluZXNz","ZWxsb3dvcmxk","IGNvaGVzaW9u","Z2lzdA==","XS4n","ZXJnaW5n","IGlzcA==","Lm9mZnNldFRvcA==","KGZhY3Rvcg==","dW5pdmVyc2Fs","IFBsYXliYWNr","IEJ5dGVTdHJpbmc=","IGRhbW5pbmc=","IFNTUg==","YWN1cw==","IFN0YXRlbg==","IOWVhuWTgQ==","IFBlZQ==","IFNhbXBsaW5n","YXRvcmlh","c3RhcnRJbmRleA==","5ZCr","IOy0iOq4sA==","IE9saXZlaXJh","IEZsYWtl","Ym9vbQ==","X01TSw==","IEZhY2luZw==","b3JnaGluaQ==","Zm9vZHM=","VHJlZVdpZGdldEl0ZW0=","IEhBTEY=","IiIiKQo=","IENIQVBURVI
=","IEV2ZWx5bg==","Pis=","IEhvcm5ldHM=","d29rZQ==","IC9b","YXRob2xpYw==","LnNlZ21lbnRz","Lm5hdmlnYXRlQnlVcmw=","IE1hbnVz","IHBlcHRpZGVz","IGZsZWV0aW5n","IEFUVg==","IFNoaWI=","SW50QXJyYXk=","IG1veg==","cHJvYmxlbXM=","b2duZQ==","Lk90aGVy","QWRtaW5pc3RyYXRpb24=","JSUqLw==","Il09PQ==","IEFuZHJlcw==","QWRh","aGludHM=","XCIiOwo=","KHBuZw==","IOqwgOuKpQ==","44OK","cmVqZWN0ZWQ=","IG1vdmVycw==","546H","IHBhcmVudGhlc2lz","KGFzc2lnbnM=","RWxpdGU=","UmVtaW5kZXI=","IHN1ZmZlcmVycw==","IFJlc291cmNlQnVuZGxl","dGhhZw==","PicNCg==","YW50aW5v","UGVyaXBo","IFNoYXJk","Q2hhcnREYXRh","KGpq","IG9zdGF0","aHVnZQ==","LWF1dGhvcmVk","LmNp","IHB5bXlzcWw=","IGxpbmVycw==","IEFUUw==","Pkxhc3Q=","KSIpCgo=","IGdldHBpZA==","R2V0U2l6ZQ==","IGV4dG9ydGlvbg==","W2Zsb2F0","IEVJTkE=","L0Jhc2U=","LnNldE9uQWN0aW9u","0L7Qu9GP","IEdsYWNpZXI=","X2F6","IHRyYW5zcG9ydGU=","IFNtcw==","dGh1bWJz","IHRyZWFzdXJlcg==","IG16","aXN0aWs=","UkVESUVOVA==","IGlzaQ==","X3N0dWZm","UE9TSVRPUlk=","c3RhcnRkYXRl","IFppbmM=","5rG9","IGthaw==","IGVyZmFocmVu","X0NPTUJP","IHVjd29yZHM=","LlBheQ==","IGtpbmdkb21z","IGV4Y2VsZW50ZQ==","aWduaXRl","X3ZhcmlhdGlvbg==","IG5hdmVnYWRvcg==","5LiT","dmlld0NvbnRyb2xsZXI=","cmlyZQ==","SG9uZXN0bHk=","Q2FzY2FkZQ==","ZXRyYWlu","QXJnZW50aW5h","Y3E=","IE1hcmlhbg==","L2Fy","IGludGVyZXNzZQ==","dXJhaGFu","KFBD","IGZyaXZvbA==","IFRydXN0ZWQ=","KElDb25maWd1cmF0aW9u","IFJpaGFubmE=","ZW5kb3ph","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg","IHByb2NsYW1hdGlvbg==","IHByZWRvbWluYW50","IGNvbnN0cw==","LW5lY2s=","V29sZg==","LmNoZWNrYm94","IHN0YW56YQ==","IGVudGVuZGVy","Ly8o","SGFuZHM=","IGJpbGxlZGVy","IFRvc2hpYmE=","YWJiaXg=","RU5DSUVT","IGppbQ==","UFVS","Lmxlc3Nvbg==","IGJlcnRo","bGFyxLFu","Qmxv","CWV4dA==","ZWVs","IGRlbWFzaQ==","IGNvbG9uaXphdGlvbg==","L2Rpc2M=","77yP","Q2VydGFpbmx5","566h55CG5ZGY","IGpvZ2Fkb3I=","dcOp","Q29sdW1uc01vZGU=","IEpW","IEluc3RpdHV0","X3NwZWN0cnVt","LmRlbnNl","IFNob3J0Y3V0","IHNlYnVhaA==","IGZsYXNoeQ==","UmVnYXJkcw==","IHNoYXJwZXI=
","Y2FuY2VsbGF0aW9uVG9rZW4=","X2RldGFsbGU=","IFNjYXJsZXR0","INC80LDRgg==","IG5lZ29jaW8=","4LiW","IEpX","d2ViZHJpdmVy","LndhbGw=","IHhhbWFyaW4=","b3BhcXVl","LkFkZFBhcmFtZXRlcg==","KENvbnRyb2xsZXI=","LWFib3J0aW9u","X0ZVTkNUSU9OUw==","Q3VzdG9tZXJJZA==","IHZlbmly","IEJ1c3Rlcg==","X3ByZWRpY3RlZA==","L3J1bGVz","LU1ldGhvZHM=","IGdkemll","Il0nKTsK","IFB4","Q09OUw==","LlNsaWNl","IHJldmFtcGVk","IFRhYmxlVmlldw==","IGRpY2tz","IO2YuOy2nA==","IEF1eGlsaWFyeQ==","T3BlcmE=","L3Jj","IHVudGhpbmthYmxl","IGRlZHVjdGVk","bHo=","IExhZ2U=","IFJvd2xpbmc=","cHJvdmVk","T2ZmZXJz","LHNldA==","UkdCTw==","IEZV","IENlbnRPUw==","b3pv","IFRyb2phbg==","IG1hw7FhbmE=","IC8vPQ==","Kio6","IHtcCg==","IEJvd2Vu","S25vd2luZw==","IOW6","PS09LT0tPS09LT0tPS09LQ==","IGViZW5mYWxscw==","XT17Cg==","Qk1J","KCk7KQ==","KHBlcm1pc3Npb24=","QW5kZXJzb24=","IGRlZ3JhZGU=","U29hcA==","dcWf","IFB1cHB5","IEV0aGlvcGlhbg==","IFRFU1RJTkc=","ZW5zZXg=","IGRyZXNzZXI=","IENob3Jl","VW5oYW5kbGVk","QXNzb2NpYXRl","LmFkZGl0aW9uYWw=","IGRpZmbDqXJlbnRlcw==","aXNxdWU=","IG5lY2Vzc8Ohcmlv","IGdlbmVyaWNz","KHBm","IFxg","IE5lYXJieQ==","YXBvcmF0aW9u","IFRoZW1lRGF0YQ==","V2lGaQ==","LlJlYWw=","YWN5ag==","TGl2","IHBzeWNob2xvZ2ljYWxseQ==","bWV0aG9kUG9pbnRlclR5cGU=","IE5pa29s","IERlZGljYXRlZA==","X1BPUlRT","IEphZQ==","TlNBdHRyaWJ1dGVkU3RyaW5n","IGFtYmFzc2Fkb3Jz","IEhhbmRsZXJz","IEFuYXQ=","IHZvY2FsaXN0","IHJhcg==","IGRldnVlbHZl","Lmdz","IHhjYg==","IHN1Ym1vZHVsZQ==","IEFTU0lHTg==","dXJlZW4=","IGNsYXNlcw==","ZW1vdGg=","X0NOVEw=","X2p3dA==","IOuniA==","IG91dHBvc3Q=","IEluYm94","CWZsZXg=","IEdyb2Nlcnk=","SUxJTkU=","Lm1vYg==","IENvbnN0cg==","XT1d","KHdhbGxldA==","IHNlZGU=","ZmFs","IGltcGFzcw==","PXtbJw==","IHVuZm9yZQ==","ZnVzZQ==","X0xlYW4=","IGF2YWxhbmNoZQ==","PXJhbmQ=","IGFkdWx0ZXJ5","IEdlZQ==","CUlucHV0U3RyZWFt","IGNhYmVs","X01PVU5U","IG5vdGljaWFz","IFJhdW0=","IGJ5dGVhcnJheQ==","IG9uSGlkZQ==","ICkuCg==","JGluc3RhbmNl","IGRpZFNlbGVjdFJvd0F0SW5kZXhQYXRo","YWNhbQ==","LWNvbGxlY3Rpb24=","IHVwaGU=","UG90ZW50aWFs","IFNEUw==","X2FwcHJvdmFs","RGFtbg==","OmNvbnZlcn
Q=","IE1vZGlmaWNhdGlvbnM=","IOyYiA==","IHVuYWI=","IHNjcm9sbGVk","KyIpOwo=","IGdhdWNoZQ==","IEhPTA==","YW50YW5hbW8=","IGNvbHVtbkhlYWRlcg==","CVpFUEhJUg==","emFj","IG91dGluZ3M=","IGFwcGxhdWRlZA==","aG9yaWE=","bW9keA==","IG1pbGxlbm5pYQ==","Jm0=","Lkpzb25JZ25vcmU=","IHBpb25lZXJlZA==","IENhdnM=","CWpz","ZGVwYXJ0dXJlZGF5","X2ti","LlBhdGllbnQ=","IHBldGFscw==","cG9ydHJhaXQ=","In19Cg==","SG9tZUFzVXBFbmFibGVk","LnByZXR0eQ==","LGNsanM=","IG1lZGlvcw==","aGFzaGVk","ZW1vZGVs","IE1vam8=","LmZyb21SR0JP","LXBl","IGludGltYXRlbHk=","IGVsZ2c=","W107DQo=","L09ic2VydmFibGU=","IG9iZWRpZW50","IEphbWFs","UmVxdWlyZWRNaXhpbg==","IExpc3RWaWV3SXRlbQ==","CXBsYWNlaG9sZGVy","X3RyYW5zYWtzaQ==","PFNlcnZpY2U=","IGVuc3VlZA==","IFJpY2Fu","U2FnYQ==","QVVESU8=","IGpt","LXNhbGVz","LW11bHRp","JSI7Cg==","IGNsYXNzaWZpY2F0aW9ucw==","IHTDo28=","Q29hbA==","OycpOwo=","IGRlbGlnaHRz","X2h6","X2JvbGQ=","REVQRU5E","INCh0L7Qt9C0","YXRlZQ==","X3N1Ym5ldA==","IFRvd25zZW5k","IENhc3RpbGxv","IHBydA==","JC8p","IGZpbGli","KCcvJylbLQ==","IHVwaG9sc3Rlcnk=","IGNvbXBvbmVudGU=","IFhG","LlJldmVyc2U=","X3R1bm5lbA==","SW1tZWRpYXRlbHk=","LW1vdmU=","IGFsaXN0","V1ND","c3RydWN0dXJhbA==","aXN0b3JpY2Fs","VGFuZ2dhbA==","IENPVVJU","IG9ic2N1cmVk","IGxhbmRzbGlkZQ==","IGJlZHNpZGU=","IGJhcmFuZw==","LWVsZWN0ZWQ=","IGNlcmFtaWNz","LS0qLwo=","IFdhbm5h","RHlu","IHZlcnNjaGllZGVuZQ==","IGluZHVjaW5n","IGZsdXRl","LkFwcGVuZFRleHQ=","IFp1Yg==","IFB1bGl0emVy","OmJvdGg=","Lm1heExlbmd0aA==","LlByb3BlcnR5VHlwZQ==","YXd5","aXRlbU5hbWU=","IE5hcnJhdGl2ZQ==","cmV2b2x1dGlvbg==","IGhhbHRlbg==","IEVycm9yUmVzcG9uc2U=","Z2F0aGVy","L3V0aWxpdHk=","Oicn","IEtlZQ==","IE9seW1waWE=","Q2xpbmljYWw=","OmdyZWVu","IFBsZXg=","IEtlbnNpbmd0b24=","IFBob25ldGlj","IGRpc3RyaWJ1dGVz","X2V4ZW1wdA==","V2F0Y2hpbmc=","Lk1pc2M=","IGRvbWFpbmU=","OiIu","44OV44I=","X01PRFVMRVM=","IGhhYmxhcg==","IExhb3M=","LnNldFRleHRTaXpl","LnBhdXNlZA==","X1RX","IG92ZXJ3aGVsbQ==","IGhlbWF0","THVja2lseQ==","IFNFTlQ=","IEludmVzdGlnYXRvcnM=","Pih7","KGZvdXQ=","IEFVWA==","LnJhd1F1ZXJ5","LXN0cm9uZw==","IHJlc2VtYmxlZ
A==","IFNoYWZ0","IFhJSUk=","c3VnZ2VzdA==","IHNpbmdhcG9yZQ==","X2FiaWxpdHk=","JGs=","CWlOZEV4","XEltYWdl","Q2FkYXN0cm8=","LnBpdm90","IG1hbnBvd2Vy","X2F0dHM=","LnNldEZpbGw=","ZXdvcmxk","Y29uc3Rz","R2V0V2lkdGg=","IGdyYXR1aXRh","IFBldHI=","LWFuc3dlcg==","IEhlbWlzcGhlcmU=","IENhag==","IFRyYWRlcw==","xIdp","IEZyZWRkeQ==","T25DaGFuZ2U=","IHBvcm5vZ3JhZmlh","IFNVTU1BUlk=","X21lYXM=","IERSSVZF","IENyZWU=","X21hbGU=","IHN1aw==","IG1hbmV1dmVycw==","c2V0VmlzaWJpbGl0eQ==","YWxsaQ==","IGRpc2NyZXRpb25hcnk=","cmVnYXRpb24=","WVNUSUNL","OmhyZWY=","IHRhcmFm","IGNodQ==","IEBb","RW5vdWdo","LlRyYW5zZmVy","SWZOZWVkZWQ=","OildKQ==","CSAgICAgICAgICAgICAg","W2F4aXM=","VHJhbnNsYXRpb25z","LnNlcnZlcnM=","IEtFRVA=","JywpCg==","c3BvbnNvcg==","YXJjaGl2ZXM=","LlVsdHJhV2lu","IEhvbm91cg==","J10pKTs=","IGluZWxpZ2libGU=","IEFudHdvcnRlbg==","IEFwcGxpY2F0aW9uRXhjZXB0aW9u","IGNhdGVnb3JpZQ==","IFdFSUdIVA==","IEJ1bmR5","IFBJWEVM","IGR1a2U=","VG93ZXI=","U2NvdGxhbmQ=","IHJlZmVyZWVz","IEFzc2VtYmx5VHJhZGVtYXJr","CXN0YXJ0QWN0aXZpdHk=","Lk9uZVRvT25l","IEF1c3dhaGw=","IHN0cmVuZ3RoZW5z","LlF1aXQ=","IFVSTFJlcXVlc3Q=","ZWVj","IHJlZ2lzdHJhemlvbmU=","IGhvc2Vz","QWN0dWFsaXphcg==","L2FycmF5","IGNvbnN0cnVjdGlvbnM=","Y2Nk","IEZpbGVOb3RGb3VuZEVycm9y","VGjDqm0=","KHJlc3VsdGFkbw==","IFNFUklFUw==","U3BlYWs=","X0FIQg==","QmxvY2tlZA==","LWZvbnRhd2Vzb21l","Ol0p","b2JibGU=","KGxpbmtz","IENhdGFsb25pYQ==","R2VW","LkRhdGVGb3JtYXQ=","IGZsZWE=","LmVm","IHNvbGljaXR1ZA==","IERZ","Y29kZWdlbg==","eXRoZQ==","IGVwb2xs","X1RE","IGFmZmlybWF0aW9u","X2Zh","SVNUQQ==","IEVhdG9u","Y3JlYXRlUXVlcnk=","IGxvZ2lzdGljYWw=","IFJheWNhc3RIaXQ=","IGNhdWxpZmxvd2Vy","IHVsY2Vy","LkFscGhh","aW5rZQ==","Wy4u","RVhBTVBMRQ==","LXdhZ2U=","IHN0YXRp","ZWN0aXZl","LmdldE1pbg==","IFNVQkpFQ1Q=","IEF1ZGlvTWFuYWdlcg==","enphcmVsbGE=","IFNlbGVjdExpc3RJdGVt","ICQNCg==","IG9oaW8=","IFRhaG9l","IGtXaA==","cXVlcnlTdHJpbmc=","IGRlcGFydGFtZW50bw==","PWFkbWlu","IHdvcmtzdGF0aW9u","KSsrOwo=","SGVhZGVySW5TZWN0aW9u","IFRyaXVtcGg=","Q2hhcmxvdHRl","IFNNQQ==","Q8OzbW8=","IHZlcm0=","IHRoZWFubw
==","Ymdjb2xvcg==","XCIiLAo=","IFJlbWluZGVy","QmlsbHk=","b3JhbFR5cGU=","Z2ViZXI=","KGNsb25l","IEt1dA==","Lz4u","QXBvbGxv","IHNobA==","Wkg=","VGh1bmRlcg==","IGdpZnM=","X2tlbGFz","IFJvdGhz","IH0o","IEJyb2FkY29t","IERlcHRocw==","CUlOTkVS","cGFyY2Vs","IGVqZXJjaWNpbw==","IGluZGVwZW5kZW50cw==","aWxsb3c=","ZXhlY3V0YWJsZQ==","RXZlbnRv","IHpvc3Q=","IEhNQUM=","W0RsbEltcG9ydA==","YWxsZXM=","X2Rlcml2YXRpdmU=","QXBpS2V5","IHN0ZXBwZXI=","PXBsdA==","Z2V0SW5kZXg=","IHZhbGV1cnM=","UG9saXRpY3M=","IElEWA==","IFVzYQ==","IExUQw==","Lm1pbkxlbmd0aA==","c3Rybw==","X05D","IHN0YWduYW50","IG1vbnRhZ2U=","IGJsb3VzZQ==","ZWxpZ2U=","IHR1cnF1b2lzZQ==","IFN1cGVybg==","5q2z","dmFyYQ==","TmV3SXRlbQ==","X0VYVEVOREVE","IHdvb2R3b3JraW5n","IEVwaXNjb3BhbA==","LnBhaXI=","LlVzZXJJbmZv","IGRpcmVudA==","L3RjcA==","IGZyYXVnaHQ=","U2xhdmU=","LmdldExhdGl0dWRl","IFRvb2xib3g=","IGVhcm5lcnM=","IEhPVVI=","0LDQu9Cw","cG9zYWJsZXM=","Y29uZGl0aW9uYWxseQ==","X3h4","IGxhbsOn","KHJw","Q2hh","IGluY2Fybg==","LkRhbw==","Li8o","2KfZgQ==","VGQ=","Q0VG","L3JhbmQ=","LlZpcnR1YWw=","IGRiSGVscGVy","YW1pbmVz","IGx6","IHN0b3M=","IEF0a2lucw==","X0RE","aXRvcmlv","IG1pbmltaXNl","aGlwc3Rlcg==","KHsuLi4=","X1NSVg==","W2ZyYW1l","IFJva3U=","R1JQ","IGJhcmJlcg==","LkZlY2hh","IOuwnA==","IGdyYW51bGFyaXR5","IFNheWluZw==","X2xpa2VsaWhvb2Q=","LmJhckRvY2tDb250cm9s","IGZyb250bGluZQ==","IFdoYWxl","IHNtZWxsaW5n","IENvbnRyaWJ1dGlvbnM=","aXZhbnQ=","IGNyaXBwbGluZw==","cHJlbG9hZA==","IEhlcnJlcmE=","X1dBVENI","LWV0","OmV4cHI=","aW52ZXN0bWVudA==","ZWRlcmF0aW9u","X21nbXQ=","IGhvb3Bz","bW9ua2V5","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAK","aW50ZXJzZWN0","IGNyaW1zb24=","IHN1b2k=","IFtdOgo=","WE9iamVjdA==","U0ZNTA==","RVFVQUw=","KCd+","Y2VudHJvaWQ=","CXJlc3RvcmU=","IHByZW5hdGFs","IE1pc3RyZXNz","IHF4","dHBz","IHJlc3Bhd24=","IFtdKSwK","IGtvbnRyb2w=","44GC44KK44GM44Go44GG44GU44GW","TW9kdWxlTmFtZQ==","IG5ld1BhdGg=","IFBhZ2luZw==","IHJpbnM=","X21ha2Vy","XGJyaWVm","IGJpc2hlcg==","CVJlYWQ=","IGppaGFkaXN0","LnBlcnNpc3RlbnQ=","IFJvYm90cw==","L2dycGM=","IEpvdQ==","w6RyZW4=
","77yM5Zyo","LXB0","IHpkYXJtYQ==","X05N","IENvbm5lY3Rpdml0eQ==","KGJj","IEZsb3JpYW4=","IFNvY2lvbG9neQ==","X3dv","QW5kU2VydmU=","XygpOwo=","IEZMVA==","X0RFUg==","IENvbm5pZQ==","IEJyb2FkY2FzdFJlY2VpdmVy","eyg=","IGNvbW1lbnRlcg==","IGRlbW9jcmF0","IGFtcGxpZnk=","LS0tLS0tLS0tLQ0K","IEhNUw==","IHRyYWlsZWQ=","IFNvZGE=","LXRlc3RlZA==","dWxpc3Q=","KW5ldw==","X1RocmVhZA==","VG9kZA==","IGRlYmlhbg==","Vms=","IHByZXNlbnRh","IGNvbWZvcnRz","IFdhc2hlcg==","IGdhcmc=","IEh1Y2thYmVl","INGB0LDQvA==","ICEi","QWRhcHRlck1hbmFnZXI=","IEVh","IEFzc29jaWF0aW9ucw==","CQkJCQkKCQkJCQkK","LmdldFdyaXRhYmxlRGF0YWJhc2U=","IG51Y2xlaQ==","w6lnb3JpZQ==","CSAgICAgICAgICAgICAgICAg","QkFC","IHVwa2VlcA==","IFR1cA==","LndpdGhPcGFjaXR5","bHlh","IGx1eGU=","dXBybw==","LWVuZw==","IHJlbGHDp8Ojbw==","IGtleVByZXNzZWQ=","IGh5YnJpZHM=","bGZ3","T3BlcmF0aW9uQ29udHJhY3Q=","IG5hbWVMYWJlbA==","IEhvcnQ=","X2dydXBv","IGJhbmRh","SXg=","SGVhbHRoeQ==","LmdldEVuZA==","ZnJhdQ==","KFNjZW5l","KENvbGxlY3Rpb25z","IFNraXBwaW5n","dWJv","IGbDvG4=","Ij4tLT4K","IGRyb2l0cw==","IGhvbW9zZXh1YWxz","IGFiZHVjdGlvbg==","CXdpZGdldA==","JGhlYWRlcnM=","IERBUg==","IGZsYQ==","dGhyZWF0","IGxvdWlz","LkdldFByb3BlcnR5","Ikp1c3Q=","KGZyYW1lcw==","cnlv","cHJvZmVzc2lvbg==","fGk=","7ZW07ISc","KHN2","IHVucmVjb2duaXplZA==","SW9uaWM=","RmFzaGlvbg==","U2NyZWVuU3RhdGU=","IEluY29taW5n","Tm90Tmls","IHN5bmNpbmc=","ZW1pZQ==","IHRoZXJtbw==","X3Byb2Nz","IGluY29uc2lzdGVuY3k=","cmVsaWdpb3Vz","Lm1q","IHBlcnNvbm4=","IG1vbWVudG9z","b3JhcmlseQ==","IOaK","X25ldXJvbnM=","SWxsdXN0cg==","aW1vdG8=","aWxpaw==","IFdvag==","VHJhZGluZw==","IGFwcGFyZQ==","IGVudHJlcHJpc2Vz","YWNoYXQ=","IMKs","IG5laWdo","QlVUVE9ORE9XTg==","IE1haGVy","YWdoYW4=","LWhhc2g=","ImY=","IGNsaWVudGVsZQ==","LmFkZEJ1dHRvbg==","CVNQ","UWk=","IGdyYXRlZA==","UE9TSVRF","Oj4=","IEhvd2VsbA==","IENvbXBhcmF0aXZl","IElTQw==","wq1p","T2NlYW4=","RGF2aXM=","IEZpbG1l","V2lucw==","IEpJVA==","b2NjZXI=","IENvcm0=","RU5DSE1BUks=","cmNoaXZl","aWNhw6fDo28=","IG1hdGE=","IGNoaWxkYmlydGg=","IE9wdGlvbmFsbHk=","RW5z","IHhodHRw","IGVsd
WNpZA==","X09zY0luaXRTdHJ1Y3Q=","KSkpOgo=","IGludHVpdA==","IERvbmF0ZQ==","IGNvcnJlbGF0ZXM=","PkRlbGV0ZQ==","IGVxdWlwZQ==","IGJvY2E=","IGluZmxhdGFibGU=","ZXJhaA==","IERhdGVUaW1lS2luZA==","IGNhbHZlcw==","XExpYg==","IGVtbHJ0","IFRyaWxvZ3k=","IFBhbmM=","IER1aXM=","IHBlbMOtY3VsYQ==","V0FSRFM=","X0RFVEVDVA==","LXNlY3Rpb25hbA==","ZGhjcA==","Rm9yUm93","LWRlc3RydWN0","IFByZXNlbnRlcg==","L3NsaWNr","LG9u","IENpdGFkZWw=","bG9nZ2VkaW4=","X3N1YnR5cGU=","IHNpZ3Vl","IGN1cmluZw==","IEZpcmV3YWxs","IGZsdW9yZXNjZW5jZQ==","IEl0YWxpYW5z","0LjRgtGB0Y8=","LmdldFN0eWxl","SW5TZWNvbmRz","amll","LVNtaXRo","IHhsaW5r","IHN1Ym1pc3NpdmU=","0L7QvdGC","YXJib25hdGU=","IEZhdWw=","X2dvYWxz","IENvbW1pc3Npb25lcnM=","Y2hhcnRJbnN0YW5jZQ==","X1BPU1RGSUVMRFM=","IG1lZGlhbA==","IG1hbm9z","IGRlbHQ=","c3Zt","LkFwaXM=","ZXBoeQ==","IGFzeW1wdA==","IGFwcERlbGVnYXRl","IGltcHJvYmFibGU=","Y2th","c2ltZA==","L0Vycm9y","LuKAkw==","IFBUUw==","ZGVlcg==","IHNpbmE=","bWFnbml0dWRl","SURBREU=","J119Jw==","IG1heW9yZXM=","CWNvbW1lbnQ=","L2NvbnNvbGU=","IkA=","dm9sdA==","LnNlbGw=","IE1hY3k=","IG1lbG9k","IGltw6FnZW5lcw==","X2NoZw==","IGlub3V0","aWRlbnRl","KScpLAo=","ZG5p","LmJsb2I=","IHR5cG9ncmFwaHk=","IGVlcmll","X09JRA==","cGVzYW4=","YWphbg==","IGNob3BwaW5n","IGJsdWZm","YWRm","X2Jhc2Vz","LkZvcm1hdHRlcg==","IFwl","IFBhZ2VJbmZv","Q2Fycmllcg==","IENhbGlicmF0aW9u","Y29tbw==","LWJvZGllZA==","IGZpbmFuY2llcg==","IElOQQ==","LkVSUg==","IGhvb2RpZQ==","IFNhbml0eQ==","Z3VhcmRlZA==","Lm9wZW5kYXlsaWdodA==","SVNNQVRDSA==","SGlnaGxpZ2h0cw==","w7xuaw==","YW5pZW0=","YW5nZXJlZA==","YXNzaWdubWVudHM=","IHJlZ2lzdHJhZG8=","IFVQUEVS","YW1waWxrYW4=","YXNoaXJl","IE5pa29sYQ==","IENGTA==","IEhEQw==","IHBvaWRz","IElQcw==","IHByZXZlbnRhdGl2ZQ==","aXBzb2lk","aWZpeA==","LmNhbWVs","Lmdh","Vm9sdW1lcw==","LXN0ZQ==","WWFob28=","X3NpYmxpbmc=","SGlnaGVzdA==","b3B0Z3JvdXA=","IGt2aW5uYQ==","4oCd44CCCgo=","IEFwcGxpYW5jZXM=","ICI+PA==","JykiKQo=","aHR0","IElkZW50aWZpZWQ=","IHBlbmNpbHM=","IG1lbWJlcklk","IGFwcGVuZFN0cmluZw==","LmxvYWREYXRh","IG1vY2tNdmM=","IGp1Yg==","IFNsdXQ
=","IFRhaXBlaQ==","c3RhdHQ=","UG9saXQ=","IHBhcnRhZ2Vy","RGlkQ2hhbmdl","SW5jcmVhc2Vz","KX0u","IEJhYmE=","X0NMSVA=","W3VuaXQ=","INC60LvRjtGH","IGFsY3VuaQ==","IExvbGE=","IGNsaW5naW5n","QFBvc3RNYXBwaW5n","KGNvbmNhdA==","IHNzaWQ=","IEZhdWM=","b2tpdA==","IFJlY29yZGVk","w6FsZXo=","KCQoJzw=","LmFzc2VydElzTm90","IGthbGk=","Vm9sdA==","IHdhcm1seQ==","IHNjYXJlcw==","Z2V0dGk=","ZsO8aHJ0","X2RvZXM=","LkVNQUlM","aW1hdGlvbnM=","IHNwcmluZ2ZveA==","IERlY29t","YXJjeQ==","IGdsaXRjaGVz","IE1vZmY=","IFZvbGw=","LmJldHdlZW4=","IGNvb3JkZW4=","IFBhcnRpY3VsYXJseQ==","R0JQ","IHNlbWJsZQ==","RWFzdGVybg==","X01TQg==","XSl7DQo=","bW9yZ2Fu","IEVWQUw=","ZGVyZQ==","SE9VU0U=","bW9pcmU=","aXN0aXF1ZQ==","X2xzdG0=","LWNvbW1pdA==","eXN0ZXJpb3Vz","IHR3aW5r","LXRodW1ibmFpbHM=","ZW7DrQ==","OicnLA==","IGJsYWNrb3V0","IEZsb29ycw==","IHNvZmFz","IG91aQ==","bGVzaG9vdA==","IFJhcQ==","LWFicw==","IGtyYQ==","TWluaW5n","c2hhZnQ=","LnNldENvbHVtbnM=","Q2xheno=","UFJFVFRZ","LnBsYXlsaXN0","6Zai","LVNhaGFyYW4=","TUlORw==","CWJs","6K6u","amY=","RE9DS0VS","aG9wZWZ1bGx5","KGlnbm9yZQ==","IFVzZXJzQ29udHJvbGxlcg==","IE1pdGFyYmVpdGVy","IExFUw==","SGFtaWx0b24=","LW1ldGFkYXRh","IEtL","aWt0aWc=","IHdvbGx0ZQ==","ZWdyYXRvcg==","XWJvb2w=","LGN1cnJlbnQ=","IHZhbHVlVHlwZQ==","IGV4Y2F2YXRpb24=","b2xhbmQ=","IHZlcnY=","L2ZpbGVwYXRo","QXV0aFByb3ZpZGVy","IHByb2NyYXN0","CVVMT05H","X01FTUJFUlM=","IHVwbGlmdA==","IEF1dG9ub21vdXM=","IGFydHdvcmtz","IE91dHJlYWNo","IHBvcmU=","SG9tZXBhZ2U=","RGlhbG9nVGl0bGU=","IEdlbmVyYXRpbmc=","UEFSU0U=","IHNlbWFuYXM=","IGh1bWFubw==","SlNHbG9iYWxTY29wZQ==","IHZvbHRl","IGJlbGxh","KGlzaW5zdGFuY2U=","IHBsYw==","XENhdGFsb2c=","IGVzdGVlbWVk","6Zu3","KHN1ZmZpeA==","IHN3ZWVwcw==","CU9SREVS","IGRvaXZlbnQ=","IFN3YXJt","IENvbXBpbGVk","Z2V0UGFnZQ==","QURS","LlJpY2hUZXh0Qm94","IE5hbWluZw==","YWdnZWQ=","IEdBTkc=","cmFzaW5n","b2RlbGVk","IGdhbGE=","IEpTTmFtZQ==","ZGRm","IGlsbHVzdA==","IExhbnNpbmc=","W3BvcnQ=","LWRlYXRo","IGRpbmhlaXJv","IEVpZ2h0aA==","IGJpYW4=","c3TDpQ==","IHZlcnNpw7Nu","IExpbmVhckdyYWRpZW50","IEhhcmRpbmc=","Liop","ZW
N6eQ==","JGhlYWRlcg==","IHbDpXI=","VW5jaGVja2Vk","IGtvamU=","IFBhbGFkaW4=","KCkpKSw=","R2l2aW5n","KCl9KQo=","IGRpcHM=","RnJpZW5kbHk=","IHBvcnRyYXlz","IGhlbGl1bQ==","IGluc3VyZ2VuY3k=","X2V4cGlyeQ==","IHN0cmluZ0J5QXBwZW5kaW5nU3RyaW5n","IGFhbnRhbA==","c2xvcGU=","bWFzdA==","LmdldEludGVnZXI=","ICMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIw==","X1BJUEVMSU5F","IGRlbnNlbHk=","IG11dGF0aW5n","bWlkaQ==","IFNlaXQ=","YXluZQ==","Tk9XTEVE","IERlc21vbmQ=","IEZOYW1l","IE5haXJvYmk=","XENvbnRleHQ=","IGNhbGN1bGFy","LWRlbg==","IGNvdHQ=","XSk6DQo=","IFJlY29tbWVuZGF0aW9u","IFJvbGV4","IHZhbGlkYXRpb25SZXN1bHQ=","LnBhdA==","IG7DoHk=","IFJlc3RDbGllbnQ=","IEdQSQ==","IEFzaGV2aWxsZQ==","IE9TUA==","IFBFUk1JU1NJT04=","0JTQsNGC0LA=","L25vdGlmaWNhdGlvbg==","S25pZ2h0","X1dvcmQ=","IEJlbmRlcg==","cmFua2luZw==","IHBhcnRpZGE=","X3Jlc2VydmF0aW9u","zIA=","IG1OYW1l","IGdldGNo","IGJvcnI=","IGRpbGlnZW50","RGlzY3Vzcw==","5q2j5Zyo","YXBlYWtl","aW9uZWQ=","LU5hemk=","LmN1bQ==","IEtyb24=","PSQoJyM=","L3NpbmdsZQ==","IGVyb3Rpc2No","IFZpYg==","IHJhdGlmaWVk","IGNvbmNlcnRlZA==","IFJFR0FSRA==","IGRvYnI=","LkRyaXZlck1hbmFnZXI=","J3I=","UG9ydGFibGU=","CXN1aXRl","IHJlbGFjaW9uZXM=","IERvcA==","ZW1wbG9p","RE9C","IGNydW1icw==","IHhscw==","X0FwcGxpY2F0aW9u","KCc6Jyw=","IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQo=","bXNl","IGJlcms=","IFJldHVyblZhbHVl","IEJlbGx5","IGNhbWFy","IFBlZWs=","ZWxzaW5n","IG5vdGlmaWVz","IFRyaXN0YW4=","IEdBUg==","ZW1tZQ==","IEVsZXZhdGVk","X0NTVg==","KGNoYWxr","IHR3ZW50aWVz","IFNlYXJjaFJlc3VsdA==","PXNlYXJjaA==","IE1peGluZw==","w710","IHJlY3J1aXRlcg==","IElERU9HUkFQSA==","IEFnbw==","KE9wZXJhdGlvbg==","JHZhbHVlcw==","IHdvcmxkbHk=","IFJvc2VuYmVyZw==","IENvbmZpZ3VyZVNlcnZpY2Vz","Pio8Lw==","S0FOSkk=","IGNodWNrbGVk","IHN0cmlmZQ==","IEJvbWJheQ==","IEJBQ0tHUk9VTkQ=","ZXRhdA==","ZW51bWVyYXRvcg==","IHPDu3I=","IOOBrg==","X3BlZGlkbw==","L0Rr","IGplYW4=","X0NvbHVtbg==","IGhlYXRtYXA=","LlBlbmRpbmc=","IHVuc3VjY2Vzc2Z1bGx5","CWVw","IHNpbmZ1bA==","IEFudG9ueQ==","X0ZPQ1VT","VGV4d
ExhYmVs","X3JlYWN0aW9u","IElEaXJlY3Q=","IGNhcm5pdg==","V29ya3NoZWV0","IHN1ZWRl","CVJUQ1Q=","IHNldGJhY2tz","LnVuYmluZA==","IHNpw6g=","TGlxdWlk","X1JFTkRFUkVS","TWF0ZQ==","IE1pbGxlbm5pYWxz","IGVwb3h5","aXp6aW5lc3M=","IGJyYXppbA==","0L7RgdGC0Yw=","JnZpZXc=","L2dwaW8=","SmFtaWU=","LkdyYXZpdHk=","PSIuJF8=","IFZBTg==","IElEUg==","YXBwZWFyYW5jZQ==","LlNlbGVuaXVt","TGVhcA==","LlJlbGF0aXZlTGF5b3V0","U2lnbmFscw==","QWNjZWxlcmF0aW9u","CUhBTkRMRQ==","L09wZW4=","IGdldExvZ2dlcg==","U3Bp","LXdyaXRpbmc=","INCy0YvQtw==","LXdvcnRoeQ==","IHdjcw==","IFFUaW1lcg==","IFBvbHltZXI=","IHZhbnQ=","CURlbGV0ZQ==","aXR0ZQ==","V2hpbHN0","IGFsZ3Vt","IHNoaWVsZGluZw==","IGttcw==","CSAgICAJCQk=","TWV0ZW9y","IGFnZ3JlZ2F0b3I=","IFNpbmQ=","SG9zdEV4Y2VwdGlvbg==","PScnLAo=","IEpTQnJhY2tldEFjY2Vzcw==","T05P","X0J1aWxk","IHN0cmlwcGVy","IExK","PENvbXBvbmVudA==","L3NvdXJjZXM=","IGVyZ29ub21pYw==","IEFjY3JlZA==","dW5jZQ==","b25pcw==","emVpZ3Q=","IFNrYXRl","IFJlY3RUcmFuc2Zvcm0=","SW5jb21wbGV0ZQ==","IGluZ2VuaW91cw==","IGNvaXNh","IGNpdHlOYW1l","aGFiaXQ=","X1RW","IEFOU1c=","Li4uIj4K","IHNub3Jr","X29wYWNpdHk=","IGluaXRXaXRoTmliTmFtZQ==","aWFkbw==","QUFD","IF0pLg==","O3o=","X3BhcmFncmFwaA==","IG5vc2Vz","c3RhbmRz","aWZy","X21F","SXJhcQ==","LlByZWRpY2F0ZQ==","ZW5haXJl","XV1dOwo=","IHVuaWRhZA==","IHJldGlyZWVz","X2hlbGxv","IG1vZGVsZQ==","IFVJVGFibGVWaWV3Q29udHJvbGxlcg==","ZndyaXRl","X251bWVybw==","X3Zpc2l0ZWQ=","IHJlY2ViZQ==","KE5vdGlmaWNhdGlvbg==","RmFudGFzdGlj","X3N1Ym1lbnU=","IFBFTQ==","IEN1cGVydGlubw==","YXBwcm94aW1hdGVseQ==","Y2xhc3NlZA==","LlJlYWRTdHJpbmc=","IGRvbWljaWxl","X1BX","IGJhbGxwYXJr","IEthbGU=","Y29udHJh","X2Zhdm9yaXRl","L29m","UXVpdGU=","IE9UQQ==","IGFjY2VsZXJvbWV0ZXI=","ZGlkbg==","fF4=","IFJvaGluZ3lh","aXZpY3Jt","YW5uYWJpbg==","0L7QsdGL0YLQuA==","b3JhZG8=","Jykr","SGF1bnRlZA==","LElE","KFVJQWxlcnRBY3Rpb24=","dXJ2","X2JlbA==","IE1leGljYW5z","L3Rlcm1z","IFBhaW50ZXI=","SW5wdXRMYWJlbA==","IFZpbmNp","IFJvc2ll","XHVj","PE1lbnU=","IGNvb2xhbnQ=","KGN1cnJlbnRVc2Vy","X2R1YWw=","KSJ9LAo=","JnA=","IGNvbnZlcmdlZA==",
"IHJlc3RyYWlu","IFl1Z29zbGF2aWE=","PXRhcmdldA==","IGltcHVscw==","ZHNh","U2VhcmNoVHJlZQ==","IGhib3g=","IEltcHJlc3M=","wqfDgw==","Z2V0RnVsbFllYXI=","KGRh","IFlZUw==","LmFsaWdubWVudA==","LkdldFRleHQ=","LnRva2VuaXpl","IE9seW1wdXM=","IG11cmt5","b3Jlc3RhdGlvbg==","IGRpc3NhdGlzZmFjdGlvbg==","CVRBcnJheQ==","X2tzZXM=","LkFkZFNpbmdsZXRvbg==","IFN0YXJ0VGltZQ==","IGZhbmF0aWM=","ICAgICAgICAgICAgICAgICAgICAJ","IGVudGl0eVR5cGU=","Lm92ZXJyaWRl","IC0tLS0tLS0tLS0tLS0=","IERhdGFncmFt","Zm91dA==","KHdpdGhJZA==","ICNfXw==","n+iDvQ==","ZWt5bGw=","LmZyaWVuZHM=","YW1lbGVvbg==","IHphY2g=","LnNpbXBsZUJ1dHRvbg==","cmV0b3Jubw==","IGtvbms=","L3NtYWxs","IFF1aWNrbHk=","dW5yZWFk","RG9uYXRl","RGV0YWlsVmlldw==","IGR1YQ==","IHBlbmV0cmF0ZWQ=","T01VWA==","IG5pcg==","X3BkYXRh","Il0sWyI=","IGxvd2Vz","IGRvcGluZw==","IGFzeW1tZXRyaWM=","IG5lZWRsZXNz","b3VyY2Vt","IHVwcm8=","IEd1enpsZQ==","YWZi","IHNleHRyZWZmZW4=","LWNvbGxhcg==","IGNvbG9zc2Fs","TW9ua2V5","bmlzaA==","IGhhbmRsZU1lc3NhZ2U=","SW5jcmVhc2Vk","KmR4","IENoYXR0YW5vb2dh","Zm9yZw==","IE9yZGVu","IHNocmk=","IFZhbmQ=","ICJAIg==","SW1hZ2VTaGFycA==","IFdpbGRjYXRz","cG9uaWJsZQ==","LnNjZW5lcw==","IHBhaW50ZXJz","IFBmaXplcg==","IFphaA==","VG9Mb2NhbA==","IEZsYW0=","IMOpdGFpZW50","KSle","IFNhbmRib3g=","IFRSQURF","IGNocm9taXVt","IGFjY2xhaW0=","IHBhY21hbg==","wrR0","KXJlYWRlcg==","TWFyaQ==","LkRpc3BhdGNoZXI=","LkFETUlO","IFJlbWVk","U3dlZGVu","IG92ZXJsYXlz","LmVy","IHBhbmc=","IGNsZWFubHk=","YXZlbnBvcnQ=","VG95b3Rh","cGF0Y2hlcw==","IHZ0eA==","IEVpcw==","Y2xhZG8=","IFJpdGNo","Uk9MUw==","IGhhZGU=","IGNvbnNwaWN1b3Vz","IGRvY2tz","KGpx","IFByZW1pZXJzaGlw","IEJleg==","IOKElg==","INGD0YHQuw==","X3RvdGFscw==","IHByb3Zh","IEN1ZQ==","IHNhw7pkZQ==","IEdhbWVDb250cm9sbGVy","SU1JWkU=","LHBvcnQ=","44CCKA==","LkNkZWNs","SW5zdGFudGlhdGlvbkV4Y2VwdGlvbg==","IGNvbGxhZ2U=","IElPQw==","IGJhaXM=","IG9uRmluaXNo","LXN0YXJz","c2V0U2l6ZQ==","IG1vZ3Vs","IGRpc2lsbHVzaW9u","IGNoZXZ5","KFNjaGVkdWxlcnM=","KElS","X2xvY3M=","IGNhbm5vbnM=","IGNhbmNlbGxpbmc=","L2J1cw==","IGJ1Zmlv","IFlvdXJz","IFBpa2FjaH
U=","IHRlcm1l","csOl","ZmFocmVu","IG93bmVySWQ=","IG9ibGlnYXRvcnk=","IGN1bHA=","IGFjaWRpdHk=","LW11bHQ=","IEJhbWJvbw==","ICciPg==","X2dz","IGNvbXBpbA==","bmFyZA==","LWV4Yw==","IHJoeW1l","IGJ1dHRv","c2F5cw==","YW50YXN5","67g=","IGNpdHTDoA==","IGNoZWc=","VGltZVN0cmluZw==","IHBvc2l0aXZpdHk=","IERhYmVp","IHdhbmc=","IGVzY3Jl","ImM=","CXZpZGVv","IFJhbmtlZA==","LnN0cmluZ3M=","Pj4+KA==","INC40L3RgtC10YA=","IHJlc3Rh","WzosOg==","IHJlbmRyZQ==","IGRlc2Vy","Sm9z","IGRpc3J1cHRpb25z","INC+0L/QtdGA","c2FtcGxpbmc=","c3VwcHJlc3M=","IGNvbnRhaW5lclZpZXc=","IFNlYW1sZXNz","IGFpcnk=","IG9ubG9hZA==","LldpbmRvd01hbmFnZXI=","IFBMQQ==","YnJhY28=","LnNldFBvc2l0aXZlQnV0dG9u","IHBkdQ==","IGdzaQ==","IENsaQ==","X2dyYWRpZW50cw==","0Y/QtA==","IFdoaXNwZXI=","Y3N0ZGludA==","IGzDpG5n","IGZvcm11bGF0aW9ucw==","w6lub20=","b3VybmVtb3V0aA==","WyRf","IG9yZGluYXJpbHk=","LnNldFVzZXJuYW1l","IGZhY3VsdGllcw==","TUlUVEVE","L3ZhbHVlcw==","IHdlaXI=","IEFwdA==","TVo=","CWNm","dWNrZW4=","CQkJCQkJCQkJCQkJCQkJCQkJCQk=","ZGVmZW5zZQ==","W2lWYXI=","IEJ1c2luZXNzRXhjZXB0aW9u","U2VsZWN0b3Jz","KGNvb3JkaW5hdGVz","IFJlc2V0cw==","IERyaW5rcw==","b2xlYW5z","KHN0eXB5","X0lPQw==","Lnh4eA==","IFNsYXRlcg==","IEJlbGl6ZQ==","IC8qKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKio=","YWRkaW4=","X2VwaXNvZGVz","IGlzY2hlbQ==","bGVnYWxBcmd1bWVudEV4Y2VwdGlvbg==","RGFubnk=","IHBhcmVk","LmNvZGVoYXVz","IEFzc3k=","CVJlY3Q=","4p4=","Lmxpc3Rh","INCy0LDRiA==","IHZldHM=","SFdORA==","aXNvbmVy","IHhv","IG9yYWxseQ==","IFN0bXQ=","LnJubg==","IERQSQ==","IFN0cmlrZXM=","LnNldFZpZXdwb3J0Vmlldw==","IOiHquWKqOeUn+aIkA==","WUVMTE9X","R0xlbnVt","cGFydG5lcnM=","IEltcGxpY2l0","IHRha28=","4oCZZWxsZQ==","IGVybcO2Zw==","dG90YWxDb3VudA==","R2ls","CXdvcms=","IHByYXRpYw==","aW5hdGk=","YWJpZXM=","IFNraW5uZXI=","IHNwaXJpdGVk","IHBhbmNyZWF0aWM=","IGhkZg==","J2Vt","IHBzeWNob3Npcw==","b2xpY2l0","ICJ7Ig==","X2F0dWFs","IMOpbGVjdA==","VEVBTQ==","IGRhaw==","IFNXQVQ=","LkZyYWdtZW50TWFuYWdlcg==","IHByb3Zpc2lvbmluZw==","bGlmZXRpbWU=","X0VYV
EVOU0lPTlM=","IENBU0NBREU=","ICFb","KEtQ","IHZlbQ==","IEludGVycmFjaWFs","J119LAo=","c3BhY2Vy","X2t2","V2FyZWhvdXNl","UkRE","X2ZzbQ==","LlN0cmV0Y2hJbWFnZQ==","LFllcw==","IFJlZnVnZWU=","IEJyaW5naW5n","IHbDoWxpZG8=","LmludGVyc2VjdGlvbg==","IHNwb29reQ==","X3BvcnRhbA==","IG1vdGg=","IFpvZGlhYw==","IFNPQ0lBTA==","TWltZVR5cGU=","J119fTwv","IHJlc2l6YWJsZQ==","5Lqb","KHBoYXNl","KG1hcHBlZEJ5","IG11bmRpYWw=","IGNvbnZv","L2xlZnQ=","L2RvY3VtZW50cw==","d2FzaGluZw==","IEFtw6lyaWNh","X3F1b3Rh","LnBvc3Rlcg==","J10iKTsK","IHN0ZWxsdA==","IERJU0NMQUlNRVI=","W29wdA==","IGVkcw==","IFJhY2Vz","dmVudGFz","IHB6","IENhcGFj","IFVzZXJEYW8=","aXRlc3Q=","UHJvdmVlZG9y","IFNob3RndW4=","IHRoaXJzdHk=","IEJhbGFuY2Vk","aXF1ZXRh","IGhlYWxlcg==","LyIp","LlNkaw==","IHRlcnQ=","ImRhdGE=","X3Byb3ZpbmNl","LkF1dG9tYXRpb24=","IGZvbnRXaXRoTmFtZQ==","X0FOVA==","55WM","b29kbGVz","IFJFUFJFU0VOVA==","X0dQUw==","IHBlcnN1YXNpb24=","IERpc2N1c3Npb25z","IGZyZWQ=","TkVH","OmJvcmRlcg==","CWluaXRpYWxpemU=","CWdsb2c=","LWNhcGl0YWw=","IEltVmVj","IGRldmlz","Q2FuZGlkYXRlcw==","LmFuaW1hdGlvbnM=","IHJhZ2F6emk=","IFByb21ldGhldXM=","IEtpZGQ=","IHByb2dyYW1tYQ==","Q2VydGlmaWNhdGVz","Q29udGE=","LmVzcHJlc3Nv","IOuQmA==","IGJlaWRl","6ZmG","LmdldFJhdw==","IEZ1bGxOYW1l","IGlhbQ==","KCopKA==","bWFpZHM=","Qkg=","IENvbnNwaXJhY3k=","X0RV","IGJsYXRhbnRseQ==","IFx8","IFdpZw==","IENvbmo=","UmVuZGVyaW5nQ29udGV4dA==","TWl0Y2g=","IGFsbGVsZXM=","IOazqOaEjw==","IHJpbXM=","IE5laWdoYm9y","IEt5bGll","LnBhcnR5","dG9ycw==","IOyhsO2ajA==","IHdlcw==","IENyYWZ0aW5n","WyIu","LnNwb25nZQ==","IOqx","SXNsYW1pYw==","IHByb3NlY3V0aW5n","IHdpaw==","Lm9zZ2k=","b25pbmdlbg==","R3JhbW1hcg==","J2lt","IGF4aWFs","Q2xlYW5pbmc=","LmdldEV4dGVybmFsU3RvcmFnZQ==","PS4v","IGNocm9tYXQ=","0LXRhQ==","YWJheQ==","IGJvbGE=","LkFnZ3Jlc3NpdmU=","J10sJF8=","aXphY2Fv","UHJlcGFyaW5n","OkFueQ==","LkVOVEVS","LXdpbmRvd3M=","IGVucmFnZWQ=","X2RpY2U=","IGRldHRh","ZWNhbA==","X09SSUdJTg==","IC0tLS0tLT4=","X0JsdWU=","IGJvdGFuaWNhbA==","IGZyYWdz","IGZhbWlsaWFs","LWR1","IHNlaXppbmc=","KGJsb2Nrcw==","LnJ
k","LmNoZWNrTm90TnVsbA==","IG1pc2Vy","IG1heHg=","IEtuZWU=","Vmlld0l0ZW0=","SW5uZXJIVE1M","RGFuZ2Vy","KChfXw==","IHByenlwYWQ=","Y3JlYXRlVXJs","Kios","IERlY29yYXRpbmc=","QVRFR1k=","Pz4v","LkRlc2lnbmVy","aGV4ZGlnZXN0","IEV2ZXJ5d2hlcmU=","YWxsZXJpZXM=","LlRFWFRVUkU=","LkJsb2Nrcw==","emVsbA==","IHByZcOnbw==","U3VkZGVubHk=","aW5wdXRFbWFpbA==","KHN5bmM=","LmJk","Z29sZGVu","PicpOw==","IERpY2tpbnNvbg==","Pj4oCg==","IFFVRVVF","IGdldENvbHVtbg==","IFNBTkQ=","LnBpZWNl","bGljZXI=","Rmx1dHRlcg==","IGdldFZlcnNpb24=","IHJlc291cmNlSWQ=","b2ds","xYJhdw==","LkJyYW5jaA==","CXdlYg==","IGZyYW1lcmF0ZQ==","UFBQ","IGZyYXk=","Q05U","IGluZm9ybWF0aWU=","J10NCg0K","bmVhcw==","SGVhZGVyQ29kZQ==","IOa4","IHRyZw==","cmF3dHlwZXM=","SG9uZGE=","IG1hcmtldGVy","IHJlcXVlc3REYXRh","IFBn","CW5vdA==","IHBhZ2VJbmZv","IGFrdHVlbGxlbg==","44GV44KT","IEFNUw==","cHVzaFZpZXdDb250cm9sbGVy","CUFM","IHZlc3Rz","cHJvZHVjZQ==","LW3Dqm1l","IFJhaG1hbg==","RnVubnk=","RVo=","X1ZhbGlk","IHNxdWFkcm9u","IGxhc2g=","IGlybQ==","aWFzY28=","IFBhcmFu","IHBldGl0ZXM=","IERlY2F5","IHVuaW5pdGlhbGl6ZWQ=","cHJpdmlsZWdlZA==","IG1iZWR0bHM=","5aSH5rOo","IF4u","IGVjc3RhdGlj","RGV0cm9pdA==","IHBhcnRlbg==","IHNvdXZlbmly","LmdldExvZ2lu","0LzQvtGC0YA=","ZW7Dp8Ojbw==","IG3DrW5pbW8=","IEFjY2Vzc2Vk","cmnDsw==","TWlj","IFZvY2Fs","LlNldFN0cmluZw==","IG1lbnNhamVz","5YCN","IGF0dHJhdmVycw==","IEFwaA==","ICcpOw0K","w7xuZGU=","IGVuY2hhbnRlZA==","IFJvb3RTdGF0ZQ==","IENMT1NFRA==","CQkJCQkJCQkNCg==","IGNhbGllbnRl","b3JyaXM=","IHBoeXNpY2lzdHM=","aHduZA==","X3Zp","IHLDoXBpZG8=","IGNhcGl0YWxpemVk","ZWRCeQ==","IG1hY2hpbmluZw==","IGh1YmJ5","IFN0YWN5","LkJ1cw==","ZHJpbms=","SHVy","IHByb3BpYQ==","VW5pdFRlc3Q=","IG1pc2NvbmNlcHRpb24=","X18pKTsK","L2Rj","IE1heXdlYXRoZXI=","X21D","LmNyZWF0ZUZyb20=","IFFQYWludGVy","cm9wc3ljaA==","aW5uaXR1cw==","YXlhcw==","IGdlZw==","KGR3","IHVzYWRv","IHRyaWNrbGU=","IGFubmloaWw=","IFBhc3Rh","ICsrCg==","KEV4cGVjdGVkQ29uZGl0aW9ucw==","LnBvc3RWYWx1ZQ==","aWNhcA==","IERvbmV0c2s=","X3NvdXA=","LXB1Ymxpc2g=","IFBi","bWVudGlvbnM=","QUNDRVBU","LlB1
bGw=","LOKAmeKAmQ==","IHJldGFyZGVk","X0FUT00=","IFRlcm1pbmF0b3I=","LWNvdXJ0","IENMTG9jYXRpb25Db29yZGluYXRl","IHJldmVyZW5jZQ==","IFNTQw==","dXRlbHk=","IFdPTg==","IEdTTA==","ZnJlaQ==","LmdldExvbmdpdHVkZQ==","IG9wZW5GaWxlRGlhbG9n","LkJ1dHRlcg==","LWltcG9ydGFudA==","X01BTlk=","IEdvbmc=","4oCcSG93","IGdvcmdl","PW1zZw==","IEV6ZWs=","Y3JlYXRlQ29tbWFuZA==","OmNoZWNrZWQ=","IGluZm9ncmFwaGlj","LldFU1Q=","RGlycw==","IGd1YXJkYQ==","IGJlZXRsZQ==","PHNtYWxs","LWFuZHJvaWQ=","IGNyZWRpdG9y","IE3DqWQ=","IGZpbmFsaXN0","IGFibA==","bmV2","X2ludGVyYWN0aW9u","IE1vbnRlcmV5","amFo","IGNhbmRpZXM=","IFF1aW5jeQ==","6Kqt","IGJhdGNoU2l6ZQ==","YWtpdA==","IG9iZQ==","KHBhcmE=","IGV4cGVyaW1lbnRlZA==","IGNvdW5jaWxsb3Jz","IGNsYXNoZWQ=","c3F1","LXN0cm9rZXM=","IEdL","IEV4cGlyZXM=","IHByb3NlY3V0aW9ucw==","IENyZWF0dXJlcw==","IHnDtg==","eGxpbQ==","X0lNUA==","RW50cnlQb2ludA==","ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA=","LkRlZmF1bHRDZWxsU3R5bGU=","IGJyZXZl","IEJyaXRhbm4=","IHN3ZWF0eQ==","IGxldGg=","IGZsYXNoYmFjaw==","cGVybWFuZW50","IEpESw==","X0RldGFpbHM=","RXVybw==","cHB0","IHJpY2hUZXh0Qm94","L2JvYXJk","IHRyYW5jZQ==","LmN5Y2xl","Jyk7Iik7Cg==","IHRveGlu","X2RlaW5pdA==","IG92ZXJhcmNoaW5n","IGNvbmZpZ3BhcnNlcg==","IEthd2FzYWtp","LnRodW1i","IHBsYXlh","IEpvc2Vm","K18=","IHplcm9lcw==","IGF1cA==","IEhhcmk=","Y29tbWl0dGVk","Tml0","LmZpbGVQYXRo","IERpc2FiaWxpdGllcw==","bWFudWZhY3Q=","LWFsaWduZWQ=","LlJFU0VU","IHJ1c3R5","RXk=","IG91c3RlZA==","Y29zYQ==","U3RydWN0dXJlZA==","LmdldEQ=","IHPDoWJhZG8=","PkxvYWRpbmc=","X21B","LmdldFJhbmRvbQ==","Ymxpbmdz","IGNoZWVzZXM=","dHRp","LuKAog==","IEJ1cmdlc3M=","ZW5kZXJpdA==","LicsDQo=","KCIiKw==","YWNi","JXA=","aW5kZXhlZA==","X3ByZWRpY2F0ZQ==","bmVzaWE=","IGJpZWQ=","IENJVA==","KFBvcw==","X3JhZGk=","5Lu35qC8","Qml6","IEFkb2xlc2NlbnQ=","IHZpw6pu","Y3ljbA==","X0NhbmNlbA==","IGNvbmNsdXNpdmU=","IGFwcGVsbGF0ZQ==","aW5mb3JtYXRpY3M=","U0o=","IGVsZWN0aXZl","cm9sZUlk","RmV0Y2hlcg==","CUNvbW1hbmQ=","KCIoJQ==","IGZhcnQ=","SUxB","
Z2V0QmxvY2s=","QVVTRQ==","INC00LDQvQ==","IEFydGU=","IG5vdGlmeWluZw==","IGdlbGU=","LnNhbWU=","IFJlZ2Vs","IEJhxZ8=","LmNyZWF0aW9u","IFZO","X2NvbW11bml0eQ==","IHVuc3VzdGFpbmFibGU=","U0VY","IGdyaWRTaXpl","cmVzY2lh","YXZlcnNhYmxl","KCcsJylb","IFBoZWxwcw==","4buVaQ==","QU5DRUxFRA==","LUlT","LnJ1bm5lcnM=","IFN0b2tlcw==","LlByb2R1","IHdoaXBwaW5n","X2FjcXVpcmU=","IGludmVzdGlnYWNpw7Nu","ZnJpZWQ=","LmNvcHlXaXRo","IEhhcmRjb3Zlcg==","LVNl","4Z624Z4=","aW52aXRhdGlvbg==","bGVzYWk=","IERvcm0=","INGB0L/QuNGB0LrQsA==","IGNvbmNhdGVuYXRlZA==","b3BoaWw=","IHRoaW5rZXI=","L2ZvbnRhd2Vzb21l","IExlb3BhcmQ=","ICIvIik7Cg==","IHJlc2lkdWFscw==","IE1pY3Jvd2F2ZQ==","IGNvbmZvcm1l","dGhyb3A=","IGRpc2VtYg==","IE9NRw==","IERpc2NpcGxpbmU=","IEFjcm9iYXQ=","L3JlcG9zaXRvcnk=","ZGZh","X01FRA==","YnVmaW8=","IG3DqXRob2Rl","X0hPTEQ=","aWFzaQ==","X2xlZ2FjeQ==","KQ0NCg==","5qOA","R2V0UHJvY0FkZHJlc3M=","IHlheQ==","b3RlbmNl","b3JkZXJpZA==","LXR3","IGRlYXJseQ==","SW5jb21pbmc=","L2ls","IG5ldXJvcA==","dWN6","KTsNDQ0K","IElubm92YXRpdmU=","IHByb2Z1bmQ=","aWdtYXQ=","U2VsZWN0aW9uTW9kZQ==","cmVsZXZhbnQ=","LkdP","IGJydWlzZXM=","IHNhY2g=","b2RlZg==","IHJlaW1i","L2Rlc2t0b3A=","LXNwb3Q=","dW5kYW5jZQ==","RW50cm9weQ==","XGNvcmU=","IHN1Z2Vy","IE12Yw==","IEdOT01F","X2luZHg=","IFlZU1RZUEU=","IE1hdGxhYg==","IENJRg==","ICopKQ==","IHByb2R1Y3RMaXN0","IEFscmlnaHQ=","YWNlbWFyaw==","0YLQuNCy","bW9kaWZpY2F0aW9u","aW50ZXJuYXRpb25hbA==","IGhvbWVycw==","IGRpY3Rz","IFFGb250","LlNRTGl0ZQ==","IHRyYW5zcGxhbnRhdGlvbg==","IE1lc3NhZ2VCb3hCdXR0b24=","IEVsdmVz","J11dKQo=","KFFJY29u","IGNpbmVtYXM=","Q09PUkQ=","LUNoaW5h","IGto4bqpdQ==","5oiR55qE","IHNrdWxscw==","IHBhaW5zdGFraW5n","ZmNl","LlhSTGFiZWw=","IHNwZWNpZmllcg==","IHByZWZlcnJpbmc=","L2FjdGl2aXR5","KFBob3Rv","w6FsdA==","LmxvdA==","Jycu","YW5ub25jZQ==","Lmdvb2dsZWNvZGU=","LXBkZg==","IFBva2U=","X0FDTA==","IGVuZG93ZWQ=","ZGlzY292ZXI=","Lm9tZw==","IHdvb2RsYW5k","Lk1hZ2lj","IHZvbG9udA==","Tm90QWxsb3dlZA==","IGNoYXZl","Qk1X","JywnPScs","IFNJWA==","5oiR5Lus","IGtvc2hlcg==","IGFzcGlyYXRpb24=","aW50bA=="
,"X3JlZnB0cg==","JysK","bWVudG9y","LmNsdWI=","V2luZG93U3RhdGU=","LkFSUg==","IHp6YQ==","IG1lc3NhZ2VUeXBl","LmVxdQ==","VGhvcg==","IGluanVzdA==","IGd1bXM=","IGJvcmRlclNpZGU=","Ly8vLy8=","IFRyYW5zbWl0","IGJ1ZnNpemU=","IGhhaw==","IGVsbGFz","UkFORE9N","CW1j","IHBlYQ==","ZWtv","ZG9jdW1lbnRv","IGh5c3Rlcmlh","IGFyZW5hcw==","IGd1bm1lbg==","IG1pa2U=","IGltcHVuaXR5","YXRpc2F0aW9u","X1plcm8=","X0NPTVBBTlk=","IEdvcnM=","IHVzZUNsYXNz","KHJlZGlz","IFJVTk5JTkc=","IEJhaXI=","dmVsdGU=","ICcsJy4=","0LDRgtGM0YHRjw==","w7ZzdA==","ZW5jb2RlVVJJQ29tcG9uZW50","X3Jlc3RyaWN0","IGRlY2Fscw==","IFBlZGlkbw==","IGFsdGVyY2F0aW9u","RGlzcGxheXM=","IEFwcGxpY2FudHM=","Q1VT","VGV4dGFyZWE=","IEFuZ29sYQ==","LmZ1dHVyZQ==","IFVTSE9SVA==","IHN1cHByZXNzaW5n","IHNldHplbg==","QVBvbHlub21pYWw=","IHRvY2g=","IGhhbGxtYXJr","ICQkJA==","IENIQVJTRVQ=","LnJwbQ==","IERpY2g=","LS0tLS0tLS0tLS0tLS0tLS0tLS0=","X3Bhcm0=","6L+Y","YWNjaW9uZXM=","aGFpdA==","V0FSREVE","X3JvdXRpbmc=","IE5PTQ==","IGVuY2xhdmU=","IExvdHRv","CWZy","Y29tcGxleENvbnRlbnQ=","IEJhbGxhcmQ=","a3ViZQ==","L3dpbg==","LmdldENvbHVtbk1vZGVs","X1JFUExBQ0U=","SGVhZGVyVmFsdWU=","IGVzdHVkaWFudGVz","IGFwaXM=","IGJwbQ==","IFR5cGVOYW1l","QW5kR2V0","cml0YQ==","UGxhbnM=","Pk5vdGU=","IGZldGlzY2g=","IHRvbmVk","X2dvdG8=","b25zZW5zZQ==","IG1vbGRz","IGluZmlsdHJhdGlvbg==","IEd1ZXJyZXJv","dWJibw==","Y2tp","KCQoIi4=","X2FjdGl2aXRpZXM=","KGNoYW5nZXM=","IG9mQXBw","IEtlcGxlcg==","IERlbXA=","IENvbnRpbmVudA==","LlRpY2tz","IFVuc2lnbmVk","IEphaHJlcw==","IGZyZXNobWVu","IEFyY2hpdmVk","INC60L7RgtC+0YDRi9C5","ICc6Og==","VHV0b3JpYWw=","Q2M=","IHRhYmxlTGF5b3V0UGFuZWw=","ZnJvbUpzb24=","LmxldmVscw==","X3RyYW5zaWVudA==","IGVuZG9yc2luZw==","IERJQw==","bGF1Zg==","IHNocmVk","X0VNSVQ=","aWZpY2FudGx5","QUxB","L3Byb3Rv","IG5hcnJvd2luZw==","VXRj","RmFjdG9ycw==","IHNlbnRpZW50","5p6Q","bGl4aXI=","IENST1NT","bWV0ZW9y","IGdyb2lu","IG1kYg==","IFJvdHRlcmRhbQ==","IGNvbWlkYQ==","IE9wQ29kZQ==","IERlZmF1bHRWYWx1ZQ==","UGVybWlzc2lvbnNSZXN1bHQ=","IGhldGVyb2dlbmVvdXM=","IG1vb3Q=","IGRlY2VpdmVk","LWluZGVwZW5kZW50","I
E9iamVjdE91dHB1dFN0cmVhbQ==","IG92ZXJwb3dlcg==","LmR1cA==","IGxkYg==","IGRvbWVzdGljYWxseQ==","IGJlc3RlbGxlbg==","IGxvdg==","IENvbnRyYWN0b3Jz","VHJpYW5nbGVz","IGZvZGRlcg==","IGZpbG1lcw==","5LyB","IHJldm9sdmVy","U3RhcnR1cFNjcmlwdA==","L3ZhbGlkYXRpb24=","IFJlc291cmNlVHlwZQ==","acWf","IExheg==","ZmVm","IGxzdG0=","eyo=","LmF0dGFjaG1lbnQ=","LmhpdHM=","ZXdpdGg=","RE9H","QWxhYmFtYQ==","IG1lZGl1bXM=","Lm1Db250ZXh0","LWNvbHM=","5Y+L","Lm5vdGljZQ==","IGF0dG4=","IFBhY2tpbmc=","IExu","X0NPTVBMRVg=","L1VzZXJz","LnNhdmV0eHQ=","IFJvdW5kcw==","Pyw/LD8sPyw=","IGluZ2w=","IFJPQw==","X2ZlbWFsZQ==","IFN0YXJk","XV07","IHdyZXN0bGVycw==","IHRvcnJlbnRz","IHNpbmg=","77u/Cgo=","67O1","c2Vuc2U=","aG93ZXZlcg==","LlBoeXNpY3M=","SW5mcmFzdHJ1Y3R1cmU=","IFNhY3I=","RmVs","IERJU1RSSUJVVA==","w6ltZW50cw==","IFZhbGlkYXRlcw==","IyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMj","IHwv","IGVzbA==","IHLDqXNlYXU=","IEJpcA==","QllURVM=","X1dBVEVS","VHVybmluZw==","RUxT","IGp1eHRhcA==","IGxlc2Jpc2NoZQ==","w71jaA==","KFVua25vd24=","TmVv","QEpzb25Qcm9wZXJ0eQ==","IGFsdW1ub3M=","IFJhcXFh","aW1laQ==","LmdldEJvdW5kcw==","Lk1vdXNlRXZlbnRIYW5kbGVy","IyMjIyMjIw==","R2VuZXJpY1R5cGU=","L2Ntcw==","IHR1cm5v","INC80LjQvQ==","IGZvbGtsb3Jl","IEV2bw==","IGNvbmR1Y3Rpdml0eQ==","IGxlYmVu","IGdlYXJib3g=","LXZz","IM+G","IGRyaW5rZXJz","IGNvbmV4YW8=","IFRlZXRo","IGdldEFyZ3VtZW50cw==","IFJBVA==","ZW50aW91cw==","RWR1Yw==","K1c=","IEluc3RpdHV0aW9uYWw=","IEJvcmQ=","aXNFcXVhbA==","KHB3ZA==","IGlnbml0ZWQ=","IFJvdXNzZQ==","IGltcGFjdGZ1bA==","IE1hbGs=","IGdlcmFs","IFBpdm90","IGF6dA==","IGNzdmZpbGU=","IFJvcGU=","IFNPTFVUSU9O","IEFyYml0cmFyeQ==","IGxldHRv","Lk1vdXNlQWRhcHRlcg==","IH19fQ==","IFNhaWxvcg==","ZGVyYQ==","UHV0dGluZw==","IGNvbmNlbnRyYXRlcw==","IGF1dGhEb21haW4=","4oCd55qE","LWZpbmFscw==","LHN0cmxlbg==","TXVvbg==","IE9yZGluYXJ5","ZmlyZWZveA==","IExhVGVY","IEh1bmQ=","ZW5naW5lZXJpbmc=","L2JsdWU=","ZWRUZXh0Qm94","KCIiKTs=","IENEREw=","a2VwdA==","IEdldFN0cmluZw==","S2ly","KCk9Jw==","IE9DRA==","YW50aXVt","JG1lb
nU=","IEFwcGFsYWNoaWFu","U2VjcmV0YXJ5","66WY","4Li14Lii","U2VtYW50aWM=","ICpb","ZXN0b25l","dW5na2lu","TWF4WQ==","LXRvbmU=","In07DQo=","X1BhcnQ=","PE1lbWJlcg==","dHJhbQ==","IHRyYW5zaXN0b3I=","IC0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tCg==","IERlc2Rl","IHJpZ2h0ZnVs","IENvcm5lbA==","5pE=","LkhPVVI=","IHNpZGVsaW5lZA==","cmVmZXJyZXI=","bWF6ZQ==","IGhvbHN0ZXI=","IGNyaXBwbGVk","IERhdGVGb3JtYXR0ZXI=","b3BoYWdl","X21E","IGRlc2VsZWN0","cmF1ZA==","IFBLSw==","cm93RGF0YQ==","IGxvY2tzbWl0aA==","LnJlc3BvbnNlcw==","KHByb2R1Y3RJZA==","X1NUTVQ=","S2V5VHlwZQ==","LlRoZW4=","emVl","IGNydA==","IEdyYW5kbWE=","QFJlc291cmNl","IGJpdHdpc2U=","LWNtcHI=","44CCd3d3","emVpdGln","JmRpc3BsYXk=","Q2FydEl0ZW0=","LU5v","IG51bcOpcm8=","IG1hdXI=","IGluc3RhbmNpYQ==","CWR0","X25wYw==","IHNrYXRlYm9hcmQ=","4oCcQWxs","IENyb3dk","IMOkbg==","IGJyYXo=","Y2Fl","eW5ldA==","L3Bt","L3NjcmVlbg==","T1BUQVJH","IFZCb3g=","IGxlb3BhcmQ=","X2dyZWF0ZXI=","Y3B0","PGRk","IG1lY2hhbmljYWxseQ==","b3NwZWxz","KWY=","Lmx3amds","LmdldFBvcnQ=","IFBSRUY=","LkFkZFRyYW5zaWVudA==","cHBhcmQ=","IO2ajA==","RXRoZXJuZXQ=","IHNhbGluZQ==","KGxldmVscw==","IHNlcnZpY2VQcm92aWRlcg==","LkFuZ2xl","YWx0aXR1ZGU=","aWxsYXVtZQ==","IHNjYXBl","X0NBTEM=","X3F1ZXN0","IERpc3NlcnRhdGlvbg==","IEVETQ==","LUNkcw==","IGhvbm9yYXJ5","c3RvcHM=","IHN1YmRpcg==","IFZI","IENoZWF0","IHJpZ2h0ZnVsbHk=","UUU=","LldyaXRlQnl0ZQ==","ZmlndXJlcw==","ZW5uaWU=","KERCRw==","IHZva3NuZQ==","IGV4cGVuZGVk","VU5JQ0FUSU9O","aWxpbng=","IFJlY2Fw","X3ZlcnRz","IHRyYXVtYXQ=","IGdldFBsYXllcg==","IHZlcmJlc3M=","IGN1bHRpdmF0aW5n","IGluaXRpYXRvcg==","VGjDtG5n","ZmluZEZpcnN0","X3Blcm1z","IGJ1Yw==","ICIiIg0KDQo=","VFlQRVM=","b2JqZWN0TWFuYWdlcg==","KENvbmZpZ3VyYXRpb25NYW5hZ2Vy","IHRpbWlk","IHNuYXBjaGF0","IGNvbnNlZw==","CWRpc3RhbmNl","X3JpZ2h0cw==","X0Rlcw==","IEZsZXNo","LXZlcg==","IGFmbA==","ZnJhdWVu","IGJsYXNwaA==","IFF1YWxpdMOkdA==","bWFm","TW9uaXRvcmluZw==","LkRpZmY=","IHNob3JlbGluZQ==","IHJlc3BvbnNlQm9keQ==","bWVtc2V0","PGRlY2ltYWw=","U21
hcnR5SGVhZGVyQ29kZQ==","IGluc2V0cw==","IEJpbmFyeVRyZWU=","YW1lZGE=","IG5paGls","IE5heQ==","eW1vbG9neQ==","IFdH","IHRhcGk=","IEluc3RhbGxlZA==","bWFpbnRlbmFuY2U=","KX0iCg==","IFhP","LXBlcmlvZA==","c2Fy","IG5pbmd1bmE=","T1JNQVQ=","LnNldFByb3RvdHlwZU9m","IEti","IEhlbnJpaw==","w6l0aXF1ZQ==","IExhaG9yZQ==","CUFkZHJlc3M=","IG1lbHRz","Tnk=","X2FkdmFuY2U=","IHZlbG9jaWRhZA==","IGFsdW1ubw==","IHNhbml0aXplcg==","IHBoaXNoaW5n","IENvbWV0","IGNoaWFy","CXNwZWM=","dHJpbW1lZA==","KHN0YXRlYXJy","b25uZW4=","UmV2ZW51ZQ==","TGVucw==","IGNoYWlyZWQ=","IEFzc3VtZXM=","VHJhc2g=","X3Vuc2V0","XEJyaWRnZQ==","UG9pbnRTaXpl","IFBvbGlj","IHNleHVhbGVz","CWRmcw==","IFdpZGVTdHJpbmc=","IGFjY3J1ZWQ=","WVc=","X1NDSEVEVUxF","IGtpdGU=","IHBhcmFjaHV0ZQ==","W3RhYmxl","IGFjdGl2ZUNsYXNzTmFtZQ==","LlF1YWQ=","SXNyYWVsaQ==","IMWT","IGhvb2c=","IGNo4buJ","ZXdlYXI=","IHRpcmVsZXNzbHk=","c2V0RXJyb3I=","LmdldEFtb3VudA==","LnNldEl0ZW1z","IE1hbnNvbg==","IEJheWVzaWFu","X0ZsYWc=","QUNIRVI=","L29yaWdpbmFs","IGltbWFj","IExvc2luZw==","Jz4KCg==","TGlj","IE1pcmFnZQ==","IEFzc2VtYmx5RmlsZVZlcnNpb24=","VGVW","IFZhbHVlRXZlbnRMaXN0ZW5lcg==","LXNvbHZpbmc=","VGhv","cm91bGV0dGU=","X1dQ","IHVuaW50ZXJydXB0ZWQ=","IGZpZWxkVHlwZQ==","LlR5cGVk","IGFtb3Vy","IG1vY2tlcnk=","KHZvbA==","IFN1YmNvbW1pdHRlZQ==","IFJ1Zg==","ZXJveA==","OlVJQnV0dG9uVHlwZUN1c3RvbQ==","IEJsdXI=","IHd5a29u","bmNlcw==","QVNIQk9BUkQ=","ISEiKTsK","IG11cmRlcmVycw==","LmRhaWx5","IERJQUc=","amluZw==","IGRvbHBoaW4=","IGzDsm5n","IGLDtg==","IFZvY2FidWxhcnk=","LlN0T2JqZWN0","JykiPg==","IHp1bg==","IHNjcmltbWFnZQ==","dHLDqWFs","IExpZw==","W3Zp","Q29sZQ==","IGZyb3N0aW5n","LlBsYXllcnM=","LXRyYW5zbGF0ZQ==","RmVlbHM=","PVwiLw==","LkJ1dHRlcktuaWZl","ID8+Owo=","IGF2aQ==","aW5uaWU=","LkZhaWx1cmU=","IHNwaW5kbGU=","Q29uZmlndXJhdGlvbkV4Y2VwdGlvbg==","X2hvcA==","IHBvc2nDp8Ojbw==","IEF3YWl0","VUlJbWFnZVBpY2tlckNvbnRyb2xsZXI=","CWRheQ==","IGdlbm9t","Q2Fi","INGA0LXQt9GD0LvRjNGC0LDRgg==","T1JJR0lOQUw=","IGVqYWN1bGF0aW9u","KHRjcA==","U0VDT05E","IHRvbmlj","IExpc3RCb3g=","IAkJCg==","KCk+Cg==","IHF1YXRy
ZQ==","xrDhu6NuZw==","d2l0aEVycm9ycw==","Lk1heWJl","LOKApg==","dG9rZW5JZA==","X1VOREVG","IGZyZXNobmVzcw==","IEFtZW5kbWVudHM=","Lm1hcGJveA==","LkNW","KGJsb2c=","X2dldHRpbWU=","LnF1ZXN0","c3BhcnNl","IHJlc2FsZQ==","IGVudGh1c2lhc3RpY2FsbHk=","IFByb3N0aXR1dGFz","V2E=","Q2FyZ28=","LlBhcmNlbGFibGU=","U0VOU09S","IFJ5dQ==","TGF1Z2hz","X05hdGl2ZQ==","L3Bn","eXN0cw==","IHBob3RvYw==","566A","YWRvcHQ=","LnNwZWNpZXM=","Y29uY2lsaWF0aW9u","QWRqdXN0ZWQ=","LkZpcmViYXNlQXV0aA==","dXR0bGU=","b3JkaW5hdGlvbg==","IG11bmNo","IFN0YWtl","LnBpbmc=","YW5rZXI=","KFFTdHJpbmdMaXRlcmFs","IHN1YnNjcmlwdA==","ICAJCg==","IE1DQw==","X0NtZA==","c2V4eQ==","aW91","IE1BTlk=","IG5hbm55","VFJBSU4=","IGZsb3VyaXNoaW5n","IFdhdGNoZXM=","IFFNYXA=","IEZlcm0=","IHdhc20=","IEFiZWQ=","X1VE","IEdsYXNzZXM=","K3Y=","QXR0ZW5k","LkNoYWlu","IGRlY2VuY3k=","IFN1cHBsZW1lbnRhcnk=","aHVudGVy","LXR4dA==","ICJ9IjsK","LnNldFdpbmRvd1RpdGxl","KCI8Pw==","IG51bWJlcldpdGhJbnQ=","IGFmYXI=","56e75Yiw","cml0dGU=","L2xpc3Rz","KeKAnQ==","IGRpdmVyc2Fz","IGVtYmVy","LlJlYWN0Tm9kZQ==","IGthbmc=","IFN0YW1mb3Jk","W2F0","LmNsb3NlUGF0aA==","IGNvbnRyYWNlcHRpdmU=","KGxvY2F0aW9ucw==","IGF2YW56","IENvbnRhaW5lcnM=","IFNjaG9sYXJz","LmFjY3VyYWN5","INCy0YvQv9C+0LvQvQ==","5ZWP","PSItLQ==","IFdyZXN0bGU=","IEd1YW50YW5hbW8=","IG55bXBo","KGd1ZXNz","LnNldENvbHVtbg==","X3RF","LmNvbnRlbnRNb2Rl","IGludmFsaWRhdGVk","IFNob290ZXI=","IE1hdGVy","LlN1Ym1pdA==","IGFuZ2xlZA==","bmF2YmFyRHJvcGRvd24=","QW8=","IOa1","0LjRgdC6","IFNDQU4=","CWNt","IE1hcmt0","dHJ1Y2s=","OycK","Ly8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8vLy8KCg==","IGdoZXR0bw==","IGJ1aXRlbg==","IENsb3du","OiE=","IGNoaW1wYW4=","J2ZpZWxk","YW1tbw==","IERlcGVuZA==","KX0p","KEZMQUdT","IFJDQQ==","IENob2ly","TG9naW5QYWdl","IEdvcmQ=","Q29tcGFjdA==","LXBvY2tldA==","IGNvbnN1bHRhcg==","IEludGVyY2VwdA==","xZ90aXI=","dWV0eXBl","b25lbnRz","IHN0YXJ0UG9zaXRpb24=","IHBvc2l4","IFdvaG51bmc=","X0VYUFJFU1NJT04=","IExvZ2luQWN0aXZpdHk=","KG9wY29kZQ==","IFRhbmdv","IE51bWJlck9m
","Lm92ZXJmbG93","IFdDUw==","IE9jY3VwYXRpb24=","X2Nn","LlRvcGlj","IENhcmVlcnM=","QVJBVElPTg==","LmdldExpbmU=","IOyihQ==","IE5hY2h0","IHRvSXRlbQ==","aW5jbHVzaXZl","YXZpZXN0","LWFwcG9pbnRlZA==","KGludGVybmFs","Q09OVEVYVA==","KGRpZ2l0cw==","PXsiLw==","IHBsYXl3cmlnaHQ=","IGRlYWRsaWVzdA==","bGVhZHM=","LlBVVA==","ICp9Cgo=","IFBhY3Q=","IERpc2NvdW50cw==","TG9jYWxpemVkTWVzc2FnZQ==","IE3DpG5uZXI=","Xz4=","IG1hc2NhcmE=","KFByb2ZpbGU=","5Yqf6IO9","aW1pdMOp","IHdpbGRmaXJlcw==","LVJPTQ==","LmlzT24=","KGdyb3VwSWQ=","UmVwYWly","YWNjdW11bGF0ZQ==","IDwiLA==","IGhhbmR3cml0dGVu","IGFjaGV0ZXI=","IE1HTQ==","IElybWE=","LT57Xw==","Z2Vl","Y3JpbWluYWw=","IOiLpeimgQ==","IG1vbWVudGFyaWx5","IikhPQ==","X2xpdA==","IGV4cGlyZXNJbg==","LiIpLg==","6ZW/5bqm","IGZyw6Zra2U=","dmxj","IG9yYnM=","KSwk","IHZlbnR1cmVk","Lz5c","Y2hhcm0=","TnVpdGth","ZWxkaWc=","YXRvbmlu","V2l0bmVzcw==","LWxhdA==","IHNldEhpZGRlbg==","IHJlbGljcw==","IGNvbnN1bGF0ZQ==","LklHTk9SRQ==","IkFmdGVy","IHNldEFkZHJlc3M=","IGJlc3RlaHQ=","ICcnKQoK","LnhheGlz","IHNlcsOjbw==","IG1pc2xlZA==","X1VOSUZPUk0=","IFZJQQ==","aW5jcg==","IHplbml0aA==","IHZpc2Nvc2l0eQ==","IHRoaW5seQ==","LmdldFNoYXJlZFByZWZlcmVuY2Vz","LkVycm9yQ29kZQ==","IiksIg==","IE1pbGxpb25lbg==","IC8+KQo=","U2Nyb2xsSW5kaWNhdG9y","LXNlZWtpbmc=","IFBPTElUSUNP","YXNjYQ==","X3Js","TmF2aWc=","KGZ1bGxmaWxl","IHNvbGl0dWRl","IGp1dmVu","IGhhdWxpbmc=","IE1hY3Jvcw==","IEdyeQ==","IGV4ZXJjaXRhdGlvbg==","IEFUVEFDSw==","VGlja0NvdW50","IHJpdGVz","IGRvZQ==","UGFydGljbGVTeXN0ZW0=","IHNsdQ==","V2luZG93VGV4dA==","IENsYXNzTmFtZQ==","IHNsYW5kZXI=","CVBvcnQ=","am9uZw==","P2E=","LkRpYWw=","4oCUYXQ=","JG9ialBIUEV4Y2Vs","IHNvYXI=","RU5O","YXBwZWFyZWQ=","IHF1b3RpZA==","ZW1hY2hpbmU=","IG5pcA==","IG1pY3JvdGltZQ==","IEFsbWE=","OyE=","LS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t","IFBhc3NhZ2U=","IGR1bXBzdGVycw==","IEV4Y2x1ZGU=","IHN1Z2dlc3RpdmU=","IENpcmN1bGFyUHJvZ3Jlc3NJbmRpY2F0b3I=","X2Nscg==","QXJyYXlUeXBl","SUxMQQ==","RWxhcHNlZF
RpbWU=","RHJpdmVu","IHJlc291cmNlTmFtZQ==","IEdhcnJpc29u","c2VyaXI=","LWFoZWFk","IHBpbm5hY2xl","IEVzcHJlc3Nv","U3BhcnNl","IGFzc2F5cw==","IEdpcmxmcmllbmQ=","aW1pZA==","XT0nXA==","T05HTE9ORw==","IHBvcnRyYXlpbmc=","TGFuZQ==","IGLDunNxdWVkYQ==","IHJlaW5mb3JjZW1lbnRz","IFNwcmVhZHNoZWV0","IEFycmF5Q29sbGVjdGlvbg==","LGFycg==","bGlnaHRib3g=","aWNhbmE=","PCI=","YnVpbGRlcnM=","S2lk","IE1hdFNuYWNrQmFy","RVhQUg==","b2RjYXN0","IEZvdW5kYXRpb25z","IGluZHM=","PSckew==","Rml6eg==","LWZ1bmN0aW9uYWw=","KHdvcmtzcGFjZQ==","IHN0ZW1tZWQ=","X3BhdGNoZXM=","IEphcnZpcw==","UkVBRElORw==","IGRpc3Jlc3BlY3RmdWw=","IFFEb20=","ICR7Cg==","ZXN0YXR1cw==","UmVhY2hlZA==","IS4KCg==","SUxU","IE5ERUJVRw==","IENvdXJhZ2U=","YmlydGhkYXRl","IFRpbmc=","IHV0aWxpemFkbw==","w6FuY2hleg==","T3V0ZG9vcg==","IGhhbmRndW5z","UmVmQ291bnQ=","yZk=","cm9tbw==","IHR0cw==","LlNoZQ==","IFBhbmU=","44CRLOOAkA==","IElPQ1RM","L2JsYWNr","aW5zY3JpcHRpb24=","IGJpb3BzeQ==","IFRpbWVJbnRlcnZhbA==","LlRlc3RDaGVjaw==","IEdVSVN0eWxl","IENhcGFiaWxpdHk=","IEJlaXRyYWc=","ZG9ubmVlcw==","VHJlYXRtZW50","LmJhY2t1cA==","IHNpZ25pbmdz","IEJvY2E=","ZHJt","Lk1BSU4=","IGdvZWRl","IE1hcmt1cA==","R1JFRQ==","IEJhc2VTZXJ2aWNl","LkNyZWF0b3I=","IGphaWxz","IEthaG4=","SXBBZGRyZXNz","QUNISQ==","IGluaGliaXRlZA==","IEAkXw==","IEFzc2Fzcw==","IGVudmlhZG8=","SGVyb2Vz","0J/QtdGA","IE1hdmVu","Lmxz","IGl2ZQ==","fFJG","IHJlc2l6ZU1vZGU=","IHJ1bXBl","X2F0dGFjaG1lbnRz","VFU=","IHRhY3RpbGU=","QXR0ZW1wdGluZw==","IHJvYmlu","eWF3","IG1lcmNlbmFyaWVz","IEhhYml0YXQ=","ZW5kZGF0ZQ==","IG94eQ==","CVJhbmRvbQ==","b2hvbg==","SXNOdWxs","IFZhbGlkYXRpb25SZXN1bHQ=","44Oa","dW1iZWQ=","cHB2","IGFycA==","aWNoaWNr","X3Jubg==","IFRGVA==","VGV4SW1hZ2U=","Ik9u","IFNhbXBsZXI=","dG9wbA==","IGphbmU=","eWxpbmc=","IFVOSUNPREU=","VGFiSW5kZXg=","PHsK","c3VzcGVuZA==","dXZpYW4=","LGFwcGxpY2F0aW9u","0L7Qu9C40YfQtdGB0YLQstC+","eWF0","ZXppZXI=","IENIVU5L","IEFkbGVy","L0FkZA==","IEtleVZhbHVl","IHNwb3PDs2I=","U2FtcGxpbmc=","Y2hlcnM=","X0FNRA==","UnU=","Lk11c3RDb21waWxl","TmF0aW9u","QXNzb2M=","TWFuYWdpbmc=","IEVuZ
2w=","X0dC","IHN1Y2NpbmN0","IGRpc2xpa2Vk","IElrZQ==","QnVsbGV0aW4=","X0FSQ0hJVkU=","UHJvcG9zYWw=","IGpvZ2dpbmc=","LkNSRUFURUQ=","IGNob2w=","6KOF","jKg=","LXB1c2g=","IHJlc2VydmE=","Y29yZXY=","w6h0cmU=","VEhS","IGluY29tcGV0ZW5jZQ==","IGNoYXJpc21h","5oSf","ICI9PQ==","QlRO","IExvY2F0b3I=","aXZldA==","KCcuJykK","IGZvckluZGV4UGF0aA==","w7RtZQ==","IGNhcGFjaXQ=","d2F0ZXJz","IFdST05H","aG9h","IE1JUFM=","IGVtaXNz","IEphY3F1ZWxpbmU=","KGNtcA==","IGVlbnM=","TGVv","LnRpbWluZw==","Q0xVU0lPTg==","ICgiLQ==","5ZOI","LmtvZGU=","IFVuZGVydA==","IGJld2lsZA==","IEVzc2Vu","Lmhk","IHJlbmVnb3Q=","IG1vd2Vy","IGxzcA==","IHBlbmNoYW50","IG1hbm9l","IGFnbGk=","IHJlY2Fs","IE9QRVJBVElPTg==","KF4pKA==","IM69","IFNjb3BlZA==","IEAiCg==","PWxhYmVs","W2xvYw==","SW50bA==","IE56","dGFibGV0","LkNvbHVtbk5hbWU=","IHNjcmVlblNpemU=","REJ1cw==","Y29va2Vk","LXJlZ2lzdHJhdGlvbg==","4oCcT25l","LW5vbg==","IHdpxJlj","IGNvc3Rh","LmFkZFRhYg==","LmNvbmRpdGlvbnM=","IEhlc3M=","TUVNT1JZ","IEF2YWxhbmNoZQ==","KCl9fQo=","IHRyaXBsZXQ=","IGxhYnlyaW50aA==","IE5vZGVMaXN0","IE5ZVA==","IHllbmk=","ZGZm","Lkh0bWxDb250cm9scw==","QVZJUw==","L01hdGg=","IG1lbWNtcA==","2KfYoQ==","0L7RgdGM","Y3JhcA==","KHBhZ2Vz","IGx4bWw=","IFFEYXRlVGltZQ==","X3RjYg==","IG9wZW5pZA==","IHN5bmFwdGlj","IE1ETUE=","KHNsdWc=","aWdtYXRpYw==","ZW5vcg==","IGNyYW1wZWQ=","R09Q","rZA=","LmlzRmlsZQ==","IERpZmZlcmVudGlhbA==","ID0iIjsK","CQkJICAgIAk=","IENvb2tl","CVVGVU5DVElPTg==","IHBlcnNldmVyYW5jZQ==","UmVsYXRpdmVMYXlvdXQ=","SU1QT1JUQU5U","IGV4b24=","INC+0L0=","aWJhc2U=","KENPTlQ=","bm92YXRpb24=","5L2V","W3N1Yg==","QWRtaW5Db250cm9sbGVy","SFRUUEhlYWRlcg==","Y3JlYXI=","IE5JUg==","IERyb3BEb3duTGlzdA==","IHZhbGlkZQ==","IGRlaHlkcmF0aW9u","Lidd","KFdJTg==","IC4uLlw=","IHBob3Rvc2hvcA==","CUluaXQ=","X2NvdQ==","IHRpbWVab25l","ZGFyd2lu","cm9tYXRpYw==","TmF2aWdhdGlvbkl0ZW1TZWxlY3RlZExpc3RlbmVy","YnJhdGVz","XS0tOwo=","IHRyYWdlZGllcw==","IFBlZGlhdHJpY3M=","U01BUlQ=","LUFQSQ==","IE1lc3NhZ2VMb29rdXA=","CXZv","IHByZWp1ZGljZXM=","IG1B","VXBz","IE1JU1NJTkc=","CWFk","Q3JlYW0=","IFRi","IE
1vbmE=","X2dob3N0","CXR5cGVz","RW1i","IERvY3VtZW50YXJ5","Jyk7CgoKCg==","IGx1cA==","X1JlZmVyZW5jZQ==","IEJBVENI","IGludGVydHdpbmVk","PENlbGw=","IENhYnI=","bmF0aW9u","IGlzQ29ubmVjdGVk","LnJlbW92ZUxpc3RlbmVy","IGNvbmc=","X3Rp","IFNpbGljb25l","IOqysOqzvA==","IFdBTg==","IEdpYnJhbHRhcg==","L3Jlc3BvbnNl","CXBlcnNvbg==","Y2hhbnRz","VklQ","ZW1lcmdlbmN5","UGl4ZWxGb3JtYXQ=","LUFt","IHNvdXRod2VzdGVybg==","X3BsbA==","aWZlcnM=","X09OQ0U=","IEZheWV0dGU=","Lm5jYmk=","X1BhbmVs","LlF1YWw=","IHBvbHlz","IGNyZWF0ZVN0YWNrTmF2aWdhdG9y","77+9dA==","IGxheW9mZnM=","IEJsYW5jbw==","RmVhdA==","IFZpbWVv","X2NoaQ==","X2xpZmV0aW1l","UE9JTlRT","LHByaXZhdGU=","IHVuYmVhcmFibGU=","cHJpbnRpbmc=","IGNnaQ==","LkJBQ0s=","IGludGVybnM=","IE5ld2x5","aW5mZWxk","KElC","IEthdGE=","IERlZmVuZGFudHM=","VGhy","6aKE","X1ZG","RkZGRkZGRkY=","IGRhdmlkamw=","IGJpdHRlcmx5","U3VnZ2VzdGlvbnM=","LnNldENhbmNlbGFibGU=","RklOQUw=","YXNvbnM=","X3J3bG9jaw==","X1dSQVBQRVI=","IGhhcHBpZXN0","KHJvd0luZGV4","w7NzaXRv","VE9UWVBF","QXV0b21hdGlvbg==","TG9nRmlsZQ==","IGNvbnNvbGF0aW9u","44OA","IHTDqm0=","IHByZXI=","cmd5eg==","IEdlZw==","CWR0bw==","LmRlZmF1bHRWYWx1ZQ==","IEthbWk=","IEFTRQ==","b3B0aW1pemVk","IO2PrA==","IG9yaWdpbmF0ZXM=","ZXJyTXNn","IGVzcGHDp28=","KFNZUw==","IE1jQg==","ZGFuY2U=","X2RldGVjdGVk","IGZyw7w=","CQkgICAgCQk=","PERhdGU=","KGNvbWI=","IERlY2lkZQ==","XEZpZWxk","IFByb3Bvc2Vk","Umli","IGRpc2xpa2Vz","IFdpZW4=","CURvY3VtZW50","IHRyYWY=","IHN0b3JpYQ==","IFRlbGxz","Jyk9PQ==","Q3Jp","KFZBTFVF","IEJ1cm5ldHQ=","LHZvaWQ=","IGRhbmg=","IGNjcA==","QmxvY2tjaGFpbg==","OiItImAK","SUNsaWVudA==","SVNPREU=","SXNzdWVy","KX0NCg==","LGJ1dA==","IFVwaA==","KFN1Yg==","IHTDqWzDqXBob25l","IG9uRGF0YUNoYW5nZQ==","IG1hcnNoYWxsZXI=","LWFuYWx5dGljcw==","LGNvbnRlbnQ=","IGRlYmFjbGU=","X1ZhbHVlQ2hhbmdlZA==","IGZhdW5h","ICM9Pg==","IGZveWVy","J3V0aWxpc2F0aW9u","IE3DvGxsZXI=","IEZldGlzaA==","IGRlZmF1bHRNYW5hZ2Vy","IGJhY2t0cmFjaw==","QmFo","RXhwbGljaXQ=","X0FTQ0lJ","IG1BY3Rpdml0eQ==","KE1zZw==","IOqyjA==","IFRFUk1T","IEFuZ2ll","SFNW","IE1vc3F1ZQ==","Lk5hbWV
z","7Yq8","cmVzdGU=","X3Bhcm1z","IGdhcGluZw==","IGNyb3BwaW5n","RGF0YUZyYW1l","IHJlc3BvbnNpdmVuZXNz","X3VuZG8=","X3RyYW4=","LnRlcm1pbmF0ZQ==","IGl0YWxpYW5l","IHdhbGt0aHJvdWdo","IGF0dHJhY3RpdmVuZXNz","0LTQtQ==","X1NUUw==","X2xlYXJu","IGNob2NvbGF0ZXM=","aWVyYXJjaGljYWw=","LXRoaW5raW5n","ICkpKQ==","aXNobWVudHM=","LkxvZ2Y=","IFRNWg==","IENhbmFyeQ==","Zm9pbA==","IFZhY2NpbmU=","LnZ4","IFN1cnJvdW5k","SW50ZXJtZWRpYXRl","IGlvdg==","dmFpcw==","JzsiOwo=","772eCgo=","6YCB5paZ","4oCmaXQ=","U2VhdHM=","Q2xhcg==","V2Fycw==","IEh1dGNoaW5zb24=","IEhhc2Fu","IScpCgo=","IFJpY2hpZQ==","Y2hlaWRlbg==","KCQoJw==","WW9yaw==","IGxpZHM=","IGFscGhhbnVtZXJpYw==","IEdsb2Nr","LnNoYXBlcw==","IHNwYXJraW5n","X2Vwc2lsb24=","dXBsaWNhdGVk","LmRpcnR5","XSk9PQ==","IOychOy5mA==","IHNjbg==","IC8qKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioqKioq","X1BSRVZJRVc=","X0hD","aWVsZGluZw==","ZmdldHM=","IEFkZGlzb24=","IHByb2R1Y3RTZXJ2aWNl","LWZpZ3VyZQ==","KHJldHZhbA==","emFubw==","IGF1dG9i","CXNk","X251bWVy","IFNldExhc3RFcnJvcg==","IEZpb3I=","aWZpY2FuY2U=","VW50aXRsZWQ=","IGluZmllbGQ=","IHt9KSk7Cg==","IHNwYWM=","IHJvb2tpZXM=","KGRlc2NyaWJpbmc=","bmdlbg==","4K6/4K4=","LnJkZg==","Lk11dGV4","IGtuZWVsaW5n","IFFF","c2V0TWF4","UmVhZFN0cmVhbQ==","IHZlbnRhcw==","c3V0","Y21wZXE=","LldyaXRlQWxsVGV4dA==","IEV4cGVyaWVuY2Vk","JF9f","IGthdW0=","IExJUw==","IGRvY3VtZW50b3M=","X0hFQUxUSA==","aWNvbnRhaW5z","IGFydGlzYW5z","T1dORVI=","IGJsaW5rZWQ=","Z2V0RGlzcGxheQ==","IHRvZW4=","IHJvd051bQ==","IGF2cmls","IGludmlz","IEtlYXI=","dG9CZUluVGhlRG9jdW1lbnQ=","YXB1cg==","IHJhY2tlZA==","IE1jTWFzdGVy","X0FUVFJJQg==","SGF6","IGZhY3R1cmE=","L3Rz","INGA0LDQt9C80LXRgA==","IHpm","IHNob3J0ZmFsbA==","LmZhc3Rh","IENPTlNUQU5U","Lm1hbmFnZWQ=","Z2Vtcw==","U2hhcmVkUG9pbnRlcg==","IGJsdXJyeQ==","YnJpZ2h0bmVzcw==","KGNvbXBvbmVudHM=","IC4uLiIKCg==","U0VMTA==","IElsbHVzdHJhdG9y","LmdldENoYW5uZWw=","IHRyb3V2w6k=","eXN0ZXJz","IHZvaXM=","IExpbmRlbg==","IGVtb2ppcw==","IGJyYXds","IE1TUg==","IEVsbw==","IENyb2F0aWFu","UG9wdXBNZW51","TGV
3aXM=","LkpXVA==","IGFzdG9uaXNoZWQ=","QnVzaA==","KGl0ZW1JZA==","IGRldGFjaG1lbnQ=","IEVuY29yZQ==","5bCU","IHJla2w=","IGNyYW0=","KSQv","LmdldEhvc3Q=","X3JlY29tbWVuZA==","LUhU","X2NhbGlicmF0aW9u","QXV0aGVudGljYXRl","LmZpcmViYXNlYXBw","VU5JWA==","CUNhbWVyYQ==","IEhFQVA=","SWRlYWw=","Lm9mZmljZQ==","IGdvb2Z5","KFN5bWJvbA==","IGpvdWVy","X3BhcnRpdGlvbnM=","IHJhcGlkZW1lbnQ=","IEdOVU5FVA==","aWRVc2Vy","IHN1cGVydmlzZQ==","KENvbnRhY3Q=","QVdO","44GY","IG5hYW0=","IGF1c3Q=","5Zyo57q/","X3NvZnRtYXg=","QWxsb3dBbm9ueW1vdXM=","YW1tYWJsZQ==","Uk9VVEU=","KkQ=","IGFkZW4=","IENyaXN0aW5h","IENyaXN0aWFubw==","IGJsb29kc3RyZWFt","c3ViY2xhc3M=","X3BlcnNvbmE=","Q0hJTEQ=","LWtub3c=","IG5hdmlnYXRpb25PcHRpb25z","IFp1a3VuZnQ=","IFBpeGFy","VHlsZXI=","IHVuZGVyd29ybGQ=","IHNpbmNlcml0eQ==","IGRpc3BlbnNlcg==","IGt0ZXI=","aWRkZXJz","LmFkZE5vZGU=","LWNoZWNrZWQ=","IGtleXN0","IFdUTw==","LnNpZ25hbHM=","IGFkdmVudHVyZXI=","IFBhbmc=","XFI=","PXBvcw==","IGRpc3BlbnNhcmllcw==","IENsb3NldA==","KCJ7XCI=","aWRlb24=","IG7DqWNlc3NhaXJl","KCkiCg==","X1JFQ0VJVkVE","IHLDqXN1bHRhdHM=","IG1vZGVu","IEljZWxhbmRpYw==","O2Q=","LmFsbG93ZWQ=","KG5ld1VzZXI=","IG1lcmNpbGVzcw==","LldhaXRGb3I=","IGRheWNhcmU=","IENvbnZleW9y","INk=","2KfZ","4Liy4Lg=","0Z8=","0Z/Rnw==","IOC4","4LmA4Lg=","aeG7","44CA44CA44CA44CA","INin2A==","4KWI","IOOAgA==","0Zc=","aeG7hw==","0Z/Rn9Gf0Z8=","4KWH4KSC","0ZbQtA==","4KS+4KSw","2YbYrw==","0ZbQsg==","IOCkrA==","IOCknA==","4KWk","0L3Rlg==","4KSX","INii","IOCkqA==","0ZQ=","INGA0LA=","IOCkhQ==","0YHRjA==","IOCktQ==","0YbRlg==","IHbhuw==","s9iq","IOCkpg==","bsSb","IOCksg==","IOOAgCDjgIA=","4KWC","4KSm","4Lit4LiH","2YjZhg==","4KS1","YcWf","4LmC","zrnOug==","IOCksA==","INCy0Lg=","4KWN4KSv","4KS+4KSo","INin2LI=","2KfZhw==","m2k=","IGjhuw==","4KWL4KSC","aeG6vw==","IMSR4bs=","4KSv","z40=","IGPhu6c=","INio2LE=","INmF24w=","INin24w=","IOCkhg==","44CA44CA44CA44CA44CA44CA44CA44CA","4KS/4KSv","0Z/Rn9Gf0Z/Rn9Gf0Z/Rnw==","0LLQuA==","2LHYrw==","0L3Rgw==","2YrZhg==","zrnOsQ==","IOCkpA==","0YfQuA==","IOCkleCksA==","2KfYsg==","
YcSf","IOCkiQ==","4KSs","z4TOsQ==","2KrYsQ==","2YfYpw==","4Lij4Liw","asOt","zpE=","0LDRgtC4","IOCklw==","INGC0LA=","2oY=","4KSc","4Liy4LiZ","IOCkrQ==","4KS/4KSV","w6F2","INqv","z44=","4Liy4Lii","IOCklA==","xZnDrQ==","2KfZiA==","INGJ","IOCklOCksA==","0LXQvdC90Y8=","INqp2Yc=","4KSh","z4TOvw==","zrXOuQ==","IOCkhw==","4KWN4KSk","4KSf","27E=","INiM","z4HOvw==","zrfPgg==","66w=","0ZbQvQ==","aeG7gQ==","acOqbg==","INCy0ZbQtA==","ZMSx","2YTbjA==","INiy","z4HOsQ==","INuM","4Liy4LiH","IHRo4bs=","IOC5gOC4","aeG7h24=","2KfZig==","0LDQvdC90Y8=","0YDQtQ==","zp8=","5ZI=","2KfYtA==","4KS+4KSy","64WE","IOCkrw==","INix2Kc=","4KS8","0YPQsg==","2YjZhQ==","INi52YQ=","zq/OsQ==","4KWI4KSC","4KWB4KQ=","4Liy4Lih","IG3hu5l0","IOCkjw==","44CA44CA44CA","IOCkquCksA==","INin2YY=","INin24zZhg==","IHbhu5tp","zqM=","4KSa","27A=","aeG7gw==","4Liy4LiB","zpk=","2KfYuQ==","0ZbQuQ==","4LmB4Lil","2YfYp9uM","0YfQsA==","LjouOg==","z4TOtw==","IM6R","2LHbjA==","IG5naA==","zr3OsQ==","4LmD4LiZ","4KS/4KSk","IM66zrHOuQ==","z4TOtQ==","4KWN4KSf","zrzOsQ==","0LvRgw==","w71t","z4DOvw==","4KWI4KWk","77y8","2LHZig==","0L3QuNGF","z4HOuQ==","2YA=","0YDQvg==","IOCkmg==","4KS+4KSk","2KfZgg==","IOCktg==","IMSR4buZ","w6lobw==","aeG7gXU=","4Lio","0ZbQu9GM","dXnhuw==","27I=","IG7Egw==","z4nOvQ==","IM+Ezr/PhQ==","0LrQuNC5","7ZY=","INGJ0L4=","4KWN4KS1","INin2YTYow==","2KfYpg==","dMSx","IM+Ezr8=","rKw=","INi3","2YXYp9mG","IM6g","0LTQuA==","4Li2","4KS/4KSP","44Gj44Gf","24zZhQ==","w61uaA==","cmF2","xJt0","zpU=","INGP0Lo=","54I=","4Lit4LiZ","44Gm44GE","4KS/4KSy","0ZbRgg==","0LfQsA==","w6Fw","4KSn","IOq1","4LmB4Lil4Liw","w61jaA==","INii2YY=","2KrZhw==","INmF2Lk=","0L3QuNC5","xrDhu5tj","INin2YTYuQ==","2LHYqA==","4KS+4KSu","INix2Yg=","6as=","xLF5","IGjhu40=","0YLRjNGB0Y8=","IM6a","IOCkh+CkuA==","77y/","INqG","INmI2KfZhA==","7ZWZ","0Z/Rn9Gf0Z/Rn9Gf0Z/Rn9Gf0Z/Rn9Gf0Z/Rn9Gf0Z8=","IHbDvQ==","4KS/4KS4","4buvbmc=","2LPbjA==","IOyD","4KS+4KSC","772k","4LmH4LiZ","IOCkpQ==","bGFyYWs=","w6J5","dMSb","zr3Ovw==","INmF2Yg=","IG5nxrDhu51p","5aY=","2
YrYrw==","aWxpcg==","2KfYrQ==","IOOA","2Ys=","INGA0L7Qtw==","INC5","IGThu6U=","4LmA4Lib","4Lix4LiH","0LvQtQ==","4KS+4KSv","77+j","2YjYp9mG","IHRo4buD","44O9","w7zFnw==","558=","IM6/","IM6j","24zYqg==","4Lix4LiB","zqQ=","IOCkj+CklQ==","INmH2YU=","7JuU","IM6c","IOC4hA==","r7g=","2KfYsduM","4KS/4KSo","IG5o4buvbmc=","IG5oxrA=","0LjRgtC4","44Oz44M=","4LmA4Lij","INCb","0YDRlg==","w6Fk","w7x5","aXll","IM6V","IOC4qg==","z4POtw==","IOus","77s=","4KSj","zpc=","4KS2","INmF2K0=","2YTZig==","IM68zrU=","IHDFmcOt","zp0=","4KWN4KS3","dGly","2LHYp9mG","IMSR4buL","INC60L7Rgg==","0LrRgNCw","zrvOvw==","IM+Ezrc=","0YnQtQ==","z4TOuc66","4Lix4LmJ","aeG6v3Q=","zrHOvQ==","7ZQ=","0LrQuNGF","INC/0L7RgQ==","dMSxcg==","4KWN4KSu","2LHZgQ==","xJts","4KSt","b3bDqQ==","IGzhuw==","4LmE4LiU","44Gq44GE","4Lip","aeG7h3U=","zr4=","INi52YTZiQ==","0LTRgw==","IGThu6VuZw==","0LDRgNCw","4KS+4KSm","b8W+","2YTZhw==","2YTZhQ==","0L3QvtGX","27Hb","4LiC4Lit4LiH","zqE=","4KWA4KSC","INC/0ZbQtA==","IOCkqw==","4LiY","zrXPgg==","4KS+4KS4","4LmD4Lir","0L7QstCw","2KrbjA==","4Lit4Lii","4LiN","IG7Eg20=","z4TOuQ==","2YjbjA==","INC80ZY=","INin2YU=","z4DPjA==","IHrDoQ==","4KSI","IOCklg==","IG7Emw==","Y8Ot","2Ybarw==","0YHQuA==","zrY=","bsOh","nWk=","xak=","2KY=","INin2YTYsw==","4buRYw==","4bq9","2KfYrA==","2YXYpw==","6rWt","0L7Rjg==","2K/YsQ==","4LmA4LiB","4Lig","w6FuZw==","7ZWp","IM+EzrfPgg==","INGW0L0=","0L7Rlw==","4KWH4KS2","4LiL","4KWL4KSX","0LvRlg==","IHDFmWVk","xI1uw60=","INC60LA=","IM6k","4buZaQ==","dsOt","0YDRjw==","4KS+4KSc","0LDRhQ==","4KS/4KSw","4Liy4Liq","ZMSxcg==","2KI=","zpo=","IM6t","IHThuqFp","aeG7h2M=","aeG6v24=","INi6","2KfYrg==","INin2YTYrQ==","INCx0YM=","IHbhu4E=","0LzRlg==","2YXZhA==","bcSxxZ8=","4Lib4Lij4Liw","zr/PjQ==","zrXOrw==","IOCksOCkuQ==","0L3QuNC8","2LnYrw==","INio2KfZhA==","pJE=","56A=","IG9sbQ==","z47OvQ==","IGjhu41j","2KfYs9iq","4Liy4Lin","2YjYqA==","0ZbRjw==","INmH2KfbjA==","66eI","4KWM","IMSM","4KSP","2KfYr9mH","INin2Yg=","0L3Ri9C8","4bqx","2YXZhg==","aeG7h3Q=","bGHFnw==","0ZbQtw==","2YjYsw
==","IGzDoG0=","IMSR4bq/bg==","4KSq4KSo","INuM2qk=","INmE2YQ=","IG3Emw==","INio2LHYp9uM","4KS+4KS5","INmF2LE=","ZcOn","4Lit4Lij","zrXPgQ==","4Lix4LiU","0LrQvtC9","bm91","INCz0L7QtA==","4Li54LmJ","4LmA4Lil","2pg=","IMSR4buLbmg=","IMSRw7M=","0LDQvdC+0LI=","INmB2LE=","2KfYsdiv","0ZbRlw==","4LiE4Lij","4KWN4KSl","Y2Fr","0YbRltGX","IOOAgCDjgIAg44CAIOOAgA==","2YfYsQ==","4KWJ","IGdp4bs=","7YY=","4oCM2YfYp9uM","4KWB4KSw","IOC4gQ==","xYg=","5qg=","zp/O","4Liy4LiE","0LrRgNCw0Zc=","4bqjbw==","b8Sf","IOCkuOCkrg==","IHZp4buHYw==","IHPhur0=","IG7DoQ==","2YrZhQ==","o3A=","w7Z5","2YjYsg==","IM66zrE=","2YXYrw==","bsOtbQ==","b3bDoQ==","4KS+4KS1","4KS+4KWk","4KWN4KS4","57c=","4bq3Yw==","IOC4ng==","772A","w7Rp","IOG7nw==","zr/Pgg==","IHRyw6pu","0LzRgw==","0YHRjNC6","4Lif","b3ZhdA==","IG3huw==","7Y8=","INCy0L4=","zrXOvQ==","4KWC4KSw","2q/Yp9mH","IMSR4buZbmc=","2qnZhg==","0YnQuA==","INC/0YDQsA==","w7xyaw==","2YjYuQ==","4bqlcA==","bsO9","IHF1YW4=","0ZbRhw==","IM69zrE=","IOCkqOCkuQ==","INqp2YY=","Y8Sx","552A","0LHQvg==","INin2LM=","6Ls=","2KfZhtuM","4LiV4Lij","z4TOrA==","INij2YY=","6YKj","IOC4oQ==","0LrRgg==","acOq","IGjhu6Nw","2KrZhQ==","INio2YY=","aG9k","zrnPgw==","4Lir4LiZ","INGX","0LvQuNCy","INqp2LHYrw==","INmF2LQ=","2KfYtw==","2KjZig==","IOC4ow==","2K/ZhQ==","2YTYp9mF","4LmI4Lin","INmG2YU=","IOaX","6YU=","0L3QvtGB0YI=","aeG7g20=","6rWQ","YXnEsQ==","INio2YjYrw==","2q/YsQ==","IGhp4buHbg==","57M=","0YHRgtCy0LXQvQ==","IOCkleCksOCkqA==","IM+EzrfOvQ==","IOC4rQ==","INmF2Ko=","gW4=","2KzZhQ==","zrvOuw==","INGA0LU=","4Li04LiU","INin2YTZgg==","zrHPgQ==","IOCkr+CkuQ==","bsOtY2g=","0ZTRgtGM0YHRjw==","IOC4lw==","24zYtA==","xZll","IG5lYm8=","INGH0LA=","bG91","0YHRgtCy0L4=","INCn","4LiE4Lin","2YfZhQ==","4LmA4LiU","IOC5gQ==","IOC5gg==","27M=","xaluZw==","IG5lag==","24zaqQ==","IHPhu60=","2YHYsQ==","zqA=","INC/0L7Qug==","INin2YTZhg==","IHbFoQ==","4bqr","IG5ow6A=","44CA44CA44CA44CA44CA","zq7Pgg==","zr/PgQ==","IM+H","4LmA4LiX","0YPQu9GM","44WH","IHnEsWw=","0YDQvtC0","zq/OvQ==","7JeI64uk","2KfYtQ==","IMSR4
bqndQ==","4KWH4KSV","0YDQvtC8","44GT44Go","INin2LE=","5aW5","INiq2K0=","xaF0xJs=","4KWN4KSy","4KWN4KSV","INqp2KfYsQ==","dWrDrQ==","IOCkieCkqA==","IM6xz4DPjA==","IG3DoA==","xb7DrQ==","IOC4iA==","YWzEsQ==","4KSr","0YfQtdGB","INi52YY=","5pWZ","776G","4KS/4KSC","IHPhu7E=","0LLQvtGA","IHRo4buxYw==","642w","44Gm44GE44KL","4LmI4LiH","2KrYqA==","IG5oaeG7gXU=","g24=","IMSR4buT","IOC4qw==","27U=","bcSb","4bqhdA==","IGNow61uaA==","zrzOrQ==","YW7EsQ==","IGLhu4s=","4bqxbmc=","xZllZA==","6Z8=","w6FuaA==","2YDZgA==","INmF2LM=","4buLY2g=","xINu","b3bDoW7DrQ==","4LmI4Liy4LiH","IOC4mw==","IG7GsOG7m2M=","0LHQvtGC","xLF5b3I=","INiu2YjYrw==","27k=","INmF2K8=","IMO8eg==","7L0=","2YjZgg==","66W0","0LvQtdC6","IGPhuqM=","0L7Qu9C+0LM=","4LmJ4Lit4LiH","bWnFnw==","4LmJ4Lin","xKk=","zpw=","4Lit4LiB","77y/77y/","4KSW","INCv","66y0","2KfbjNuM","c2vDqQ==","dXnDqm4=","ZcWf","w6Fp","w7puZw==","w6Bv","0ZbRgQ==","57Y=","IOCkhuCkqg==","77o=","zps=","IOqztQ==","INCG","IOCkheCkquCkqA==","4bupbmc=","z4zPgg==","IG5naGnhu4c=","INin2YTYqA==","4KWL4KSo","IOCknw==","IOycoA==","IGPFqW5n","IOCkieCkuA==","IOCkoQ==","INi02K/Zhw==","4Li14LmJ","27Q=","4bq3dA==","5pav","IOuN","INC/0Ls=","0LHQuA==","6rOE","zr/OvQ==","IMOnxLFr","IGJ1bHVu","2LPZhQ==","YcOn","2KfZhtmH","24zYsg==","bGXFnw==","4bqvYw==","2KfaqQ==","IOCkuOCklQ==","INC+0YDQsw==","IOC4mQ==","4KS+4KSl","INmF2YI=","IM6URQ==","0Y7RgtGM","4buZYw==","IM63","c29i","IHRoZW8=","5Z4=","INin2YTYtA==","4LmA4Lie","zq3Pgg==","4LmA4LiC","5Zk=","4KS/4KS2","INio2KfYsg==","0YDQvtCx","IM6zzrnOsQ==","zrzOtQ==","INio2KfYtA==","4KS+4KSH","IHF1eQ==","zrvOtQ==","2KfZgw==","INGA0L7Qug==","IFTDvHJr","INCl","0Z/Rn9Gf0Z/Rn9Gf0Z/Rn9Gf0Z/Rn9Gf0Z/Rn9Gf0Z/Rn9Gf0Z/Rn9Gf0Z/Rn9Gf0Z/Rn9Gf0Z/Rn9Gf0Z/Rnw==","5qk=","IHBo4bqjaQ==","4LiE4Lin4Liy4Lih","Ojo6","bMOt","IGpzb3U=","24zZhA==","w6FsbsOt","lJQ=","7ZaJ","5oOz","bMOh","IM+Dz4U=","0YvQstCw","IG5o4bqldA==","4Lit4Lih","27g=","ZWNlaw==","0ZbRgA==","2YjYtA==","zrvOsQ==","IM6S","0L7RgNCw","2YHYqg==","ZWRpcg==","0YPRhQ==","5LiW","INCj0LrRgNCw0Zc=","
IO2U","zqzOvQ==","INi02LE=","INin2YTYrA==","0LXRgNC10LQ=","7JiB","IGjDoG5o","77+j77+j","0LzQtQ==","0Y7RgtGB0Y8=","INil2YTZiQ==","7JeF","INiq2LE=","0LrQvtC8","INi02K8=","INin2YTZgw==","IM+Dz4TOvw==","4KWN4KSm","66Ck","0YPQstCw0L3QvdGP","IHRow6w=","6rSA","zrrOtQ==","2LPYqA==","7YOA","IO+8jw==","IOC5geC4peC4sA==","IM+M","0L3QuNGG","INCd0LA=","0Y/Qsg==","bMO8","zrnOvw==","2YbYr9mH","2YTZgw==","IG5nw6B5","IG5ow6Ju","IF57","4KWD","IGdlcmVr","2KfYsdmH","IGPGoQ==","IOC4lQ==","5oI=","55Sw","4KWI4KSC4KWk","4Lix4Lin","dsSb","w7Z6","0LjQu9C4","IHBow6Fw","6riI","IM6f","IHDFmWk=","IOyWtA==","INC00L7Quw==","2YjYsdiv","4LmA4Lih","z4POtQ==","4Liy4LiX","b8OgaQ==","4Lij4Lih","27Y=","IOC4mg==","aXlldA==","z4TOsc65","7ISg","IM61z4A=","4KS/4KS1","6rmM","0LPQsA==","INGB0LvRgw==","IGjDrG5o","INiv2KfZhg==","IOCkl+Ckrw==","2YrYpw==","6JE=","4KSC4KSk","INiz2KfZhA==","66CI","bGVyaW4=","4KWH4KSk","LjouOi46Ljo=","IOuF","INin2YTYpQ==","4bqjbmc=","6IQ=","zr/Ouw==","0L/QvtCy","IM64","27c=","IG7Dsw==","IGTDvMWf","IHRp4bq/","2YjYrA==","IGpzZW0=","4bqhbmc=","44GC44KL","4Lit4Lia","2YjZig==","4KSV4KSw","INC00LU=","r7w=","INC90L4=","0YbRltC5","z4PPhA==","0LrQuNC1","z4POtc65","7JWI","IGjGoW4=","IOCkleCkuQ==","2KfYtg==","7Lg=","44Of","44CA44CA44CA44CA44CA44CA","44KI44GG","4KS+LA==","0LXRgNC4","66mw","7ZSE","INC/0L7RgdGC","2K7YsQ==","4KWL4KSk","w6J1","0LrQvtC5","ZGFraQ==","7YU=","Ojo6Ojo6Ojo6Ojo6Ojo6Og==","IMO2eg==","0YDQsNC2","bsOtaG8=","4Lir4Lil","IM+Dz4TOtw==","IMSR4buB","IGvhuw==","aeG7g24=","xZlp","IGt0ZXLDqQ==","ooU=","w7zDpw==","2YrZgQ==","IGzDvQ==","IHRo4budaQ==","IOyGjA==","0L3RjA==","0IY=","0YLRgA==","4LiH4Liy4LiZ","0LrQvtGX","zrzOvw==","IHPDvHI=","dXnhu4Fu","INmF2Kc=","4KSC4KSX","IMSR4buTbmc=","w7Ju","4KWB4KSy","4KWN4KSq","zrvOtw==","2YXYsQ==","0L/RgNC4","aXlsZQ==","4KS+4KSq","IOCkheCkqA==","INGU","IHnDtm4=","2YTZgQ==","YWTEsXI=","4b0=","IOqzoA==","2K7YtQ==","aW1peg==","5ZyL","INC90LDQtA==","IMWZ","0L3QvtGB0YLRlg==","INin2YE=","0LDQvdGW","4KWH4KSf","IOunkA==","44CA44CA44CA44CA44CA44CA44CA44CA44CA
44CA44CA44CA44CA44CA44CA44CA","IOyKpA==","4Li04LiV","5YQ=","24zZhw==","0L7RiA==","xb5pdA==","7Iuk","4KWA4KSo","IO4=","5qWt","4KWH4KSo","INiw","IGxv4bqhaQ==","4LmE4Lib","0ZbQug==","INC60YDQsA==","4KWL4KSw","4Li44LiU","INin2YTYsQ==","INGB0L7QsQ==","4Liy4LiK","IOCkuOCkleCkpA==","IM6d","2KfZhdmH","4LmJ4Liy4LiZ","IHRyw6xuaA==","INin2YTZgQ==","INin2YTYrw==","dW51bg==","0L7RgtC+0LI=","xrDhu58=","INGB0LLQvg==","zq/Osc+C","4bqlbg==","0L7Qs9C00LA=","4LiX4Lii","IGJ5bA==","2K3Yrw==","4LiB4Lil","2KjZhw==","IHbEmw==","6KKr","INii2YU=","IMSRaeG7gXU=","5ag=","IGtkeQ==","INio2Yg=","4bqrbg==","7Jyg","4KS+4KSV","a8Wv","IHRyxrDhu51uZw==","aWNrw6k=","0L3QuNGP","IM+Azr/PhQ==","xrDhu59uZw==","0L3QvtC80YM=","4LmI4LiZ","4Li54LmI","IGvhur90","IO+8vA==","IOyLoA==","acOn","IG7Eg25n","xI3DrQ==","0YLRjw==","0YDQtdCx","2YvYpw==","2q/bjA==","44OO","IGthcsWf","0LLRlg==","IHBo4bqnbg==","4LiI4Liw","4bqvdA==","2LHYqQ==","4Li04LiH","4Li04LmI","4KS+4KSI","4Liy4Lie","2YbbjA==","7Jew","YsSb","INin2YTYtQ==","7Zc=","INiz2LE=","bGFyYQ==","64uo","INmC2LE=","6I4=","2KjYrw==","INC50L7Qs9C+","4KWN4KS5","IGPDoWNo","7ZWY6rOg","IM+Az4HOvw==","INiq2Lk=","kog=","INCy0L7QtA==","56We","0LrQuNC8","IGThu7E=","4LmA4Lir","0LDQvdCw","IO+9","IGJhxJ8=","IOCkquCkuQ==","IGNhbw==","z4HPjA==","2YbYrA==","4KS+4KSP","IOW5tA==","IG5naGnhu4dw","27LbsA==","0LrQsNGP","z4HOrw==","INCx0L7Quw==","IGdpw6E=","INC30LQ=","4KWH4KSy","IGPhuqVw","4LmA4Liq","z4HOsw==","IOyC","ZMSb","4KWB4KSo","7Ig=","xLFsYW4=","0LvQsNGB","IOC4pw==","IM+DzrU=","INir","INCm","54K6","IGLDvHk=","0LXRhg==","5aSq","IOCkrOCkqA==","0L7Qs9GA0LA=","INC/0YDQvtGC","IGzGsOG7o25n","IGTDtm4=","4Lij4LiH","0LDQu9C+","INis2YU=","4KWILA==","IOuvuA==","IOq5","2YjYqg==","4KWA4KSv","4LiI4Liy4LiB","IGNo4bqldA==","zqk=","IGtow6Fj","IHRow6FuZw==","asWhw60=","IMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKg","4buRdA==","4Lir4Lij","0ZbQuw==","5YWJ","5YI=","2YTYqQ==","IOqxsA==","0L7QstC+0YA=","aeG7g3U=","INC80LXRgg==","0LDRlA==","INGH0LDRgQ==","z4HOtQ==","7Lm0","4oCM2LQ=","66y8","w7pj","4oCM2YfYpw==","aeG7
gW4=","c3Rhdg==","7Z4=","INmG2Lg=","hpI=","IM+EzrE=","INC30LDQsQ==","2YPYqQ==","INCz0YDRgw==","0LLQvg==","INmF2Kw=","IHNhaA==","2KjZhA==","2LnYqQ==","0YPRiA==","INGC0LXQvA==","7Ys=","ZWNr","z4nPgg==","2YrYqg==","7JeI","54s=","2LDYpw==","7KCA","INC90LDRgQ==","INC/0L7Rhw==","5qCh","z4g=","0YHQutC+0Lk=","w7xj","2YLZhA==","INC/0L7Qtw==","INC+0YHQvtCx","4Liy4Lil","0L3Ri9C80Lg=","0L7Qu9C+0LQ=","6Lw=","INiv24w=","INGD0YHRgg==","IOustA==","2YrYsw==","67Cp","4KWN4KSa","0LjQu9Cw","IG7Dqm4=","0L3QuNC1","zrnOvQ==","bGFyxLFuxLE=","4LmA4LiZ","2YbYqg==","YcSfxLE=","xLFtxLF6","INin2YTYrg==","4LmA4Lin","4KWN4KSo","IM+F","IO2G","4bq7","4Li04LmC","zrHPgg==","0LzQtdGC","IHpw","IGplaG8=","4Li14Lii4LiZ","0YTQvtGA","xLFuxLF6","a2xhZA==","7YyM","dXnhu4c=","zrnOrA==","IOOAgQ==","2LTYsQ==","5qmf","INiq2Kc=","INC30L3QsA==","2LPYqtin2YY=","4KWH4KSw","66ek","54M=","INC20LU=","4Liy4LiU","INi2","6a0=","INC90LDQtw==","INuM2Kc=","ZW7DqQ==","4Lix4Lii","7ZaI64uk","INio2K8=","4KWB4KSV","0YLQvtCy","7LCo","2YfYrw==","4LiU4Lii","IGhv4bq3Yw==","INCf0YDQuA==","2YbYpw==","546L","0YPQstCw0YLQuA==","4Lia4Lij","IOCkleCksOCkpA==","z4POt8+C","2KQ=","6ZW3","5YWL","INiv2KfYsQ==","4Lix4LmI","xqFp","4Liy4LiI","w71taQ==","4bqldQ==","INiv2LPYqg==","a2Vt","INC+0YHQvdC+0LI=","66qo","z4HOrA==","5oU=","INin2Kg=","5aOr","hJY=","zpQ=","2YrZgw==","7Y4=","IHnDvHo=","YWTEsQ==","4Liy4LiV","5LuA","7J2064uk","IHp2","IHTEmw==","IO2W","4KSl","IOCksuCklw==","7JiA","INCw0L0=","55c=","7Jet","0L3RltGB0YLRjA==","xZ4=","IHBow6F0","2YLYqQ==","IHRo4bq/","IO++","7LKc","IOyEoA==","4LmD4LiK","acOqdQ==","xJ9pbmk=","2YLYrw==","IGt0ZXLDvQ==","0YHQutC40Lk=","4KWN4KSh","dGFkxLFy","INGB0Lw=","2YjZgQ==","2KfYsdmK","5b63","4Li04Lih","2K7Yqg==","5b6I","INCz0L7RgA==","77yM5oiR","IOyYgQ==","IOuPmQ==","0YHQsA==","4LmA4LiE","66+8","4Li24LmI","IGxpw6pu","INmH2Kc=","bGVyaW5p","INGG0LU=","2KfZhNuM","IOCkruCkuQ==","IHbhu6U=","IHh14bqldA==","4Li04LiB","INC/0YDQvtGG","IM6xzr0=","0YDQuNC8","IGPhuqdu","INC40YU=","0L3QvtGO","IHTDrW5o","IGLhu5k=","0ZbQvA==","IG5o4b
qtbg==","44CA44CA44CA44CA44CA44CA44CA","2YrZhw==","5Lqa","INC+0LHQu9Cw","IOCkmA==","bsO9Y2g=","5p2R","2YTYsw==","INC90LXQvtCx","2KfYqNip","dsOh","zr/Phc69","0YDQtdGC","YXPEsW5kYQ==","IHlhcg==","IMSRaeG7g20=","0L3Rjg==","4KS+4KSX","INqp2LQ=","0YPQtw==","IOC4lA==","4bqjbQ==","0LrQsNC80Lg=","IM6Z","4LmA4LiV","IGzhu5s=","2YLbjA==","a291","2YTYqA==","0LjQstCw","5pM=","4bq5","zrrOsQ==","67KV","6IKy","4buRbg==","IGJlbGly","7Yag","z4TOrg==","0YvRiA==","44KD","INCw0LHQvg==","c2vDvQ==","4KWI4KS4","INC/0YDQvtGB0YI=","ZWt0ZWRpcg==","YcW+","4LmI4Lit","INC+0YHRgg==","IGLhuqNv","IOWkpw==","0YvQvA==","IG3Frw==","xrDhu5tuZw==","5Y+X","2YjZhw==","INGD0L8=","2YPZhg==","IM+Ez4nOvQ==","64W4","IOC4ig==","INGC0L7Qs9C+","INCo","7J207Yq4","4LmA4Lit","0LjQvdGD","mIU=","dXnhu4Nu","7ZKI","4bqhbmg=","IOODvQ==","0YLQvtCx0Ys=","IHThuqFv","5bed","IMSR4buRaQ==","IOuPhA==","5LmF","INiq2YU=","0LDRgNC4","c3R2w60=","IGPDuW5n","7Z6I","IHRhcmlo","7KSR","7YI=","INiv2Yg=","7KE=","0LDQu9GW","4LiQ","IGPDsm4=","0LjRgtGM0YHRjw==","IOCkteCkuQ==","xZllYg==","6Zu7","INC80Lg=","b3bEmw==","IGTDom4=","0YbRltGP","24zYs9iq","5a24","IMO8cg==","2LXZhA==","0YDQuNGC","4Liy4Lir","44Gm44GE44Gf","zrjOtw==","55Y=","2J8=","acWfdGly","INCj0LrRgNCw0ZfQvdC4","67CY","4KWH4KSW","IHbhu4s=","zqU=","IOOAgCDjgIAg44CA","IGLhurFuZw==","IHThu5U=","0L7Qu9C4","4LmG","ZXpp","INC90Lg=","IM6b","IHLhuqV0","zrzPgA==","0LbQtA==","4KS+4KSw4KSk","IHXFvg==","4KWH4KS4","2KfZhtiv","IGLDvQ==","4KWL4KSy","ZMSbbA==","7JWY","INis2K8=","5bM=","4Li34LmJ","IGLhuqNu","4bqhY2g=","IMWfZXk=","INmH2LE=","IGplbg==","INCy0ZbQvQ==","ZXNpbmRl","IOCkueCkrg==","56CU","4Lia4Lia","IGNo4bupYw==","4Li24LiH","bWFsYXI=","IGRlxJ9pbA==","5p2x","IHTDoWM=","IGtpxZ8=","IHThu7E=","4KWN4KSn","4LiZ4LiX","zp/OpQ==","0YHRjNC60L7Qs9C+","IOCkpw==","IOydmA==","2YbYqQ==","w7xz","6Ks=","IHRhcmFmxLFuZGFu","hW4=","IGtpbmg=","z4POuQ==","4KWA4KSV","7Y+s","2KfZhdmE","IFZp4buHdA==","IM+Ezr/OvQ==","INiq2YY=","IOCkheCkpw==","4LmI4Liy4LiZ","csSx","4KSC4KSm","6ao=","IGNow7puZw==","0LPQuA==","z4TOsc6
9","INC00L7Qvw==","0L3RltC5","0L7QvdCw0LvRjA==","zpM=","IGLDvHnDvGs=","4bw=","4KWA4KSw","2LDZhw==","IOyVhOydtA==","IGRvYW5o","IMWZw60=","0YbRjw==","IHTGsA==","IOCkuOCksA==","IG3DrXN0","IOuwjw==","2LTZhg==","0ZbQsQ==","IOOAgOOAgA==","55m9","0L7RgdC/","0LrRltCy","IHThur8=","44Gt","IHThu5tp","IOyasA==","5pyD","2KfbjNiv","5qc=","7KCQ","IGR1cnVt","4LmA4LiK","4KWA4KSk","INmH2Yg=","4KWC4KSq","IGfDtnJl","INGA0L7QsQ==","IHRoaeG6v3Q=","YWrDrQ==","INin24zYsdin2YY=","4oCP","0YHRjNC60L7Rlw==","54U=","IOyEuA==","4bur","IOC4gg==","xa9t","656M","zrnOus6u","INC80L7Qsw==","2YbZig==","44Ga","4KS+4KSs","5qI=","2LnZhw==","0ZTQvA==","IM6s","zr/Phc+C","2LLYp9ix","6rG0","c2vDoQ==","INin2Yo=","IGlsZw==","IHPEsQ==","ZWxlcmk=","IM6X","dXlvcg==","4KS3","4KS/4KSu","0LXQstCw","5LuA5LmI","4Li44LmI","4LmJ4Liy4LiH","IGhp4buHdQ==","INin2Lk=","IMO2emVs","zr3Otw==","64Sk","IHRvw6Bu","IG1vaA==","INGP0LrRlg==","54o=","bWFrdGFkxLFy","2KrYp9io","INGB0YM=","IHnDvGs=","IM6n","0LfQvdCw","0L7RhQ==","xrB1","4LiX4Lij","44WL","IGthcsWfxLE=","2YXbjA==","INGG0ZY=","2KfYr9uM","4KWA4KWk","z4HOtw==","0LvQvtCy","5aSr","IHBow6Ju","INC/0L7Qvw==","57ea","0Y/QvQ==","4Li44LiT","0YHRgtGD0L8=","zq/Ovc6xzrk=","INGA0L7QutGD","bGFyZGE=","6LuK","z4HPiQ==","2YjYp9mH","6IU=","4KWN4KSw4KSk","5bex","INGA0YM=","IHRo4buL","IMSRaeG7h24=","7JaR","bsOpaG8=","4Liq4Lih","6rCB","YWPDrQ==","INCz0L7QtNCw","a2F6","IGLDtmw=","IGdpYW4=","4Lib4Lij","776e","4Lix4LiV","IGdlcsOn","INin2Kw=","IM6u","2ZHZjg==","0YHQutC+0LPQvg==","0YDQsNGF","IMWg","IOCkmw==","0L7RgdGC0ZY=","67O4","0YHRjNC60LjQuQ==","27HbuQ==","0YPQstCw","2KfZhNmF","INmF2LU=","642Y","YsOt","INmI2Kw=","z4TPjA==","ZWJpbGly","IHRp4bq/cA==","6aQ=","IOS4gA==","INGB0YDQtdC0","64Ko","zrXPgc65","2KfYqw==","0YHQvtCy","z4fOtQ==","IOu2hA==","IHRha8Op","IGTDvHo=","IO2PiQ==","INin2LU=","IM+Dz4TOt869","67CU","IGjhu5lp","2LHZhw==","2KjbjA==","0LLQtQ==","INin2YTYtw==","INGA0LXQtw==","2KjYp9ix","IGdp4bqjaQ==","44Gr44Gq","b2xlxI0=","4KSg","Ozo=","5L2P","2qnZhw==","IM6m","INGD0Yc=","4peP4peP","4Li54LiB","
4KWH4KS1","z4POsQ==","INin2YbYqg==","INCy0L8=","IHF14bqj","ZW5pbg==","IOq1kA==","zrzOrA==","2qnYqg==","2YLZhw==","IFTDvHJraXll","IHRo4bupYw==","7ZeY","aeG7h20=","IOCkpOCklQ==","IOmH","4KS84KS+","INij2Yg=","w6FsZQ==","56m2","IMWfZWtpbA==","0LrQvtCz0L4=","0YjQuNGF","2KfbjNi0","2KrZhg==","0L3QtdC5","4LiX4Liz","INGP0LI=","2LHZhQ==","IG3DoXk=","4Lir4Lih","xLF5bGE=","IGPhuqd1","INC00L7QsQ==","IOyepQ==","b3bDvQ==","zrnOus+M","IOOFhw==","INGC0LXRgA==","jJI=","2LPZig==","IG9sdcWf","IGJ5bGE=","2LnZhA==","INmD2KfZhg==","0LHQvtGA","7LKt","44OP","dWJs","INin2K4=","2YTZiNiv","2KrZig==","bGFkxLE=","IMO2xJ8=","cnVo","578=","INio2LnYrw==","zpnOkQ==","aWRpcg==","44Gr44Gv","IHPDtnk=","IGtow6FjaA==","0YbQtQ==","INi02YjYrw==","57g=","IOuFuA==","w7pw","IG5lZGVu","IGjDs2E=","IOCkieCkqg==","z4POtc65z4I=","5oi/","IMKgwqA=","IOyVjA==","4KWALA==","tJE=","w6p1","0YDQvtC6","4LmA4LiI","IM61zq/Ovc6xzrk=","INio2YQ=","INGB0L7Qsg==","IMO2bmVt","IOC4iw==","7KeA66eM","5a6Y","6rKp","7ISd","IGHFvg==","IGR1eQ==","44Go44GE","2Js=","zrTOvw==","zrjOtQ==","2YPYp9mG","4KSi","4KS+4KST","IGThu4tjaA==","4buZbmc=","4Liq4Liz","xI8=","INGX0YU=","zrHOuw==","ZcSN","57K+","INC30LI=","6Ieq5bex","INin2YTZhNmH","INCh0YI=","INiz2Ybarw==","INC00L7QvA==","0LPQvtGC0L7Qsg==","0L/QvtCy0ZbQtA==","IELhu5k=","4KWN4KSv4KSV","2LfYqQ==","0LzQvtCy","4LiX4Liy4LiH","4Li24LiB","INGW0Lc=","4KWL4KSc","IGfDtnN0ZXI=","INio2KfYtNiv","aWxlcmk=","INGB0LXQsQ==","0YnQvg==","IOOFh+OFhw==","2KjYqg==","0YHQtQ==","4KWH4KSc","IGzDqm4=","INiq2Yg=","0ZbRgdGC0Yw=","776G776G","IHRoxrDhu51uZw==","IG9sZHXEn3U=","dsSbdA==","7IaN","44Gd44GG","IOyEsQ==","67Cc","IOC4geC4suC4ow==","INi02YfYsQ==","c2xlZA==","4bqjbmg=","5p6X","bGFjYWs=","IG3DrG5o","2qnbjA==","IOC5g+C4mQ==","IGTDuW5n","INC80LDRgQ==","0YTQtdC6","5rCU","6ac=","INin2K0=","6LWw","zpnOmg==","4KWH4KWk","0YHRjNC60LA=","INGH0LDRgdGC","bGFyxLFuxLFu","IOq5gA==","7Li1","0L3QuNC80Lg=","6Kqe","5YCL","IOq1rQ==","0LrQvtGA","bWF5YQ==","4Li04LmC4LiZ","LuC4qA==","IGjhu4c=","INiq2YI=","zrPOug==","IOCkhuCkquCklQ==","
0YHRgtC+0YA=","IMSRbw==","IGNo4bun","2KfbjNiq","IFF14buRYw==","0LPQu9GP","44CC44CNCgo=","IG7DoG8=","4Lit4Lil","5oqK","2YjYsdiq","IGJ1ZGU=","5pu4","ZWxpaw==","INis2Yc=","INio2YjYp9io2Kk=","6Iqx","2K/Yp9ix","IGLDvXQ=","0YfQtQ==","44KT44Gg","INmF2Lc=","bGVyZQ==","zpfOow==","7ZiV","4paN","xJ91","INCy0Lc=","2YrYsg==","INCg0L7RgQ==","7Yuw","INiv2KfYtA==","7KeR","YXTEsQ==","bWVzaQ==","44KJ44KM","xa92","csOhdA==","0L7RgdC+0LE=","5ZCE","dXnhu4du","5YGa","w7xzdA==","6YeO","zrHPgw==","IG3hurd0","0LXQu9C+0LI=","5Y2a","0LTQtg==","INiv2KfYsdiv","IGZhcms=","4LmJ4Lin4Lii","0L7QvdC4","INio2K4=","4KWB4KSk","IMSRw6J5","zrHPgc6x","IM60zrnOsQ==","IOiv","0LrQsNGF","Y2jDoXo=","emVuw60=","0YDQvtC/","4KWA4KSu","7Ya1","ZMO8","4Lig4Liy4Lie","IO2K","2YjYpw==","IHThu5F0","77yf44CNCgo=","IOaciA==","IG5oxrBuZw==","IG5lxb4=","4KWL4KSh","7JeQ6rKM","4KSC4KSh","tow=","INC80LXRgdGC","4KS+4KSB","7Kad","IMSRYW5n","4Lit4LiU","7ZuE","4buNaQ==","c2vDqWhv","INC00L7Qug==","INiq2LU=","IHBow7JuZw==","IOqwlQ==","IHRyxrDhu5tj","7ZGc","2ZQ=","IHBow60=","IGNo4buNbg==","5LmQ","IMWfZWtpbGRl","IO2O","6bo=","66Oo","4KWI4KWkCg==","2YjYsduM","0YHRgtGA0LA=","aWxkaQ==","IM6xz4U=","0LLQsNC90L3Rjw==","7Jq4","LuKAnAoK","INGC0LDQutC20LU=","65Ox","0LXQutCw","5omN","2YXYqQ==","IHBoxrDGoW5n","6ams","44CAIOOAgA==","b3bDvWNo","4Li14Lii4LiH","IFRydQ==","0LXRgdC/","c3R1cA==","xIw=","IGRhbMWhw60=","2LLbjA==","IOunpA==","INC+0LHRgNCw0Lc=","IGHDp8Sxaw==","6rCV","2YHYp9iv2Yc=","2q/Yp9mG","4LmJ4LiZ","4bqpbg==","5bel5L2c","IOCkpOCksA==","2YrYuQ==","IOOAig==","LOKAnA==","IG5ldg==","4Lix4LiN","xJ/EsW7EsQ==","IGppbg==","2KfYrtiq","2LPYsQ==","IHTDoGk=","IGt0ZXLDoQ==","INin2YTZhA==","4KSF","aXptZXQ=","4KWB4KSu","4Liy4Liw","IOq3","bMSxxJ/EsQ==","54++","bGnEn2k=","6rWw","YWzEsWs=","INiv2YjYsQ==","IOyLpA==","INC30LDRgQ==","2YLZig==","IOG7qW5n","INmD2Yc=","zp/Oow==","6Kit","54w=","44GE44Gf","7ZiE","INGC0LU=","0LXRgNGW","c8Sxeg==","IMO9","0LTQvtCy","IOCkh+CkuOCklQ==","0LPQvtC0","IGJ5bG8=","4Liy4LiE4Lih","0LXQvdC40LXQvA==","0Kg=","5pyv","IOCkquCkueC
ksg==","IGHFnw==","4KS/4KSc","5ZOh","0LLQsNGA","4LmJ4Liz","4oyS","b3bDoW4=","IGdpw7pw","0KU=","INGB0YPQtA==","IOCkleCkrg==","4bqhbQ==","2LHYsw==","IOS6ug==","INio24w=","IOCkieCkqOCklQ==","66a9","4bqteQ==","IHbhuq10","0LvRj9C10YLRgdGP","IHNlw6c=","IOy9","0YDRg9C2","2KrYtQ==","fDo=","IOug","0LjQvNC4","INC70Y7QsQ==","IOC4nA==","77yM5L2G","INC90LDQsg==","4oCs","4LmI4Liy4Lii","INix2LM=","c2luaXo=","66g=","0LXQvdC40Y4=","IOC4pQ==","2KfYs9uM","4KWc","INm+24zYtA==","zq/OtA==","INm+24w=","0LXRgNC20LDQsg==","4KSG","IGTDvMWfw7xu","5b+r","0YDQtdGB","5YWr","0YLRlg==","4KS/4KSf","INGC0LXRhQ==","w7p0","2YbZhw==","INmG2LQ=","55m6","IOqwpA==","0LvQtdC0","IOuTpA==","IGJpbGc=","IHNwb2xlxI0=","IMSRxqFu","IOCkieCkpA==","IHRy4buL","INi52YU=","IOClpA==","IMO6xI0=","44G4","4Lin4LiB","INGB0LvRg9GH0LA=","4buNbmc=","5Y+I","0LjRgtGD","5pyJ6ZmQ","66aw","64uY","IGhv4bqhdA==","IOydtOuPmQ==","0LfQvdCw0Yc=","INin2LPYqtmB2KfYr9mH","INC/0YDQvtGG0LXRgQ==","YW7EsW4=","0LPRgw==","INin2YTYqw==","5pel5pys","zrnOus6s","INGX0Zc=","7KeB","aW51","INiz2KfYsg==","44Kh","776J","INin2YI=","IGvhur8=","xa9zb2I=","4LmH4LiB","5ZCn","5ryU","0YnQuNC1","54Y=","0YzQvtCz0L4=","4KWL4KSf","2KfZvg==","5a6k","IOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgA==","IHRyaeG7g24=","IHThuq1w","6aOf","67k=","INGH0LXRgNC10Lc=","INGG0Lg=","0YHRgw==","INC90LXQvA==","INCw0YA=","INmE2Kc=","IOynhA==","55+z","INC/0YDQvtCx","IOybkA==","24zZhtuM","0Y7Rh9C4","4oCN","27Hbsw==","44Ks","56CU56m2","7YKk","IGdlcsOnZWs=","INit2LM=","7ZS8","6IKh","IM+Ezrk=","IHbFoWVjaA==","IHbDrA==","2KfZhtmK","INmH2LPYqg==","IOuCqA==","xZllag==","0LXRgNCz","IHPDtno=","INin2YTZhdiq","IGNo4bq/","4buTaQ==","5Y+k","4paN4paN","4buTbmc=","44Oi","INC00Lg=","zrXOuw==","INC+0L3QsA==","INC90LDQuQ==","IF97","0L/QvtC7","YWxpeg==","IHTEg25n","IGTDrQ==","w6lw","INmE2YU=","IG1vxb4=","IG5nb8OgaQ==","6Jc=","INGH0LXQvA==","IMSR4buV","0LXRgtCw","5Y+y","INGB0LrQsNC3","44K/44O8","0LDQvdGM","IGfDtno=","67OE","44GL44Gj44Gf","IOuNlA==","INmG2YI=","INGD0YfQsA==","IHNhaGlw","INGB0L/QtQ==","zq/Ov8+F","7L2U
","IOuI","bWFt","IHJvY2U=","INmG2KfZhQ==","0LXRgNCw0YLRgw==","xLFw","44GE44Gm","IO2VmQ==","IOCkh+CkqA==","5ak=","IG5oacOqbg==","YXTEsXI=","xZllbsOt","2K/YqQ==","44Oq44O8","4Lil4LiH","IOmA","IOC5gOC4mw==","0LTRlg==","zq3PgQ==","7ISk","0LPRgNCw","ZXNpbmU=","INC10LU=","IGlraQ==","INiq2Kw=","bGFyxLFuYQ==","ZMO8cg==","INin2YTYsA==","2YXYqg==","IOCkkA==","4KS/4KSm","IOu5","0YTQvtGA0LzQsA==","INC+0L3QuA==","0LPQvtGA","0L3QtdGB","7JiA64uk","xLFsZMSx","IMOnZWs=","INC00L7Qsg==","2K/bjA==","IMSMZXNr","0YjQsA==","INin2Ko=","5bGL","5pa8","IHByw6F2","w6ltdQ==","5biI","44WL44WL","IGlsZ2lsaQ==","4Lir4Lin","4KSH","4KS+4KS3","656R","YXN5b24=","0YbRjA==","4LmB4LiV","4bufaQ==","INCy0YvRgQ==","0ZbQu9GM0LrQuA==","INC60L7RgtC+0YDRi9C1","0L3QuNC60Lg=","INin2K8=","IMW+aXY=","IM6xz4DOvw==","2LHYtg==","2KfYqQ==","IGtkecW+","4buvYQ==","IOuMgO2VnA==","IHTDtGk=","0YPRlA==","2LLYsQ==","IOWl","44OL44OL","2KjYqQ==","z4TOv8+C","0YbQuNC+0L0=","INmF2Yo=","IMSDbg==","4KWH4KSX","INGA0LXQsw==","IGzhu5tu","7KSA","7Ius","IGJp4bq/dA==","YWxhcsSx","2YHZig==","5LiW55WM","INC90LXQvtCx0YXQvtC00LjQvA==","4LiZ4Lin","zr3PhA==","IOG6o25o","7ZaI","IOCkteCksA==","aGxlZA==","4Li04LiI","5q27","INin2YTYqtmK","0L3QvtGB","cHJhdg==","0Y/RgtC4","0YnQsA==","2YjZhtmH","IGHEnw==","4Lie4Lij4Liw","IHRo4buRbmc=","0YTQuA==","INCz0L7Qu9C+0LI=","IGtob2E=","IOugiA==","44GS","IGdldGly","2LTYqg==","0LbQtdC90L3Rjw==","0LXQvdGW","IGdp4buv","bGVyaW5pbg==","4KWA4KS1","6YG4","4KS44KSw","INGH0LXQu9C+0LI=","4KWN4KSc","INCx0YPQu9C+","INin2YbYrw==","4Lix4LiZ4LiX","6K6p","IHF1eeG7gW4=","INit2KfZhA==","7LKY","INC70Y7QtA==","z4HPhw==","0LDQu9GM0L3Qvg==","44CA44O9","6riJ","44Kx","INmF2LHYrw==","IMO0bmc=","INin2LQ=","5aSn5a2m","7KaI","5oim","ZXlp","INCQ0L0=","4KS/4KSq","IHRpw6p1","2LTbjA==","4bqvbg==","6a2U","44Go44GE44GG","IOyggA==","0LrRgtC4","INmF2K3Zhdiv","IO2GtQ==","4Li44Lih","5Y2h","0L7RgNC+0LI=","0LrQvtGO","IGzhu7Fj","5bO2","INix2YjYsg==","0YXRltC0","IGjhu5M=","IMO8bA==","INi02YU=","2YbYsw==","2KjZiA==","IHRow6pt","4bqhYw==","5bqc","ZWNobg=="
,"IM6azrE=","6JGX","55y8","w6Fo","IM65","6rmM7KeA","bWF6","zrvOv86z","IGpzbWU=","IOG8","INC/0YDQsNCy0Lg=","0LrQu9Cw0LQ=","IHRo4bun","c2Fo","xJ9pdA==","INmB24w=","0LXQvdC90L4=","4KWB4KSb","44G7","55m+","0LjRgtCw","INCx0YvQu9C+","IHZ5cw==","IOy2nA==","4bqvbmc=","IMSR4bqhaQ==","INmF2YjYsdiv","0LXQu9Cw","0ZbRiA==","0LvQtdC90L3Rjw==","5pA=","INC90LXQtA==","aXlhdA==","7Lw=","IG9sZHXEn3VudQ==","2K/Yp9mG","7Z0=","INiz24w=","4Li14LiB","xJtzdA==","xLFtxLE=","5LiJ5LiJ","44K9","INGC0LXQvw==","INGA0LDQuQ==","4KS+4KSn","IOyCrOuejA==","IFRydW5n","77yP77yP","IHTDom0=","xaFlbsOt","44ON","IM+Ezr/Phc+C","INC90ZY=","0LLQuNC0","5r8=","INi4","44Ov","7KKF","0LLQsNGC0Lg=","IHF1w6E=","4Lik","IMSRxrDhu51uZw==","4KWB4KSm","cm9q","INGD0YE=","6aaZ","7L2Y","INmI2Ko=","4Lih4Liy4LiB","5YiH","IMOhbg==","INC80LXQtA==","7JeQ64qU","IGhsYXY=","2LHYqg==","4LmD4LiI","5rSy","INC70ZY=","5oiY","2YjZhtiv","6Laz","5YuZ","55Sz","IOyx","IOydtOuPme2VqeuLiOuLpA==","0YnQtdGB0YLQsg==","IOu2iA==","2YTZiA==","w7x2ZW4=","6IiH","IGdp4bubaQ==","INmI2YI=","IOqwpOuhnOq3uA==","INi52KfZhQ==","mJA=","Ojo6Ojo=","INGD0LQ=","LdGC0L4=","INGE0L7RgA==","0LjQvdC4","44GX44GE","IOqwpOuhnOq3uOuhnA==","44Gz","44Op44Kk","ZW7DoQ==","IG5leg==","IMO2bmVtbGk=","INC90LjRhQ==","4KSC4KS4","IOCkieCkuOCklQ==","4KWN4KSw4KSm","IG7Ds2k=","2YPZhA==","4Li04Lin","zrrOvw==","4KWB4KSW","w7Z5bGU=","zqzOuw==","w7NuZw==","INiv2KfZhti0","INC30LE=","7Ls=","4Lic4Lil","65Ok7J20","IGV0aw==","2LHYp9iq","IM61zro=","0YLRgNCw","4KWN4KSk4KSw","4KSC4KSs","INC80ZbRgQ==","5qC5","44OZ","IHThu4k=","4LmA4LiL","7Iig","77yM5LiN","7Jio","IG3Em3N0","gbU=","YXrEsQ==","cmFkYQ==","z4DOsQ==","bcOp","2YbYp9mF2Yc=","2KfbjNmE","zrzOtw==","bHVr","2YPZig==","IO+8iQ==","INC00LXRgg==","IGnDp2luZGU=","0Y/QvA==","IGTGsOG7","INC/0YDQtdC00YHRgtCw0LI=","w7xyZQ==","5ZWK","INGC0YDRgw==","ZXNpbmk=","INCw0LvQtQ==","44Oz44OJ","4KWD4KSk","zrXPhQ==","4KWB4KSG","IGhpw6c=","55S6","INCW","54Wn","a8Oh","IHRy4buNbmc=","INiq2LQ=","4KS+4KS2","INmF2Ks=","ZXRpbQ==","IHRo4bqleQ==","IOCkrOCkuQ==","2LnYqg==","
4Li24LmJ","IHNldg==","0YHRgtCw","IGPhu6k=","IHRp4buBbg==","4KWA4KSc","0Y/Qsw==","INC+0YDQs9Cw0L3QuA==","INCx0YvQuw==","dMO8cg==","INio2KfYstuM","IOyerA==","4KS14KSw","5pyJ6ZmQ5YWs5Y+4","a3Vw","IGl5aQ==","7ZWY6rKM","44CAbA==","44K344On","2KfYsdip","4Liq4Lij","IHTDrWNo","INC60LDRgA==","0LjQsQ==","INCy0ZbQtNC/0L7QstGW0LQ=","IHBvZGxl","4KWN4KSw4KSV","aXlvbg==","0LrQvtC90L7QvA==","IM68zq0=","INC/0YDQvtC40Lc=","IOKAjw==","bWVrdGVkaXI=","zqnOnQ==","IGLDoW8=","4LiI4Liz","642U","67iM","IHPhu58=","24zYsduM","0L7QvdGD","xLFuZGFraQ==","0LDQu9GM0L3QvtCz0L4=","zrzOsg==","0LvQuNC3","IGplamljaA==","5pa9","5L6/","bGXFn3Rpcg==","INmI2KM=","IOCkuOCkrA==","bGVyZGU=","INqG2Yc=","z4TOrQ==","IGfDrA==","IMOa","INGA0LDRgdC/","IHTDvG0=","4LmA4LiH","6JC9","7Iah","4LmE4LiX4Lii","bcSxxZ90xLFy","INmC2LHYp9ix","IOC4hOC4suC4qg==","IGvEsXM=","0L7QstCw0L3QuNGP","44KC44Gu","2K/Yp9mF","7Jyh","b2xvag==","INC/0L7RgdC70LU=","INCi0LDQug==","INCx0L7Qu9C10LU=","IMSR4buVaQ==","bGFr","7YWM","IGF5bg==","0Y/RgQ==","INC/0L7Qsw==","IGFyYXPEsW5kYQ==","iKw=","4KWC4KSy","IM6xzr3OsQ==","IHF1eeG6v3Q=","IHRodeG7mWM=","IGTDvG4=","IHDFmWVz","0YTRlg==","IOW4","2KfZhNmK","INC/0L7QstC10YA=","0YfQuNC90LA=","c2tv","57WQ","2KE=","INCz0YDQsA==","0L7RgtC4","IHF14buRYw==","0YbRltCy","bGVuZGly","0LLRltC0","INC20LjRgg==","w7x5b3I=","77yM5LuW","bGFyxLFuZGE=","IHV5Zw==","IHRyw60=","INi02YY=","2KfYqNmE","5rex","wqBw","0YHQutCw0Y8=","0L7RgtCw","2YjYtw==","INin2Lc=","5L6G","INC30LDRgg==","INC40LzQtQ==","4LmA4LiX4Lio","64u0","bsSbbsOt","0YPQu9GP","LdC/","5Zg=","INCy0LjQvw==","0LDRgNCw0LrRgg==","4LmA4Lia","56aP","z4HPjg==","2LPZhw==","4KWM4KSw","IGRpxJ9lcg==","4LmC4LiU4Lii","INGB0L/QvtGB0L7QsQ==","5Y23","6JY=","0LDQvdGC","0Y7RgtGM0YHRjw==","INGN0YLQvtC8","IO+9gA==","4Liq4Liy4Lih","w6xt","INGI0Lo=","IOC4m+C4o+C4sA==","4KS84KWA","ZWts","bXXFnw==","INGC0LDQutC+0LY=","2YjYs9i3","IMSNaQ==","4Li14LiZ","24zZhtmH","xJtr","5b28","bGVyaW5l","IMSR4bqldA==","4KWB4KSP","0L7Qu9C+0YE=","IOWwjw==","2LLZitip","INCy0LvQsA==","4KWA4KSy","IGV0dGk=","INGB0L7R
gdGC0LDQsg==","2YTYp9mE","IOeO","IHDFmcOtcGFk","65+w","4Li44LiB","INGH0Lg=","5YWN","bsSbasWhw60=","4Li04Lil","5Y2A","c2vDvWNo","4Liy4Lio","5ZCX","IO2YhA==","IGFsxLFu","5aeU","4Lie4Lij","YcW+ZA==","INCx0ZbQu9GM","4LmI4Lin4LiZ","b29n","YWPEsQ==","bMSxxJ8=","IGtodQ==","IGhpem1ldA==","IOmb","IM6Y","IGRlxJ9lcg==","5YWt","INiv2Yc=","IG7Em2s=","4LiE4LiZ","0LXRgtGM","2KjYp9mG","z4TOuc66zq4=","IMSR4buLYQ==","IEPDtG5n","7YyQ","INC60L7Qs9C00LA=","INqp2YbYrw==","44Gn44GN","IM+AzrXPgc65","bGFyZGFu","INC30LXQvA==","2KrZiNin2YY=","6LOH","bGlrbGU=","IHThu6U=","IGThuqtu","IG5heQ==","INGB0YLQvtGA","INi02YXYpw==","2KvYsQ==","IGRlZGk=","0LrQvtC1","65GQ","0YbQtdCy","2KzZhw==","IG3Fr8W+ZQ==","4KWB4KSq","4KWN4KSw4KSu","IHRhxZ8=","0L7RgNGC","zrPPgc6x","55m8","4Liy4Lia","aeG7hW4=","INmF2LPYqg==","0LvQtdC60YE=","IHByYXY=","INC00L7RgQ==","IGTEscWf","IHplbQ==","IGdpYW8=","IHZsYXN0","INGN0YLQvtCz0L4=","772w","4Lin4LiH","0YDQvtC5","IGJpcmxpaw==","ZW7DvQ==","IOuLqA==","0L7QstCw0L3QuA==","6aOO","7Y+J","IHphaA==","0LHQsA==","5Yqp","6YCy","6raM","IGRpeWU=","4KSC4KSV","IGNodXnhu4Nu","IOyXrQ==","INGC0YDQuA==","IMO2bmNl","77yM6L+Z","b+G6oWk=","0LvQtdGC","IM+Dz4XOvQ==","bMOhZA==","w6dl","dMO8","IMSNw6FzdA==","IM61zr0=","IGJp4buHdA==","IOmr","4KWL4KSV","2YTYp9iq","2KjYp9mE","ZWNpZXM=","IOuLuQ==","4LiK4LiZ","z4TOsc+C","4KWN4KSj","dWrDrWPDrQ==","xI1ldA==","INC/0L7QsQ==","2YjYp9ix","aXlhcw==","IGRydWg=","2K/Yrw==","z4zOvQ==","0YDQtdC9","4Liy4Lij4LiW","5L2O","7JW9","0YDQvtC3","64qU642w","44KT44Gq","xI1lbsOt","KioqKioqKioqKioq","IM6h","INGC0L7QvNGD","4Lij4LiB","4KWB4KS4","5Lmd","5bCx5piv","o2k=","6Ziy","2YPYsQ==","INGN0YLQuA==","INqp2LTZiNix","IOqwkA==","INCw0LQ=","INiv2KfYrw==","6YGO","2as=","IGzhuq1w","INin2YTZhw==","5pyb","INiq2Yc=","7KeI","44Gn44GC44KL","INC80LXQtg==","INGA0LXQt9GD0LvRjNGC","540=","0LXQvNGD","INiq2YjYp9mG","INix2KfZhw==","44O844Og","5YS/","5bGe","0LHRiw==","4b8=","4LiE4Lil","4KWL4KSI","w7x0w7xu","4KSX4KSw","7JWY64uk","4oin","IOywqA==","57WE","zrzOsc+EzrE=","4Li44LiZ","INGC0L7QvA==","0LXRg
NCy","zpHOow==","IGnFn2xlbQ==","2LnZhQ==","64M=","44OE","2KfZgdiq","5Yqe","IG5lcw==","YXZhxZ8=","INmG24zYsg==","5by6","IOmZ","0ZbQvdC90Y8=","5rKz","w6HFmQ==","5p2Q","INij2Yo=","IOy5tA==","IG5lbsOt","INmI2YU=","INqp2YU=","aeG6v3U=","IOaw","5Yy7","IHpvcg==","zq/Pgw==","4KS/4KSn","INC/0L7QutCw0Lc=","4KS54KSw","IGnDp2Vy","2K3YqQ==","4KS/4KSW","0LDQtNCw","2KrYsduM2YY=","IGJhbw==","IHjDow==","4LmA4LiE4Lij","IG5naMSp","4LmB4Lia4Lia","IGRvxJ9ydQ==","0ZbRgtC4","INio2YrZhg==","INC70LXRgg==","2KfYug==","24zaqduM","csOhdg==","4KWN4oCN","4oCZbmlu","IOC4og==","5Y2K","INC60L7Qu9C4","IHRy4buf","6Z2S","656A","IOuo","INmI2LE=","776K","6KeC","INC/0Lg=","0L3Rg9Cy","aWxtZXNp","2LPYqtmH","INC00LXRgNC20LDQsg==","5a6D","5Yil","64WA","0LvRgdGP","4KSC4KSn","INGC0Lg=","IHDFmWlw","0L/QuA==","4buTbg==","0L7QstCw0YLRjA==","7J206528","5pyd","IOuYkA==","IM6tzr3OsQ==","44G+44Gn","2KzYp9mF","IOuK","0L3RltCy","z4DOv8+F","INiy2YXYp9mG","5puy","INmF2Yc=","66Co","5LiD","44Go44GX44Gm","bGFiaWxpcg==","0L7QttC1","5aSc","INC90YPQttC90L4=","5b2p","54ix","IGhvw6Bu","w7xuw7w=","IOuEpA==","INis2YY=","IG7Em2o=","0LrQuNC80Lg=","IGF5bsSx","INmD2YQ=","IG5oYXU=","4bqz","2YrYp9iq","IG1lemk=","INGA0LXQug==","IHTDvHI=","INCz0L7QstC+0YA=","IGZhemxh","5YeG","0YjQuNC5","0J/RgNC4","0YDQvtGB0YI=","INC+0YDQs9Cw0L0=","bsO9bQ==","INGA0L7QtA==","INmI24w=","aWNrw70=","66a8","772y","5oCO","INmH2LDYpw==","INGH0LDRgdGC0Lg=","w61y","4buHbmg=","IO2X","6rs=","bHXFvg==","w61s","Y8OtY2g=","5a6f","44Gg44Gj44Gf","2YrYsdip","IHbEg24=","5riv","IM+EzrnPgg==","2KfYsdiq","IHbhuqVu","4pSB4pSB4pSB4pSB","5a++","z4HOrQ==","INCz0L7QtNGD","INiz2Kg=","2KfYsdin2Ko=","0LXQu9C10Lk=","INC30LDRhQ==","INCy0LDQtg==","IHThu4luaA==","2KfYqNi5","IOCknOCkrA==","IOCkkOCkuA==","INC00YM=","IOmrmA==","6rKg","0L3QtdC1","772M","INC80LDQuw==","6L65","44Gg44GR","4LmJ4Lij","2YLYtw==","IGLDqm4=","IHNlYg==","INiu2YjYp9mH","c2l6","IG9sdXI=","IOuUsA==","IOyiiw==","IHN2xJt0","aWNrw6E=","4bu5","IHF14bqjbg==","INC40YE=","IHphxI0=","4Li34Lit4LiZ","0ZTRjg==","4KS/4KS3","54q2","z
4POvA==","4Lix4Liq","w7Nj","INCx0LXRgA==","IO2d","Ozo7Og==","INm+2LM=","IOuRkA==","0L3QuNGH","INC+0YfQtdC90Yw=","IOyVhOydtOy9mA==","IM64zrE=","INCy0YHRgg==","2KfYr9ip","IGRldmFt","4Li34Lit4LiH","INC70Y7QtNC4","7JiI","4buxYQ==","0Y/RhQ==","4oCM2KfbjA==","INiz2Yg=","5bC8","IHRo4bup","bWV5ZQ==","IOi1","6Imv","IGRlxJ9pxZ8=","0YjRlg==","IHRy4buj","IOKAjiM=","55eF","7JuM","IGtkZQ==","zqc=","5qQ=","INGF0LDRgNCw0LrRgg==","5oc=","IGJp4bq/bg==","2YLYuQ==","5Z+f","INC90LXQvw==","IGTFrw==","INC/0LjRgg==","INGC0YDQtdCx","2KfYstuM","INi32LE=","INmF2YQ=","IHRoYW0=","INmI2KzZiNiv","IHN2w6k=","6aeF","2KfbjNmG","IHRpw6pu","c3RydQ==","IHbhuq15","w7xuZQ==","IOC5gOC4oQ==","IHLhurFuZw==","0LDRgtGD","5LqR","0L3QuNGC","5LyK","2YjYtQ==","IOmd","INC/0YDQvtCx0LvQtdC8","ZGVraQ==","KioqKioqKioqKioqKio=","w7Jh","IMSR4buBdQ==","44KM44Gf","2KfYsdiz","44Gq44GP","2KfZgti5","6LuN","2YPZhQ==","xI1hcw==","IGvhu7M=","2LTZhQ==","4KWH4KSh","6Zi/","IGplasOt","IOaZ","IMSwxZ8=","YXJkxLFt","IOCkuOCkruCkrw==","INCd0L4=","aWxlcmlu","INi52KjYrw==","bsOtaw==","INi02qnZhg==","4Li04LiX4Lii","4buF","0YDQtdC3","IGNo4bupbmc=","IDou","IOCkquCkpA==","IMW+aXZvdA==","5aKD","q2E=","IHRydW5n","0L3QuNC60ZbQsg==","INin2YTZhdmG","INGA0LDRgdGB","INC20LjQsg==","INC30LDQutC+0L0=","IOuqqQ==","IHrDoXY=","IGhha2s=","5Luk","INGP0LrQuNC5","INio2Yo=","zrvOrQ==","b2N1aw==","INGO","4LiB4Lin","INin2Ybarw==","4KWB4KSC","IG7DoW0=","4buVbmc=","INC20LXQuw==","IMSR4bq3Yw==","xI1pdA==","IOqxtA==","INio24zYtA==","0LrRgNCw0ZfQvQ==","INmI2Yc=","0L3QtdC90L3Rjw==","IOC5gOC4ng==","0L7QvNC10L0=","IGzhuqdu","INi52YXZhA==","IO6BtQ==","xJ4=","0ZbRgdC70Y8=","xrBuZw==","4KS+4KSr","4LiX4LiY","0LTQtdC9","INGJ0L7QsQ==","0YfQuNCy","xLFsxLFy","2KfYudin2Ko=","asOtY8Ot","67Ko","2obZhw==","2KfYsdis","INm+2LHZiA==","INC+0LTQuNC9","0LvQuNC9","0LHRgw==","IOCkuOCksOCklQ==","5YCZ","67aA7YSw","4KWI4KSCLA==","5bQ=","4LmC4Lil","IHbFoWFr","INC+0L/RgNC10LQ=","7LE=","5r0=","IGThu7FuZw==","cHLDoXY=","4Li04Liq","IG5oaeG7h20=","IGlsacWf","INC10YnQtQ==","IGplxaF0xJs=","INGA0LDRg
dGC","4Liu","4KSC4KSf","4oCM2qk=","INio24zZhg==","b3ZvdQ==","5pmu","zq/Otc+C","0L7RgNC+0Yg=","IG9sbWFr","IHN0w6F0","ZGnEn2k=","IHTDrG5o","IGTEmw==","INqv2LHZgQ==","z4POvw==","INGD0YI=","7ZWZ6rWQ","4Lix4LiQ","4Liy4Lit","IMSR4bq3dA==","INC80L7Qs9GD0YI=","67Cw","dGlr","qr0=","bGnEnw==","z4DOtQ==","IOiA","a8O8","YWRlY2U=","zrrPjA==","INC00ZY=","4bqnbQ==","54Sh44GX","27LbsNux","6LWb","0L7RgdGD0LQ=","IOyViOuCtA==","INCU0LY=","5bqn","aWNrw71jaA==","IOyggQ==","4KWHLA==","b3bDqWhv","IHbhuqtu","IGJpcmxpa3Rl","IOCksOCklg==","INmG2Yc=","2YLYsQ==","4KSq4KSw","ZXTDrQ==","INGC0Ys=","gOydtA==","IOCkheCksg==","INC80L7QttC1","44K0","IHN0cmFu","2LfYsQ==","6L+Z5Liq","INio2Lk=","5Yab","ZWt0aXI=","IGjGsOG7m25n","2YbYp9mG","IOCkkQ==","z4zPhM63","0L7RgdC6","5Y2D","YXPEsW5h","INi02Yc=","INC00LXRgA==","INmF2K7Yqg==","INit2YI=","44O+","2LPYp9mG","IGN1bmc=","0LrQvtGA0LjRgdGC","z4TOuc66zqw=","INCy0L7QvdCw","2KjYpw==","44GV44KM44Gf","bm91dA==","IMSx","6KeJ","IMO2xJ9yZW4=","IOy9lA==","5bim","0YHQu9C+0LI=","IM61z4DOuQ==","6rCQ","INmF2LHYqA==","INmB24zZhNmF","INC60YDQvtCy","IOuNsA==","4KS+4KSj","IGVsZWt0","INC90LDRgNC+0LQ=","24zYr9mH","57SE","INC/0YDQvtGE","z4HOv8+C","IOOF","5LiN5piv","IOCknOCkqA==","4Lix4Lil","INi12YjYsdiq","44Oc","IOCkl+CkiA==","xJ9pdGlt","0YHRjNC60LjRhQ==","INC70LXQsw==","INiq2YjZhA==","IOyatA==","2LnYsQ==","IG3DoHU=","0LPQvtCy","5rOi","aW5kZWtp","7KCB7J24","4bqlbQ==","IO2ZlQ==","INio2KfbjNiv","4LmM4LiX","IGtlbmRp","4Li14Lin","4Li04LiB4Liy4Lij","INqp2LHYr9mH","5be0","4KSB","4Lij4Liy4LiK","4KWN4KS2","INCU0LvRjw==","5aWH","INGD0YHRgtCw0L3QvtCy","0LnRgtC1","44KH","zqzPgQ==","INCu","IGx14bqtdA==","44CJ","6LSo","2K/Ypw==","IGTDvHplbg==","4Liq4LiZ","0YDQvtC9","ZMSxxJ/EsQ==","4oCZZGE=","IGZhcmtsxLE=","0YXQvtCy","bMOhbg==","0YfQsNGB","0YfQuNC9","IOywuA==","7LSI","0YbQuNC/","57k=","6ZaA","0LbQsA==","0YDQvtCy0LDQvQ==","4LiT4Liw","2YTZitiy2YrYqQ==","z4fOtc65","4KWILg==","0LrRgdC/","2KfZiNix","IG5ndXnDqm4=","44Gr44KI","4KWH4KSu","z4PPhM61","2KrZiA==","xI1law==","0YbRiw==","IOusvA==","0Y3Rgg
==","IGthemFu","2YHYsw==","ZWhpcg==","0LLRltGC","INiv2YjZhA==","IOuTnA==","IOCkmuCksg==","0LXRgdGC0LLQsA==","zrTOsQ==","INCx0YPQsg==","INCd0LU=","2K3YsQ==","0L7Qs9GA0LDRhA==","IHJvemhvZA==","INCy0LjQutC+0YDQuNGB0YI=","IHnDqnU=","zrvOv8+C","2qnYsw==","INi02Kg=","4Li04Lip","5q+N","INC00L7RgA==","IG5naOG7hw==","IHRyYW5n","4KWH4KSm","IHTDrG0=","0YfQvdC+","INin2YXYpw==","6YGL","2qnYsQ==","a8Op","IHbEm3Q=","INC90LDRgdGC","IOax","IOWbvQ==","IGdp4bqjbQ==","2KfYr9mK","64Kc","66Gg","IO+9pA==","INC00LXQvdGM","0YbRltGO","IGjhuqFu","4bqzbmc=","zrvOrg==","ZXllbg==","5LiU","5q2m","INGE0LDQug==","4LmI4Lit4LiZ","IM6/zrk=","2LLZhQ==","44GX44Gm44GE44KL","0LvQuNCy0LA=","4oCV4oCV","IMO2bA==","IOCkkw==","0YHRgtGW","4LiB4Lij4Lij4Lih","IHThu6Vj","IGfDtnLDvG4=","44GX44G+","IOym","6aas","INC80L7QttC90LA=","INqp2YQ=","INGG0LXQvdGC","IOyZuA==","zpg=","54c=","IGdlbGVu","INin2YrZhg==","INii2Kg=","IOCkhuCkrw==","4Lix4LiB4Lip","0YHQuNC8","INCx0L7Qu9GM0Yg=","INC80L0=","0L7QtNC4","IMSwbA==","IOCkhuCksA==","0LXRgtC1","0YbQuNGO","4bqtdQ==","IHRp4bq/bmc=","67aB","5qeY","INC90LDRiA==","4Lih4Liy","4oCZxLFu","44OD44OX","2YjYrNmH","INit2K8=","w6F2w6E=","2LHZiNi0","INC00LXQudGB0YLQsg==","44Gj44Gm44GE44KL","z4HOrg==","IMO8c3Q=","IHRp4bq/dA==","YWNhxJ8=","INCf0L4=","6Yo=","66i4","Y2hvZA==","INii2YXZiNiy","44Gf44KB","IGNodXnDqm4=","IHV5Z3U=","0L3RltGB0YI=","67Q=","5o6n","0YPRjtGC0Yw=","xI1p","44G5","4KWC4KSo","5pep","44OH44Kj","6JI=","INi02K7YtQ==","INGF0L7Rgg==","INqp2YbbjNiv","0LPQuw==","4Lit4Lit4LiB","6YCZ","INiy24zYsQ==","7ZWt","IMOWeg==","5ZGz","2K3Yr9ip","IGthxb5k","INGG0LLQtdGC","IOe+","INC60L7Qtg==","INCt0YLQvg==","0Y/RgtC10LvRjA==","0LvQsNGB0Yw=","4oCM2LTZiNiv","zrzOuQ==","IOay","IHPDvHJl","4Lil4Liw","6YWS","4Li24LiB4Lip","zrvOu86s","55E=","IOyDiA==","IOCkuOCkuQ==","IEjDoA==","66as6rOg","2LXYsQ==","IOaKlQ==","6aCt","IGLhu4duaA==","IOyDneqwgQ==","IOCkheCkrQ==","6rO17KeA","7JQ=","4budaQ==","562U","IGLDoGk=","0L7QtNGW","4Liy4LiC","0L3QuNC60L7Qsg==","IGTDtm5lbQ==","4Lin4Lih","44OG44Kj","4KS+4KSw4KSj","0L7
Qs9C4","IGtp4buDbQ==","0L7RhA==","5LqI","5Yaz","2KfZhNin2Ko=","IG7hur91","IGNlc3Q=","2LLYtA==","2Y7ZhA==","INiq2KM=","IMSR4bqhbw==","z43OvQ==","INCy0L3Rgw==","INis2KfZhQ==","aXZuw60=","IOyeiOyKteuLiOuLpA==","z4o=","5oSb","44Ob","0LzRltC9","IHTDrW0=","4bqxbQ==","6reg","5LqV","IHjDonk=","IOyblA==","0LXQu9C10L0=","IOC5guC4lOC4og==","2KfZhNmH","IGLhuqV0","4buTbQ==","4oCM2q8=","2YjYsdip","2KjYp9iq","IGLDoW4=","4bqrdQ==","2KfZhtmI2YY=","IHrDoWtvbg==","w6HFvg==","7LaU","4LmB4LiB","44KN44GG","0YDQvtGC","55M=","INCy0L7QvdC4","IHjDoWM=","INiv24zar9ix","z4DOv865","INC90LXRgdC6","2LHYs9uM","IOudvA==","2KrZhA==","zrvOrA==","INGP0LLQu9GP0LXRgtGB0Y8=","5L6d","IOWFrA==","l2k=","IO2KuQ==","2YPZiNmG","4bqvcA==","2KzZhdmI2Lk=","z4bOv8+B","0LXQu9C+","IGfDvHZlbg==","INC80LDQuQ==","INGB0L7Qtw==","4LiB4Lij4Liw","INin2LPZhNin2YU=","INGJ0LU=","IHPhu5FuZw==","4KWN4KSs","2qnYp9ix","IHRodeG6rXQ=","IG7DrQ==","56ys5LiA","6KaW","4LmA4LiB4Lih","2KfZitip","IM6I","44K2","INmF2YjZgti5","IOWS","6KGT","INCe0LQ=","IOS4iQ==","bGVyaW5kZQ==","INGB0LLQvtGX","4KWA4KSP","IHRoxrDGoW5n","z4PPhM6/","INi62YrYsQ==","INm+2LE=","INGB0LXQsdC1","INCy0Lo=","IGtoYWk=","44KA","INmG2LjYsQ==","INC00L7QutGD0Lw=","4LmH4Lia","IO2VnOq1rQ==","772J","5bel56iL","INmI2YQ=","2K3Zig==","INC/0LvQsA==","IMSwc3RhbmJ1bA==","4oCZZGU=","0LDQu9GB0Y8=","INii2YbZh9in","INin2Yc=","IOq0gOumrA==","IGFuaA==","xaHDrW0=","bGFybGE=","77yd","bm9zdMOt","0YHRgtCy0LU=","24zZgQ==","INqv2LHYrw==","44KM44KL","IHbhu7E=","xJtuw60=","IGfDtnJldg==","IHnEsWzEsW5kYQ==","IGzhu6Np","IGFubGFt","INC/0YDQvtCy0L7QtA==","0YbRjg==","IOWJ","IOunjg==","0YDQsNGB","IMW9","2qnYp9mG","0Jk=","44Gj44Go","2qnZhA==","4Liy4Lii4LiZ","2LnYp9mE","IGvDvQ==","INC80LDRgtC10YDQuA==","6ruY","xLFsbWFzxLE=","zrzOrc69","INmG2YXbjA==","IGN14buZYw==","IM60zrXOvQ==","5bmy","77y/77y/77y/77y/","4KWA4KSf","IMOnxLFrYXI=","IGtvbnXFnw==","0LjRgtC10LvRjNC90L4=","bGFudMSx","4LmE4Lil","5b6L","IO2UvA==","7Jm4","IHPDoW5n","6YGU","0L7QttC0","INii2K7YsQ==","aWxlY2U=","4KWI4KSo","IGplZG4=","INGB0L/QtdGG0Lg=
","tJ0=","INqY","IOOAggo=","6IGM","INmG24w=","0YLQvtGA0LA=","zrvOuQ==","INmI2Kg=","acWfaW0=","57u0","44CAaQ==","IG11YQ==","IGppxb4=","6LaK","44KS6KaL","IG7hu5lp","4KWN4KSX","56iu","IOOAgOOAgOOAgA==","4LmD4Lir4Lih","IM6G","2YbYr9uM","INGB0Yc=","IGzhu4c=","bHVi","0LXRgNGC","INin2LfZhA==","INGB0LXRgNC10LQ=","IOmB","INC30LDQuw==","2YbbjNmG","55+l6YGT","2KLZhg==","INC60LDQvw==","IOC5hOC4oQ==","xa92b2Q=","INm+2KfbjA==","0YLRgNC4","IGlodA==","4LmK","INCy0YHRlg==","IHRoYXk=","5Ya1","INi52YbZiNin2YY=","IM6l","4Lid","zrXPhM6xzrk=","aXlvcmR1","77yM6ICM","55qE5Lq6","IOCkuOCkrQ==","4LmJ4Lit4Lii","zrnOus6/","44KT44Gn","7KGx","2YbYrNmE2YrYstmK2Kk=","IMW+w6Fk","0YDQsNCy0Lg=","zrPOsw==","5rWL","0L7RhtGW","44CA44CAIOOAgA==","IOCkpOCksOCkuQ==","IOuG","4KWA4KSa","4LmI4Lih","IGfhu5Nt","IGtp4buHbg==","6Lef","zqY=","ZXNpbmlu","6aU=","6auU","0L7Rh9C90L4=","4KSw4KSj","5pil","57aT","INio2KfYsQ==","6re8","6ZmF","INiz2Yo=","0YHRg9GC","7LWc","5bGF","IMSNZXNr","zpHOnQ==","IGRp4buHbg==","IM61zq8=","4LiH4LiX","44Kp","IHbhu7Fj","0LLQsNCy","dMSxxJ/EsQ==","IOuqhQ==","zrfOvQ==","0LLQuNGC","INij2YM=","INC/0YDQvtC/","cmFr","0YDQsNGC0Lg=","IMSRw6FuaA==","0YDQtdC/","6rSR","0LXRhtGM","IOCkrOCkpA==","IOWMlw==","IHPDoXQ=","bGVkaQ==","7KCB7Jy866Gc","xa9q","27DbsA==","IG5hc8SxbA==","INmI2LM=","IM61zr4=","0LLRiw==","572X","2KfYsduM2K4=","4Lib4Lil","zq/Oug==","IOq4iA==","5Yeg","5by3","6L+U","IG5o4buP","5b6A","INC00LDQttC1","IMOnZXY=","0LrRlg==","INij2YU=","4Li14Liq","4Liq4Liy4Lih4Liy4Lij4LiW","INCE","0YXQvtC00LjRgg==","65Y=","IHRydXnhu4Fu","INGB0YLQsNC9","65Ok7J2A","2KfZhNiq","4KS84KWH","IOCkheCkrA==","5pW4","INC00ZbRjw==","INmF2KrYsQ==","IOu4","776N","IOqzvA==","INiy24w=","65+8","INCf0LXRgA==","IHPEsWs=","0L3QvtGB0YLRjNGO","IGVkZW4=","2KfYr9ix","44Q=","INC70LXRhw==","INmH2LDZhw==","2LbZiNi5","IOyVhOuLiA==","aXJrZXQ=","INin2q/YsQ==","INGF0L7Rhw==","INCx0LDQvQ==","7ZSM","5oCO5LmI","6Js=","IOCkrOCkmg==","INqp2KrYp9io","54mM","INC00LLQsA==","2KzYsQ==","INC/0YDQvtGB0YLQvg==","IOCkhuCktQ==","IG3hu6lj","jbw=","IGrDrQ=
=","7Y6Y","IHRhbWFt","5Yib","4LiS","0L/QtdGH","4KWL4KS4","INGB0LXQvA==","IHTGsMahbmc=","5LiB","ibQ=","INGA0L7RgQ==","INC80LDRlA==","5q2M","INiv2KfZhtmE2YjYrw==","IExvw6Bp","IGVkaWxt","IGtvbnU=","INin2YTZhdix","IHVsYcWf","IHnDvGtzZWs=","zr/OuQ==","2Y7Zhg==","IGLEmw==","44K344On44Oz","77+j77+j77+j77+j","IGfDvMOn","INin2YjZhA==","INC80LA=","INio2K7YtA==","4KS+4KSW","INCy0LjRgQ==","xb5lbsOt","IHpwxa9zb2I=","em5hbQ==","INix2YjbjA==","5Yud","4KWkCg==","2YTZgg==","INC20LjQtw==","0YDRltCy","INGD0L/RgNCw0LI=","IHBo4buR","aWNyb3M=","IOC5geC4lQ==","IOuwlQ==","2YjYp9iq","77yM5LiA","0LDQvdGB","57Sa","4Lii4LiZ","4LmB4LiC","IGdpw6Fv","5LqM5LqM","IMSwcw==","7Iq5","IG9sYWNhaw==","IEPDoWM=","INGA0YPQsQ==","4bq5cA==","xJ9pbml6","44Gq44Gp","INC80L7RgA==","INGB0LTQtdC7","2YTZhdin2YY=","bsOpbQ==","5bCN","IGRuZQ==","7Lac7J6l","2LnYqA==","Ojo6Ojo6Og==","zpI=","ZWtldA==","INGA0LXRiA==","6Iis","IO2ZlA==","2LXYrw==","INC80LDRgA==","0Y/Qtg==","2LTYp9ix","44Gy","INin2YTZig==","2Y0=","4KSC4KSc","0LzRiw==","IGthcmFy","2YTbjNiz24w=","4Liy4LiT","576k","IG9sbWFzxLE=","IGhhesSxcg==","zrPPgc6xz4Y=","r3U=","0LLQvtC7","INGB0YLQsNGA","b3ZhbGE=","INCy0L7Qt9C80L7Qtg==","INC00LDQsg==","6aKo","2LHYpw==","INC00L7Qv9C+0Lw=","6rKD","IOyYrA==","IOWO","IOuquw==","dcOn","7Zo=","bMO8aw==","5Lit5b+D","IOCkpuCksA==","IOKXhg==","IHRheQ==","INio2LPbjA==","IM+Dz4TOsQ==","INmF2K4=","0Y/RiQ==","5beu","4LiJ","66C5","4LmD4LiZ4LiB4Liy4Lij","INmH2YY=","44G2","0LvRltC0","5Y2w","IHNhbw==","xZlhZA==","66as64qU","0YHQu9C10LQ=","5ZSu","IHw6","5pWZ6IKy","INC80L7Quw==","INmH2Yo=","64E=","INC60YPQu9GM","J25pbg==","INiu2LE=","IGdlbmVs","IHThu60=","IGt1cnVs","0LXQvdGC0Lg=","4KWL4KSc4KSo","6L+Z5qC3","INC80ZbQtg==","IG5naGnhu4dt","IM+Azr/Ouw==","5oub","IOCkl+Ckjw==","4bqneQ==","IGPhuqNt","57Sw","csSxY2E=","INi52YTbjA==","4Li04LmJ","aHVy","IGNoxrBh","0YPRlNGC0YzRgdGP","44Gp44GG","0YPQuw==","4Li04Lij","IOaciQ==","5Ly8","0YTQtdGA","0Y3RgtC+0LzRgw==","5peF","INmF2YjYrA==","IOuzuA==","IGdp4bud","IGtp4bq/bg==","4LmI4Lin4Lii","IGTDvG55","INiy2Y
U=","0L7QstGW","INGG0YzQvtCz0L4=","4Li04Lia","IOyGkA==","6JCl","INGA0ZbQtw==","IGjhu5c=","0YDRltCx","IOOAgCDjgIAg44CAIOOAgCDjgIA=","7Jy866mw","5LqG5LiA","INmC2KjZhA==","6b6Z","INqv2LA=","INmC2K8=","44Gq44GL44Gj44Gf","IOC5gOC4ow==","5biM","INGB0YU=","INCz0YDQvtC8","5Zui","IOynkQ==","INC70YPRhw==","5YW1","INCe0YI=","IG114buRbg==","44GY44KD","b3Zuw60=","66m07ISc","67OA","INC90LXQsQ==","0YHQuNC4","2YbZhQ==","xJ9pbg==","IHRvaG8=","ZW5peg==","2KjYp9i0","INGB0LvRg9C2","IGLhu59p","IMO8emVyZQ==","IHNhZGVjZQ==","IM+AzrHPgQ==","wqDCoMKgwqDCoMKgwqDCoMKgwqDCoMKgwqDCoMKgwqA=","6IyD","z4TOuc66z4w=","IOS6jA==","44KI44GG44Gr","6J4=","44Gu44Gv","INGD0LrRgNCw0ZfQvQ==","IGLhuq90","0LLQsNC9","INGB0YLRgNCw","6KGA","bnV0w60=","b2t0","4Lij4LiH4LmA4Lij","INi12YE=","5YWa","z4TOrw==","77yB44CNCgo=","INGC0LXQvNC/","6aGM","IHNsdcW+","0YPQutC+0LI=","IG5naMSpYQ==","55Sy","IGThu4U=","0L7QstC4","z4TPhQ==","2LHbjNqp","IEFuaA==","INCy0YHQtdCz0L4=","4oCM2qnZhg==","dGXFmcOt","IG3hu6Vj","2YfZhtqv","INCf0L7RgQ==","2pjZhw==","INin2YTYug==","5p2+","eXNs","IHlhcMSxbGFu","54i2","IG3huqFuaA==","2LHYp9mH","4LiU4LiH","b8SN","66eQ","5aCC","0LzQsNGC","IGXFnw==","2YjZhdin2Ko=","INiz2KfYrtiq","5Zug5Li6","INC/0YDQuNC5","xLFsbcSxxZ8=","6aSo","4Li44LiH","IOuB","4LiV4Liy4Lih","5ZCJ","zrzOrg==","IOacrA==","IHrDoWtsYWQ=","2KrYrQ==","6L68","INCy0ZbQuQ==","INmF2YbYtw==","IHRvw6Fu","0LrQsNGA","INCX0LA=","INC/0YDQuNC80LXQvQ==","44KL44Go","4Lix4LiX","24zYsw==","INin2YbYrNin2YU=","INi52YTZig==","4KS84KS/","ZW7DvWNo","IExpw6pu","INC/0LA=","6ZqK","IG1vaG91","INC60ZbQu9GM","IM6kzr8=","2KfZhNio","zq3OvQ==","IG5hYsOt","w6dp","bGVyZGVu","IHRoYW5o","IGLDvHTDvG4=","IOWf","7Lig","IHphdA==","2YrZiA==","IM68zrnOsQ==","dXnhur90","0ZHQvQ==","5YiS","0LvQuNCy0L4=","4LmI4Lit4LiH","5LuW5Lus","INCx0LDQsw==","4KS/4KSt","INGC0LDQvA==","INC/0YDQtdC/","4Li04LiK","4oCZ0Y/Qtw==","IFBow6Ju","0LbQtdC9","4KWI4KSV","INGB0LvRg9GH0LDQtQ==","IC46","5a2m5qCh","xLBO","576p","INGB0YLQvg==","IOCkueCksA==","z4XOvQ==","IHhlbQ==","INCx0YPRgtC4","0YHQuNGC","56qB","
4KWN4KSb","5ZGi","77yM5Lmf","ZW7Emw==","IM66zqw=","aXlvcnVt","INqv2YHYqg==","4peP4peP4peP4peP","4Lix4Lih","INCa0L7QvQ==","0L3QvtGI","0L3QuNGG0YI=","w7x6ZWw=","c8Ot","5bir","2LXZiNmE","54Ot","IMSR4bun","44Ku","5pWF","IMWha29s","0YfQtdC9","4LmA4Lii","4LiZ4LiZ","2YDZgNmA2YA=","IMO8w6c=","5b+1","44Oq44Ki","IO2ZmA==","IOmHkQ==","54+t","INGB0LrQu9Cw0LQ=","0Y/QvNC4","w7xm","IGjDow==","IMSQ4bqhaQ==","woI=","5YSq","IGJ1bHVuYW4=","INin2YTZhdit","5oiP","IOip","INC90L7RgNC8","IGNodeG6qW4=","INC30LDRgdGC","IHbDrWNl","0JY=","IOCkhuCkpw==","IMSNYXM=","INCx0L7RgA==","z4HOuc6x","INmF2KfZhw==","IO2F","xZllbA==","0Y/QstC4","z4TOtc+C","aW7Emw==","INC/0LXRgNC1","6ZWH","4KWN4KSe","IOmY","4LmI4Liy4Lin","4Lij4Lij","INiz2Yc=","0LLQsNC70Lg=","55WZ","INGE0YPQvdC6","IO2WiQ==","2YHZhw==","55Sf5rS7","6IGe","b2t1ZA==","IOyCtA==","xLF6xLE=","INC/0L7Qu9GD","77yM5L2g","2LTYp9mG","5rG6","0LHRgNGP","0L7RgdGD0LTQsNGA","IG95dW4=","0LDQvdC40Lg=","IHByxa8=","IG7DoXY=","INC80LXQvdGP","IOyemA==","IMSwbg==","IHRow61jaA==","IMSR4bqjbQ==","5ZyS","INCy0LbQtQ==","IGxvw6Bp","INCe0L0=","0LzQtdGB0YI=","IM6+","44CF","IGNoaeG6vw==","0YfRlg==","IO2RnA==","64us","IOuLrA==","4KWA4KSh","0YDQsNC70Yw=","ZGlr","IO2GoA==","65+J","INi12YY=","IHN0ZWo=","INCw0LrRgtC40LI=","IOmm","IOC5hOC4lA==","5oqA5pyv","IHByb3N0xZllZA==","5a6z","44GQ","IG9sdcWfdHVy","ZWxvcA==","44Gh44KD","6YOO","2LbYpw==","INiu2Lc=","67CV","0LXRgdGP","INmH24w=","0L3QsNC0","IG5nw6BuaA==","0YDRg9GI","44GE44GE","IMO8csO8bg==","4Lit4LiV","4KWL4KSq","IHNhecSx","4KWA4KS4","0LXQvdC40YU=","INGB0LjQvA==","4KWA4KSm","5aSJ","4LmI4Lin4Lih","IOC5gOC4gg==","5bey57uP","0LDRgtC+","INGA0LDQudC+0L0=","7YOd","INGC0YDQsA==","bGF5YW4=","4bq/cA==","4KS+4KSf","2K7Yp9io","5Lq65rCR","5a6d","6IY=","6KqN","bmHEjQ==","IO6g","INCa0Lg=","IGJhxZ9rYQ==","Y8Wv","2LbYuQ==","6Iiq","4Li14Lih","0YvQvNC4","zpnOow==","INi02LHaqdiq","4Lii4Lin","IG11c8Ot","INC90LDQuw==","4Li14LiX","IMOhcA==","4Lij4Liy4Lii","5rK5","bGVtZQ==","IOCkruCkqA==","4LmE4Lif","0LDRgtC40LI=","uI8=","6K2w","z4PPhM6x","7Zal","0L
XRgtGD","INGB0LLRj9C3","0LXQtNC10YDQsA==","INiu2KfYsdis","4Liy4Lip","4oCM2b4=","0ZbQsw==","6aGe","IGto4bqj","INGB0L/RgNCw0LI=","6KGX","44OV44Kh","INC80LXQttC00YM=","0YPQu9C4","INio2LLYsQ==","0YbQtdC9","IGVrb25vbQ==","2K/Zhg==","2KfZhduM","4Liy4Liq4LiV4Lij","IG7Em2tvbA==","Z8O8bg==","0LfQuA==","IMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKg","56a7","IHRyxrDhu59uZw==","j2k=","7Y64","INGA0LXQsQ==","5bqV","INiq2KfYsduM2K4=","0L3QuNC80LA=","IHRow6Ju","0LDRgtC10LvRjNC90L4=","INin2YTYsNmK","2YjZhtuM","IOmD","IGLDrG5o","zrnOus6uz4I=","4Lie4Lil","2KrZhdin2Lk=","IFByYWhh","INGB0YLQsNCy","2K/Zitiv","IGdp4buvYQ==","INC/0YDQvtCy0LXQtA==","wqBr","2YbYr9qv24w=","0YbQuNC5","55I=","INin2YTYo9mF","IOi0","2KXZhtis2YTZitiy2YrYqQ==","IOyeiOyXiOuLpA==","57eo","4Lix4LiZ4LiY","INGA0L7QutGW0LI=","IGPDoW8=","IGtow7M=","INmG2YjYuQ==","2LPZhA==","INGD0YHQu9C+0LI=","IGPhu6l1","0L7QstC+0LPQvg==","4KS/4KSX","lOuLpA==","5p2O","IGLDtmxn","IG5ndQ==","IGjhu691","0L3QuNC4","7KCI","INC/0YDQvtC8","5Y+M","IGTGsOG7m2k=","0K4=","2YrYtA==","5rip","64+F","INC30LzRlg==","zrjOt866zrU=","IGJhxJ9sxLE=","IMO8emVyaW5kZQ==","INiq2Lo=","INC/0YDQvtCz0YDQsA==","acW+","IOel","IHlhcmTEsW0=","woDCgA==","INGD0LI=","IHLFrw==","IGNoaeG6v24=","zr3Ov8+C","44Go44Gq","2KfZhtiq","6LC3","w61zaw==","aXNpbmRl","INC00L7Qsw==","6L+9","INC/0YDQvtGC0LjQsg==","z4HOv8+F","44Gu44GL","IGJhesSx","xLFyYWs=","4KWH4KS3","INmF2LTYp9ix","IOyWkQ==","INC90LXQtw==","INiw2YTZgw==","6Kq/","5YKZ","INGC0YDQsNC9","IM+AzrHPgc6x","24zZhdiq","IHRp4bq/bg==","INmH2YXZhw==","ZWZvbg==","wrsuCgo=","INmG2K8=","2KzZhA==","INiv2KfYr9mH","INCy0LXQtA==","IHPEsW4=","INGB0LLRltGC","ZWxlcmlu","4oio","IHnDvHI=","0LTQsNC9","INCe0YE=","IGjhuqFuZw==","6K64","z4PPhM63","dXnhur9u","INC90LDQsQ==","INC+0YU=","z4PPiQ==","IGJ5bHk=","0YHQutC40YU=","bGFtYWs=","0LjRgtC+0YA=","IHlhdMSxcg==","INC/0YDQvtC40LfQstC+0LQ=","INis2YXYuQ==","xaA=","5o+Q5L6b","IHBydm7DrQ==","IM6xz4A=","7Zmp","INC/0YDQsNC60YLQuA==","bGVyaW5kZW4=","INC90LXQvtCx0YXQvtC00LjQvNC+","5bq3","2Y7Ypw=
=","INiz2YY=","xLBM","IOq0kQ==","IFDFmQ==","554=","INGC0LXQvNC/0LXRgNCw0YLRgw==","IGthYnVs","IGJ1ZG91","0YbRltC+0L3QsNC70Yw=","772c","IMOnb2N1aw==","INGC0ZbQu9GM0LrQuA==","Ynl0","44Ok","INGB0YLQsNGC","IOadsQ==","bGXFvml0","2KfYs9i32Kk=","4Li44Lij","acOqbQ==","INC60YPQu9GM0YLRgw==","INC/0L7QvQ==","xKluaA==","5Zac","0L3QtdCy","0ZTQvQ==","INGB0L7QvtGC","650=","54i+","IHR14buVaQ==","a2FuxLE=","4Liq4Liz4Lir4Lij","2KfYudiq","44CA44CA44CA44CA44CA44CA44CA44CA44CA","0LTQtdGA0LY=","INC+0LHQu9Cw0YHRgtC4","IHbhu6th","INmF2YU=","4LiB4Liz","4LmB4Lih","aXZlcnNpdA==","4LmB4Liq","5qyn","bGFuYW4=","2YrZhtip","2LPYqQ==","INC70Y7QtNC10Lk=","4Lij4Lij4Lih","IOyxhA==","IOWkqQ==","0LXQvdC90YvRhQ==","4LmB4Lir","IHNwcsOhdg==","6K2m","77yc","4Lix4LiS","aWxlY2Vr","IOaf","IOiLsQ==","INGF0L7RgNC+0Yg=","66CH","27LbsNuw","5oqk","IGzDow==","xZnDrXplbsOt","INiq2YjZhNuM2K8=","6Zqb","44KM44Gw","w6HFoQ==","2KfYsdmK2K4=","5pS7","IGtob+G6o25n","6ZmN","0L7QstCw0L0=","IGfDonk=","4oCZbsSxbg==","2KPZhg==","bWnFn3Rpcg==","IHPhu6lj","0LrRg9GB","IMO8emVyaW5l","xJ/DvA==","2KfYqNix","77yM5bCx","zKM=","IOuPjA==","IHRy4buxYw==","5pS25b2V","5om/","IE7hu5lp","IOeZvg==","0YjRjA==","2KzYqQ==","67Kg","4KSJ","4LiP","IMO8bGs=","INmH2LPYqtmG2K8=","4Lix4Lia4LiB4Liy4Lij","INGP0LrQsA==","444=","INCv0Lo=","INCz0LTQtQ==","dGl2","44CI","0LvRjtGH0LA=","4KS+4KWkCg==","INmF2KfZhg==","IGRsb3U=","IOODlQ==","4KSb","IHBo4bulYw==","YWthdA==","0Kw=","YXPEsW7EsQ==","IOaKleeovw==","0YDQtdCy","IHZ5dA==","IHptxJs=","z4TPiQ==","6Kw=","INGD0Lw=","IHV6dW4=","IHByb3Rp","INGB0L7RgdGC0L7Rjw==","4Lix4LiS4LiZ","YXRpaw==","IOC4oA==","IOCkhuCkpg==","bGFyxLFuZGFu","5oCl","44O844Kv","INmE2YTZhQ==","2YHYqtmH","Ljou","w7zDp8O8aw==","0L7Qu9C10LLQsA==","4LmMCg==","INC/0LXRgNC10LI=","INmG2LPYqA==","0LXQu9C10L3QvdGP","J8Sxbg==","zr3PiQ==","6KGj","INiv2Yo=","5Y2H","IGJlbGlydA==","IC86","6JGJ","IHZ5aA==","55qE5LiA","6IOM","IOyXtA==","0L7Qu9Cw","INiq2Kg=","w6FjaQ==","4KS+4KSJ","4LiO","55Si","4KWI4KSy","INmC2Lc=","64SI","4bqvbQ==","0YDRj9C0","IHBo4bul","INmI2KfZg
ti5","IG1lcms=","IGNo4buRbmc=","5a+f","2KfYqNi3","dXN1bmRh","INC+0LTQvdCw","xb5lbA==","INGB0YPQvA==","IHBow7k=","IM62","IHphdg==","ZWRu","IHBvdMWZZWI=","INqp2YbZhtiv","INGA0LDQt9Cy","v6A=","INin2YTYsg==","IG3Em2w=","INGB0YLQsNC90L7Qsg==","INiv2LHbjA==","IHTGsOG7o25n","44G1","INC00LLQuA==","0YzRjw==","6KO9","INiq2YQ=","xaHFpQ==","44Gq44KJ","IOCkleCkiA==","xaFp","4oCM2KfYs9iq","IGvhu7k=","66ed","IOCkhuCknA==","44O0","IGLhu48=","ZHXEn3U=","IOav","0L/QtdGA","2KfZhNmK2Kk=","5omA5Lul","5YWw","IG9yYW4=","IO2e","z4POr86x","IHBo4bun","INCx0YvQu9Cw","0YfQuNCy0LA=","IOqwhA==","0L7Qu9GW","2YPYqg==","5YWn","4KWC4KSf","IOuW","INmE2Yc=","66CI7J20","IGjEsXo=","5aSP","IOaKleeov+aXpQ==","6Zq+","k7A=","0LPQu9GP0LQ=","w6xu","INC80LXRgA==","IOOAkQ==","INC+0LHRiQ==","dW1odXI=","56C0","0LvQuNGB0Yw=","c3DEmw==","2LHZitmC","INiq2YE=","INin2YTZiA==","57Wx","0LDQu9C+0YHRjA==","IG3DtA==","IHbhu4c=","IM60zrk=","INC30L0=","INio2K0=","2KrZiQ==","IOyngQ==","IHZlbG1p","dXnhu4Vu","IHBo4bqhbQ==","0YHRgtCy0L7QvA==","INmI2KfZhNmF","INCx0YvQu9C4","2KfYsA==","xJvFmQ==","4oSW","INC/0L7Qu9C+0LY=","4Liy4LiB4Liy4Lij","IMSNbMOhbg==","zpXOoQ==","IOyCsA==","zrLOsQ==","IOaXpeacrA==","2LLYrw==","INmG24zYs9iq","IGhheWF0","56K6","4LmA4Lin4Lil","IENow61uaA==","77yM5piv","INmI2KfYrQ==","6I+v","IM6uz4TOsc69","IHjhu60=","IMSNZXJ2","INmF2K/bjNix","6YY=","IOuIiA==","57ut","IHTDqm4=","7Ja4","IG9ydGF5YQ==","INC20LXQvQ==","IG7GoWk=","0LXQvdC90YvQtQ==","0YTQtdC60YLQuNCy","7Z2s","IGto4buP","IMSRYQ==","b3N5YWw=","4Lib4Lij4Liw4LmA4LiX4Lio","IG9kc3Q=","IOC4lg==","IM6/z4DOvw==","5pS/5bqc","IGLDoG4=","IEdp4bs=","IG9sZHVr","0L7QstCw0L3QuNC1","4Lit4Liq","INC90LXQsg==","z4TPgc6/","IOyGjQ==","a8Sx","IOCkrOCkoQ==","IM+Fz4A=","IFbDvQ==","776E","562W","zrXPhg==","IOWFqA==","INmB2LHZiNi0","2YLbjNmC","5LyB5Lia","zrXPjQ==","6Jmf","IGF5cg==","2LbZiA==","xaFlbA==","INC/0ZbRgdC70Y8=","0ZbQudGB","6aKG","2qnYqtix","0LvRg9Cx","6KuW","5rC4","0LXQt9C/0LXRhw==","INC60LDQvA==","2LnYr9in2K8=","6rGw656Y","4Li54LiH","INiq2YfYsdin2YY=","IOuEiA==","0YDQuN
Cy","INGC0L7RgA==","2KfZiQ==","J9GP0Lc=","2ZDZig==","IGtow60=","INGI0YI=","IM6czrU=","IGJpcmk=","6Ie0","0YPQstCw0LI=","44GI44KL","INC00LjRgQ==","0LDRjtGC","2LXYqA==","5Z2H","0L7Qu9GO","6Iul","INin2Ks=","c291","5ZCD","44Gu44Gg","dWJsaWs=","0LvQtdC5","wqBt","IO2Pieq3oA==","4bqheQ==","zrXPgA==","dMSxaw==","IHZ5dQ==","2LnZiNiv","INC00L7Qtw==","IGzhu4tjaA==","6LOq","4KWB4KSI","4Lix4Lie","IHTDqW0=","IGthw6c=","IGPDoWk=","IM68zrE=","4oCm4oCm44CNCgo=","7Yis","2LHZiNmH","IHJ5Y2g=","zpHOpA==","INGA0ZbQsg==","67OR","5YGl","IHpkcmF2","INi52K/Yrw==","6I2J","zrTOuc6x","IHbhuq1u","0YvRgg==","INC60L7Qu9C40Yc=","z4zPhM61","IGLEsXJhaw==","INit2YU=","IGNo4buL","6buE","INin2YTZhdiq2K3Yr9ip","4Li34Lit4LiB","INC30LDQu9C4","IG5oYW5o","4oCM2KrZiNin2YY=","6529","INiq2YjYs9i3","6KaB5rGC","0LDQu9GD","w7xua8O8","44Gq44KT","IFRyb25n","4LiZ4Liw","5ZG8","INmK2YU=","0LjQutC4","INGC0YPRgg==","IHlhxZ9hbQ==","IG3hu41p","6ZuE","INit2LY=","INCw0LLRgtC+0Lw=","IOCkuOCkrOCkuA==","IHnhur91","44K544K/","z4fOrg==","0ZbRjg==","6Jg=","4Li04Lii","IG1ldg==","aWNrw6lobw==","4KS/4KS5","5a2j","zrjOrg==","IOCkrOCkog==","INin2YTZhdiz","z4TOv8+F","ZWtsaQ==","INC00LXRgNC10LI=","5bit","5rKZ","44Gr44KC","IG9ibGFzdA==","IGjhu5k=","IOW5sw==","LjouOi46LjouOi46LjouOg==","IOmW","INis2LI=","INmH2YXahg==","5Lim","0YbQtdC/","4KS+Cg==","5Lit55qE","J27EsW4=","IO2VmOuKlA==","0ZTRlw==","INio2LQ=","5Y20","5Lmg","INin2LfZhNin2LnYp9iq","IOuyoA==","INqp2LHYr9mG","4KS+4KSh","IOCkheCksA==","IEjhu40=","INCz0YDQvtC80LDQtA==","INiz2Ko=","z4TOuc+C","IGFuY2Fr","INC+0LM=","IGt0ZcWZw60=","IOas","IE5naA==","IHRlZHk=","IM+Azr8=","IHF1w6Ju","INCx0YPQu9C4","6K+G","IHThu6tuZw==","5Lq655qE","4Li14LiB4Liy4Lij","IM66zrHPhM6x","IHBvdXpl","oW5n","INii2LE=","INGC0YM=","IHThu7c=","IERhbmg=","0L7QvdC+0Lw=","0YHQuNC5","IOC5gOC4lA==","o6g=","xaFr","44OD44OJ","YXJkxLFy","IHnDtm5ldA==","0YPQstCw0LvQuA==","5YWI55Sf","INCQ0YA=","IHByb3Rvxb5l","IO2BrA==","IGplZG5vdA==","IHTDvQ==","6YeH","IOC4q+C4ow==","IOWcsA==","57qi","INC80L7Qu9C+0LQ=","acOqbmc=","IM+Mz4TOuQ==","
INiv2KfYtNiq2Yc=","IHV5Z3Vu","INC+0L/QtdGA0LA=","5Y+r","INCw0L8=","INC60YPRgA==","2KfYudip","dW51eg==","IOyCrOynhA==","IHbDtA==","w6dvaw==","IOiB","0YLQtdGA0LXRgQ==","INin2LPYqtin2YY=","0LDQu9Cw0YHRjA==","4KWB4KS1","4buz","IGzGsHU=","INCi0LA=","IGzhu7Fh","J9GU","IMO8eQ==","INuM2qnbjA==","5r4=","0L3QtdC8","INiu2KfZhg==","INGN0LvQtdC6","2YLYp9mE","0LvQvtC6","IMSR4bq5cA==","4KWJ4KSy","IG3Fr8W+","64uk64qU","IO2VmOuCmA==","2YTYqg==","546w5Zyo","0LzQvg==","z4XPgw==","44Gf44Gh","IOyghOyEuA==","4KWN4KSf4KSw","2LnYp9iq","2K/ZiA==","5L+6","5qW9","5qOu","INC70LjRgdGC","zrTOuQ==","5a+M","IMSRxrBh","0LLQtdGB0YLQuA==","0LTQvg==","0LDQvdC90ZY=","IMO8cmV0","IGfhu41p","INGB0LLQvtGO","4burbmc=","IHThuqV0","5Lqa5rSy","w6FjZQ==","TsON","INGA0Ys=","5ruh","z4HOtc+C","5YWN6LS5","0LvQvtGC","5pm6","IM6xzrM=","IOCkheCkrg==","IOe0","0L7QtNC+","0YXQuA==","IG5ndeG7k24=","6YOo5YiG","0LLQsNGC","INGC0LXQsQ==","0LfQsNGG0ZbRlw==","INCf0YDQvg==","2LnbjA==","INmI2Yo=","656c","IG5lYnk=","INis2K/bjNiv","xJ9pbWl6","o70=","IOCkhuCkpA==","IOCkreCksA==","5omY","5a6J5YWo","IOuTpOyWtA==","2KjYsdiv","IOqyg+ydtA==","5Lqy","5rCP","0LDQu9GW0Lc=","bGFjaw==","INmF2K7YqtmE2YE=","2KfZhtmK2Kk=","IOyyrQ==","INCy0LjRgg==","IGhhcmVrZXQ=","6ag=","4LiZ4Liz","INio2LHYrg==","5aOy","0YfQsNC5","IGFubGF0","IOCkheCktQ==","INin2YHYsg==","IGjhur90","INqG2YbYrw==","6Zec","0L/RgNC40ZTQvA==","Z8Sx","IGtvbXA=","IGzhu5tw","IG3hu5dp","4Lib4Lij4Liw4LiB","IGhhZg==","IGVkZXI=","INC30LTQvtGA0L7Qsg==","4KWC4KSu","66C4","IG9udW4=","INmF2LHYr9mF","INCc0LDRgA==","IOyWtOuW","0LzQsNC9","INGB0LjQu9GM","57ay","67iU","0LvRj9C10YI=","INC90LXRgdC60L7Qu9GM0LrQvg==","bGFuZMSxcg==","INCy0LQ=","INmG2Yg=","44GO","0YLQuNC9","2KrYtA==","0LDQvdC40Lk=","IHTFmQ==","0YHQuNGF","0LvQvtC8","5q2p","446h","INit2LE=","5ouN","ZW5vdQ==","INCy0LXQu9C4","IM60zrc=","c2th","5Li76KaB","2KfZgdip","INCx0L7Qu9GM0YjQtQ==","4Li04Lio","55uK","INmB2YLYtw==","5aiB","IGjGsOG7n25n","IERvxJ8=","IGTDoGk=","INCz0L7RgtC+0LI=","INCy0LDQvA==","4oCJ","4KS+4KSa","5YW4","4LmD4Lir4LiN","IOer","
ZWt0w7Zy","INCy0LXQuw==","INmE2Yg=","2LTYqtmH","5pi+","4bqjeQ==","4LmC4Lih","IHThu5VuZw==","INC/0L7QstC10YDRhQ==","0ZfQsg==","IHBow6lw","55qH","INC/0L7RgNGP0LQ=","INGB0L7QvtGC0LLQtdGC","4KSd","INGB0LXQsdGP","IOuCoA==","INCx0YPQu9Cw","4LmJ4Liy4Lii","IOOAgOOAgOOAgOOAgA==","INmF2KzZhdmI2Lk=","77yM5Lul","INio2YjYr9mH","zrzPjA==","IO2OuA==","ZcWfaXQ=","0Y7RidC40LU=","0Y7RidC40YU=","5Z+66YeR","INiq2K3Yqg==","INCy0LvQsNGB","bGVybGU=","44Ky","64qY","6JM=","bWFuxLFu","7J6I","IHphc3Q=","INGH0LXQu9C+0LLQtdC6","4KWH4KSs","cGXEjQ==","INio2LHZhtin2YXZhw==","IHNsb3Y=","IG7Em2phaw==","6rec","4KWH4KS5","6Jek","INio24zYtNiq2LE=","aWxpeg==","IOuUlA==","2KfYstmH","2KrYrw==","IGV0bQ==","IOuLpOuluA==","IHbFrw==","5bCE","INC60LvQsNGB","0LLRgNC+0L8=","5rS+","IMSRw6xuaA==","0YPRjtGC","0YPQtdGC0YHRjw==","6Zyy","INGB0LrQvtGA","INCy0LDRgQ==","7ZWY7JiA64uk","INiv2KfYtNiq","IOeE","IOilvw==","IM66zrHPhM6s","4KWm","7JeG","INiu2K/ZhQ==","2KfYs9mF","zpHOoQ==","IEFtYQ==","5aWl","INio2LLYsdqv","INCS0ZbQvQ==","IMWY","IOC4iOC4suC4gQ==","INGF0LDRgNCw0LrRgtC10YA=","IMSR4buZaQ==","INGA0L7Qt9Cy0LjRgg==","INC/0YDQvtGE0LXRgQ==","INC60L7QvdGC0YA=","zp/Omw==","IG1pbmg=","5LyR","7Kq9","IGNoxqFp","0LfQsNGG0LjQuA==","INC00ZbRj9C70Yw=","64Y=","IG5nYXk=","4KWC4KSC","IGlodGl5","6Zuq","IOq0gOumrOyekA==","IGPhu6U=","IOyniA==","2YrYqw==","4bq3cA==","2YjYp9i5","44GC44Gj44Gf","IOec","IOyasOumrA==","4LmI4LiH4LiC","IOet","KdiM","w6Bt","2YTbjNmE","IOqxuA==","0LDQu9GM0L3QuNGF","5pe25YCZ","dW5kYW4=","IEfDvG4=","IHRvcGw=","INGA0LXQutC+0LzQtdC9","INin2YbYqtiu2KfYqA==","w6B1","xI1rYQ==","67CA","INC60YDQsNGB","0LvQvtC/","5by1","INin2YTZhdi5","bcOtbg==","IHZp4bq/dA==","IOqwmeydgA==","dXRlxI0=","IG5lY2g=","57WC","44Gq44GM","YXnEsW4=","IMSNaW4=","Y2jDoXrDrQ==","2KfZgdi4","0YDQvtCy0LDRgtGM","4LmE4Lij","IOOCpA==","INC30LDQsdC+0LvQtdCy0LA=","IOWxsQ==","IGthZMSxbg==","z4TOt8+C","0LDQu9C40YHRjA==","IGjDvGs=","5ZOl","INC/0LXRgNC4","xZnDoWQ=","IOCkheCkuA==","INGB0YLQstC+0YA=","INmI24zaqduM","IOyh","IGPhu61h","IGhp4buDdQ==","5ri45oiP","0YzQv
tC80YM=","IGfDsw==","IHRvaA==","INCx0LvQsA==","IOWR","INC/0LvQvg==","0LjRiA==","IMSR4bqldQ==","c2tvdQ==","44KI44KK","4Li54Lib","IHLhu5Np","0L7Qv9GA0L7RgQ==","0L3QvtC70L7Qsw==","INGC0YDQsNCy","IFdheWJhY2s=","IOC5hg==","INGD0YfQsNGB0YI=","INC/0YDQtdC/0LDRgNCw","IGThuqFuZw==","IMOcbg==","4LmE4Lil4LiZ","INiv2KfYrg==","IHPGoQ==","IGtveQ==","65286rOg","IMSRw7puZw==","4KWH4KSCLA==","IGdlw6dpcg==","INGP0LrRidC+","0YHRgtGA0L4=","0LXQvdGC0L7Qsg==","0ZbQtg==","0LrRg9GO","IGXEn2l0aW0=","4KWN4KSw4KS4","INCh0L8=","2KfYqtuM","44GR44KL","z4TPic69","INC60Lw=","4paN4paN4paN4paN","amlzdA==","0YLQsNC6","IOWQjeWJjQ==","6aGU","0LvRiw==","IGto4bqjbw==","4oCZ0Y8=","INmF2YTbjA==","bG/Fvg==","IOyWuA==","IGfhuqdu","IOCknOCksA==","4KSs4KSw","zpXOow==","4Liy4Lib","IG7DoXM=","Zm9ybWFjZQ==","IGV0bWVr","0LLQtdGB0YI=","7Ja07JqU","IOCkpOCkpQ==","INGB0LXQug==","zr7Otw==","5q+b","Qmly","IOyehA==","IHZhcmTEsXI=","2YjYp9mE","xLBS","b3ZhbsOp","0L3QsNGA0L7QtA==","4LiE4Liz","ZW1law==","IM6Vz4A=","IMWZZQ==","44G+44Gb","dXnhu4d0","IOyWvA==","csWv","IG9udQ==","4LmA4LiV4Lit4Lij","0L7QtNCw0YA=","2LLZhw==","IGthdg==","0L7QvdGL","INCy0LXRgQ==","7IKs7KeA","INCz0LvQsA==","w50=","INmC24zZhdiq","55Wl","4LiW4Liy4LiZ","xI1pbA==","IOS4hw==","6L6D","5YWF","INGA0LXQtA==","4Lih4Lir","YW1pbGlh","4KWH4KSV4KSw","IHThu5Fp","2YHbjA==","0YDRltGI","7JWg","4LiZ4Liq","4LiI4Lij","4KWH4KS24KSo","INmF2YjYttmI2Lk=","5om5","IG9ic2Fo","INC90LDQstGH","IGRlc3Rlaw==","IHphcw==","5ZON","w7xtw7x6","IOef","IOio","2aw=","57uI","IHpkZQ==","IHrDoXA=","4KWC4KS44KSw","7J207KeA","55qu","bG9t","4KWn","2YTYp9mC","4LiZ4LiV","7YyF","0LvQsNC00LA=","bWFzxLFuYQ==","44Gu44Gn","65Ok7J2E","INC90LDQsw==","bWFzxLFuxLE=","44Kd","xLFuxLFm","5Zu0","IGLDtmzDvG0=","5aWW","5qiZ","2YTYp9it","INCz0L7RgdGD0LTQsNGA","2K/Yp9mG2YTZiNiv","INC/0L7RgtGA0LXQsQ==","INGA0L7RhtGW","0L7Qs9Cw","INGB0LvQtdC00YPQtdGC","INC/0LDRgNCw","6bw=","44GN44Gf","zq/Otg==","IGLhu5E=","0YLRltCy","77yM5aW5","ZmFtaWxpYQ==","6aCF","INiv2YQ=","IHNrdXA=","0LXRh9C10L3QuNC1","44GT44Go44GM","4KWA4KSs","4
Li44Lil","qOu2gA==","INin2YTYudix2Kg=","IOe+jg==","INin2YTZhdmI","INil2YY=","IG7DoXNsZWQ=","IHRvbXU=","zoQ=","INC30LDQstC4","IG5odQ==","IHDFmWVkc3Rhdg==","7KCV67O0","b2tvbA==","INC60YDQuA==","YWR1","INC60LDRgg==","INGN0YQ=","0LLQsNC7","bWF5xLE=","INGH0LDRgdGC0L4=","IHRyYW5o","2KfYptmE","44KI44GG44Gq","IHBvaA==","7IOB7JyE","IHPhuq9j","2YPYsw==","INC80YM=","Ljo6","64g=","wrsK","INmG2q8=","2ZDZhg==","0L3QuNC60L7QvA==","0YXQsA==","IM68zr/PhQ==","IE5ndXnhu4Vu","INCy0YvRgdC+0Lo=","INCf0L7QtA==","INC/0YDQuNGA0L7QtA==","4KWL4KSn","4KS/4KSV4KSy","0LjRgNCw","64uk6rOg","IG1hasOt","IHbDuW5n","IHRhcmloaW5kZQ==","INCy0LDRgA==","0L3QuNGC0Yw=","zrXOuc+C","IOWHug==","ZHnFvg==","z4TPjs69","5L2T6IKy","IOC5gOC4pw==","IOCkheCkmg==","INin2Ybar9mE24zYs9uM","4KWN4KSv4KSu","IGdlbGnFnw==","5rmW","INin2qk=","INC/0LvQsNC9","a3l0","2KfYqNuM","zrrOuQ==","IGNodW5n","4KS+4KSo4KSV","c8Sx","IHRpbmg=","INGB0YLQvtC7","0YHRgtGA0YM=","INC70LjRiNC1","INCy0LjRgNC+0LE=","aWxtacWf","INC30ZY=","57uG","5YCS","44K344Oj","5a2p","IOC5guC4o+C4h+C5gOC4ow==","7Zmc","INCx0YPQtNC1","IHlha2xhxZ8=","6Ieq5YiG","INmB2Yg=","0KHQog==","IHNvcnVu","4LmA4Lig","IGPDtA==","0LLQuNGH","65Ok7J2Y","IHRyaeG7h3U=","IHLDtQ==","IOOBqw==","xJ9pbQ==","aXlvcnV6","6Jw=","4KWN4KSw4KS1","INiz2b4=","IOyEnOyauA==","zrTOtQ==","0LXRgNGI","INij2LM=","5Lqe","6K+N","0L/RgtC+0Lw=","4Lik4Lip","INiz2KfYstmF2KfZhg==","IGx1w7Ru","2YfZiNix","Y8O8","0LDRgtC60YM=","IG9sYWJpbGly","IOyXsOq1rA==","0LXQvdC90L7QuQ==","IOaIkQ==","INC90LXQs9C+","IC4qKioqKioqKioqKioqKg==","4Li04LiY","IOOCtw==","2KrZgQ==","0J/RgNC+","IGhha2vEsW5kYQ==","xI1uxJs=","IE3hu7k=","6b0=","IM+Dz4TOv869","IMOibQ==","wqfYuA==","IMWfaXJrZXQ=","5oOF5Ya1","INii2YXZiNiy2LQ=","zrvOtc+F","2YXZhw==","6KaP","44Go5oCd","INmI2Lk=","z4jOtw==","z4HOv8+N","IMKgCg==","zrTOtw==","0YjQvtCy","5Yik","IG3huq90","5ou/","4LiZ4LiU","6ZmE","4LmJ4Lih","IMSR4bqhdA==","IGfDvHplbA==","bcO8xZ8=","0J7Qkg==","54us","66as66W8","INC/0LvQsNGC","IG5naOG7iw==","INGC0LDQutC40YU=","0LHQuNGA0LA=","INC90LXQug==","0YHRjNC60ZY=","2L
HZitin2LY=","b251","4KWL4KSu","IEdp4bubaQ==","6J6N","6bI=","IEdlbmVs","5Yq/","INCy0ZY=","5aeQ","6Kmm","INC20LjRgtGC0Y8=","IOyYqA==","5Ye65p2l","IHThu5E=","IGxhbw==","zq/Ovw==","IM6gzrE=","0L3QuNGC0LXQu9GM","6ZqO","INCy0LjQutC+0L0=","INmB2LnYp9mE","4LmA4Lio","z4zOsw==","INC+0YDQs9Cw0L3QuNC3","INC10LzRgw==","INmK2Lk=","INmF2Kg=","4KS+4KSy4KSv","IM6cz4A=","6bg=","w7lh","6ri4","IMSQaeG7gXU=","zrXOr86/","5LqJ","xrDhu6N0","0YDQsNC30YM=","INC+0YLRgNC40Lw=","INi32Kg=","IOS7pQ==","5paX","67Cx","4KSH4KS4","66eM7JuQ","44CB44Gd44Gu","IOuVjOusuA==","INii24w=","0KHQoA==","2LbZhA==","5pON","a2F6eQ==","4Liq4Lin","w6JuZw==","4KSC4KSt","0L3RltGH","4Lix4LiH4LiB","INio2LHYsdiz24w=","2LHYr9mH","IG3huqt1","4LmI4Lin4LiH","INiv2KfZhti02q/Yp9mH","ZMSxxJ8=","IFThu5VuZw==","56ys5LqM","Y8OtbQ==","IGLDtnlsZQ==","67aI","INmF2YbYp9io2Lk=","4KWD4KS3","0LXRgtGL","5Ya3","5Zut","INiq2YjYrNmH","5Yi7","5p6B","4KSf4KSo","0LvQsNC9","IO2DgA==","5L2Q","INC+0LHRiw==","5bid","7Luk","5a6I","6LW35p2l","IOODrA==","546J","4LmA4Lir4Lil","0LjQvdC1","4Lir4Liy4Lij","6ZqP","INCz0LDQtw==","INin2YTYudmF2YQ=","4KWB4KSd","z4HOuc6/","IHbDoW0=","INi52YbYrw==","2YbYr9qv2KfZhg==","77yM6YKj","INC90LDRhdC+0LQ=","w6Fubw==","24zYp9mG","INij2Lk=","INGA0LDQtNC4","INC80LXQvdC1","IMO6ZGE=","z4fOvQ==","0YPQu9GP0YA=","4KWA4KSq","IHBvdcW+w60=","IOS4","INmC2KfZhtmI2YY=","zrnOus6/z40=","w6F5","IMOnw7Z6","z4TPgQ==","2YbYp9mF","4Li44LiV","5ZOq","2YrYqA==","5Lmw","0JTQu9GP","IOugiOuyqA==","4Li44Lia","0L3Rg9GC0Lg=","6L27","IM6czrE=","IOim","0LDRgtC60L7Qsg==","IOuIhA==","IHR1eeG7g24=","2Y7ZhQ==","INCy0YvQv9C+0Ls=","IHN0dWRp","IHDFmWVr","INC30LDQvA==","IG1hdGVyaQ==","5Y6L","INCw0Ls=","IOC4muC4ow==","2LfYrQ==","INmF2LHaqQ==","IOyLrA==","INmC2KfYqNmE","INCQ0LvQtQ==","xLFudMSx","IOW7","xLBL","64WE64+E","0YvQstCw0YLRjA==","IGRldmxldA==","56S+5Lya","64Kg","IGtvbGF5","INGA0LDQt9Cy0LjRgtC4","0LDQtNC4","2KbZitiz","YWTEscSfxLE=","zpHOmw==","IGhvYQ==","IOC4qA==","xLHFn3TEsXI=","0YDRjg==","INC60LDRh9C1","vOWQiA==","5YW0","IOq3uOufrA==","INC80ZbRgdGC","IN
C80L3QtQ==","44O844K6","56eA","INi52YTZitmH","IOyLnOqwhA==","IOCkmOCksA==","INGD0LM=","5Y+R5bGV","xLHFn8Sx","IOyInA==","IO2ZnA==","5qGj","IG5va3Q=","bMOpbQ==","0LXQvdC90YvQuQ==","INio2YU=","4KWH4KSv","0L7QtNCw0LI=","4LmC4Lij","77yM5pyJ","2KfZitin2Ko=","2KfbjNmH","IOCkieCkquCkrw==","IHNtxJs=","2LTYrw==","0KjQkA==","INin2YXYp9mF","5r+A","IGhv4bqhY2g=","0L7QsdGA0LDQtw==","4KWL4KS5","INGA0LXQsdC10L0=","0LjRgtC10LvRjw==","44Gq44GM44KJ","2LPYp9mE","IOC4iOC4sw==","INiu2KfYtQ==","IGdlcmk=","4KSY","IOy6","4LmB4LiX","4oCM24w=","2q/YsduM","2KfZhdio2LE=","0YjRgw==","IHBob25n","0LjQvNC+","0L/QsA==","IOy1nOqzoA==","INC90LDQvA==","b3N0w60=","aXNpbmk=","INC00YPQttC1","0YHQutC+0Lw=","INC/0YDQvtC00YPQug==","z4zPhM63z4TOsQ==","YWxu","aXNpbmU=","6L+c","0LDQu9GM0L3QvtC5","4KSk4KSw","dMSxxJ8=","IOuS","6L+Y5piv","INmF2KvZhA==","7Jyo","776Y","5Yi4","57aa","2KzYp9iv","INC60YM=","5YCR","b3Z1","IHPEqQ==","IOygkA==","INGD0YDQvtCy","4KS/4KSa","b3ZhbGk=","INmI2YY=","IOydjA==","INC60LM=","4Liy4LiY","z4TPgc6x","xb5keQ==","4LmM4LiV","IG7Em20=","INCm0LU=","bm9obw==","IOuLpOyLnA==","IHTDqXRv","IGJp4buDdQ==","IFnDtm4=","IHByw6FjZQ==","4KWJ4KSw","IGNow60=","0L7QstC+0Lk=","IG3hu58=","6Kqq","z47Pgg==","0LLQvtC70Y8=","44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA","5a+m","6bue","IOCkj+CktQ==","z4XOvc6x","5bKh","a2jDtG5n","IHDFmcOtcGFkxJs=","5Zc=","INio2K/ZiNmG","z4POus61","IGRpa2thdA==","IEFuY2Fr","IHRp4buHbg==","6Z2Z","IOydvOuwmA==","IMSNbGVu","7JWF","4KS+4KSH4KSo","44Gj44Gm44GE44Gf","IOydtOyaqQ==","2YjZhduM","aW7DoQ==","4bq3bmc=","z47Pgc6x","2YbZitip","0LLQsNC2","6I63","5ama","IMWfdQ==","IOOBig==","INiv2LHYqA==","IGRp4buFbg==","xZllYmE=","YXPEsW7EsW4=","572R56uZ","0L3RjNC+0LPQvg==","INin2YTYo9mI2YQ=","zrnOus6tz4I=","IHrDrXNr","0L7Qu9C+","INGN0YLQvtGC","IHBva3Vk","6LK7","0LXRgNGW0LI=","44OV44Kj","0LjRgtGD0LA=","IHZ5ZA==","0L7Qu9C+0LY=","0LvRj9GC0Yw=","2YLZhQ==","5rSL","5qeL","INi624zYsQ==","IHN0xZllZA==","2LjYsQ==","IGhpw6diaXI=","zrjOtc6v","em5paw==","0LTRiw==","bHV2","INmF2KQ=","INqv2LHZiNmH","IO+8iQo=","dG
VyaQ==","IM+Fz4DOvw==","dm9q","INio2LnYtg==","IGJpbGlu","INix2YjYtA==","INC+0LHRj9C3","IO+7","2LPZhg==","IM+AzrE=","7Y28","IHTDrW4=","IMK0","7IKs7J207Yq4","IHBvZG9i","0YnQtdC1","IOWNlw==","IGJ5Y2g=","0L7Qt9C4","IFbEg24=","2K3Zhw==","5a2m6Zmi","IMWZZWts","66a964uI64uk","INC/0YDQvtGB","zrrOrA==","IGJhxZ9sYWTEsQ==","4buneQ==","0Y7QtNC2","4KS+4KSP4KSX","4KSC4KSa","IOq0gOugqA==","INCy0L7Qv9GA0L7RgQ==","INGB0YLQsNGC0Yw=","IHlhdMSxcsSxbQ==","0L3Rg9C70LA=","2LHYp9mB","IMOnZcWfaXQ=","IOCkieCkpg==","5aSu","INC/0L7Rj9Cy","5Zu95a62","INGB0L7QvtGC0LLQtdGC0YHRgtCy","7JWh","INiu2YjYp9mH2K8=","xaHFocOt","wqDQvw==","IE5ow6A=","JycnJw==","772o","w4U=","IO+6","INii2YXYsduM2qk=","bGFyxLFtxLF6","2KzYpw==","2YHZgg==","IOG7","IOyVoA==","INiy2KjYp9mG","INGC0LLQvtGA","0L3QuNGH0LXRgQ==","INC60L3QuA==","2K7Yr9in2YU=","4Lif4Lij","IOy5mA==","4Lin4Liy4Lih","INmF2YfZhQ==","IHN0b2w=","IGVkaWxlbg==","IHBlaw==","2KfZhtin2Ko=","0LDQu9GM0L3Rlg==","INC90LXQvtCx0YXRltC0","4LmE4Lin","IOCktuCksA==","IO2MkA==","0pE=","INC90LjQvA==","IOC4mA==","5pig","5LqS","IGJhxZ9hcg==","xb5p","INC80L3QvtCz","bGVuZGk=","w6F2YWrDrQ==","bmljdA==","INC00YPQvA==","6Zmp","z4PPgw==","aWt5","0LDQu9GM0L3Ri9C5","INmF2YbYqg==","5a6u","LdC30LA=","0LXRgNC6","5aGU","IM68zrXPhM6x","b8SfdW4=","zpfOnA==","4KWI4KSC4KWkCg==","xI1reQ==","5bmz5Y+w","4KWL4KS2","IG9uYQ==","IGJlYw==","7KI=","IGPDonk=","a8O8bg==","IOCkiA==","IHLhu5luZw==","0LXRgNCx","5bm4","776Q","INC/0ZbQtNC/0YDQuNGU0Lw=","55Sj","IM+EzrU=","INmG2YLYtA==","0L7QstC40YU=","INmB2Yk=","0JrQsNC6","2Y7YsQ==","INCp","0LDQu9GM0L3Ri9GF","IGvDvMOnw7xr","6K23","5ouF","aWNhcmV0","INix2YHYqg==","INC+0LTQvdC+0LPQvg==","0YjQuNC8","INCx0ZY=","IHV5Z3VsYW0=","IOaL","5L2b","dWN1","ZMOt","xZg=","2KbYqQ==","6rG4","2Yw=","IM6gz4HOvw==","IHllcmluZQ==","INGW0L3RhNC+0YDQvNCw","IOWklg==","5LuV","0L3QsNCy","YXJhc8Sx","4Lit4LiZ4LmE4Lil4LiZ","2KfYtNiq","2LLZig==","5qmL","IOOCqw==","6IO95Yqb","5aWX","IHByb2g=","INC/0YDQsNCy0LA=","4bubcA==","IOC4guC4reC4hw==","IOu0","IGzDumM=","IOmV","2KjZiNiv","cnVwYQ==","2K
fYstmF","INC60LDQvQ==","xLFsxLFt","INmH2K8=","44CAIOOAgCDjgIA=","0YvQstCw0LXRgg==","2K7Yp9mG2Yc=","0YPQutGC","IOeZvuW6pg==","IG7Em2Nv","0LXQvNC+0L0=","IOCkheCkqg==","IM6M","w7xuw7xu","5paH5YyW","5LmO","5LiK55qE","2YTZitmF","IHTEm2No","2KfYs9io","4oCZ0ZQ=","INqv24w=","IOq3vA==","IHRy4bq7","zrzOrc69zr8=","44GT44Go44KS","7J2064KY","5ZaE","IHRy4bqj","5YiG5p6Q","IGTEm2w=","0YPRgdC60LA=","INC80L3QvtCz0L4=","4KWI4KSw","zrzOsc+Ezr/Pgg==","IG3DrXN0bw==","IOqwgQ==","INC/0YDQvtCz","YmHFnw==","0LDQudGC0LU=","IGPhu5U=","5b+c","77yBCg==","w6fEsQ==","IGJpcsOnb2s=","IO2YlQ==","57WM","IEV2cm9w","INGB0L7RhtGW","5LuW55qE","IM68z4DOvw==","5aWI","INqv2YQ=","2YjZhNip","5rWO","INqp2Yg=","seS5kA==","44GX44GP","57qz","0YHRgtCy0LXQvdC90L4=","6Zui","4KS+Lg==","IGdlcsOnZWtsZcWfdGly","IGvEsXI=","7LM=","INCz0L7RgdC/","5bmV","7IS8","wrsuCg==","0LrRg9GA","INix24w=","5pu+","2YjYsdmK","0LvQtdC60YHQsNC90LQ=","2LXZgQ==","IGPhuqNuaA==","5bGC","44KG","INiq2LM=","7LC9","6riw66W8","IOC5gOC4hA==","55+t","INGB0YLRgNC+","IM+Dz4TOuc+C","4KWN4KSv4KS1","INi52YTZhQ==","INGB0LjRgtGD0LA=","INGJ0L7QtNC+","5ZCb","2YXYsw==","INC+0YLQutGA0Ys=","IHNwb2o=","IMSRxINuZw==","IHNhdmHFnw==","4Li14Lij","c2vDqW0=","IOihjA==","6bk=","INmK2YXZg9mG","0L7QstCw0L3Qvg==","INC/0YDQsNCy0LjQu9GM","IGNoaeG6v2M=","6Ii5","6ZO2","INC+0YLQtA==","IOydgA==","7YWU","IE5lag==","0L7QvdC1","IGvEsXo=","0L7Qu9C+0LPQuNGH0LXRgQ==","INC60YDQsNGX","4Lia4Lit4Lil","5qW8","INiq2YXYp9mF","INio24zZhQ==","INGB0YPQsQ==","dsO9","0YHQutC40LU=","64yA66Gc","Pz8/Pz8/Pz8=","YWJpbGlyc2luaXo=","0LDQvdGB0L7Qsg==","5Luj6KGo","IOunpOunpA==","0L7Qu9C+0LPRltGH","zrzOsc69","0LDQutGB0LjQvA==","44Kk44Or","IHThuqNp","2YXZiA==","5a6X","bmVt","IGtob+G6o24=","INC/0LDRgg==","0LDQvdGC0LA=","INC/0L7QvNC+0Yk=","IHZvZA==","IGtheW5haw==","z4PPhg==","4KWC4KSk","ZHXEnw==","0LDRgtC40YHRjw==","IOelng==","INGB0LvQvtCy0LA=","0YDRg9C60YLRgw==","IG3Em3PDrQ==","2Y/ZhQ==","0LfQvdCw0YfQsA==","IOiJ","5a2m55Sf","5rSl","2Y7Zig==","6KeI","IOWuiQ==","IGfDtnLDvMWf","w6FsbsSb","IOuUsOudvA==","INmF2YjYr
NmI2K8=","IMSR4bup","IMOnYWzEscWfbWFsYXI=","INGP0LrQuNGF","INin2KzYqtmF2KfYuQ==","zrzOtc69","6I6J","56ev","7LaV","4KWN4KS24KSo","IHjDqXQ=","INCy0YLQvtGA","546p","wqDQnQ==","0YjQuNC1","0L7RgNC4","2KPYsw==","IHRodeG7kWM=","64uI6rmM","65WM","0YDRg9C/","0YHRj9GC","0LfRiw==","INGB0LzQtdGA","IHZ5Yg==","IOydtOyDgQ==","4KSa4KSo","IGdlbGRp","27HbsA==","zrnOus+Ozr0=","IMSQ4bupYw==","INC00L7RgdGC0LDRgg==","IMO2bmM=","6Kaq","IGFkxLE=","dW5jYQ==","INin2YTYqtix","55W2","INCk0LXQtNC10YDQsA==","0LvRj9GO0YLRgdGP","INmD2KfZhtiq","5o6i","INGD0LE=","IM66zr8=","4KS+4KSH4KSf","0LfQvQ==","IG3DtGk=","IOOCtQ==","INC90LDQstGW","57u85ZCI","INC80LjQvdGD0YI=","ZMSxaw==","0YDRg9C0","5ZyW","6rCk","IMSRb8Ogbg==","6KQ=","4KWN4KS14KSw","IMOcbml2ZXJzaXQ=","0LDQvdC+","6Zuo","IHbFoWVjaG55","IOuLpOydjA==","IEN1bWh1cg==","INC80YPQtw==","YcWfdMSxcg==","IOqxsOuemA==","IOmh","xb5pdMOt","IOC4nw==","IHRodeG6vw==","INC80YPQtg==","IM6Rzr0=","INiv2YjZhQ==","INGB0LjQvQ==","IM+Jz4I=","bWVsZXI=","IHBvxI0=","INC60L7Qu9C40YfQtQ==","IEvEjQ==","6LO9","INC+0YHRltCx","5Y+l","IELDtmw=","4LiY4Lij4Lij4Lih","IGPhuqFuaA==","5bCH","INC90L7RgQ==","6IS4","IGdlbGly","0L7RgNC+0L0=","4KWN4KSw4KSt","57uH","4Li44LmJ","4KS+4KSu4KSy","IGPDonU=","0ZHRgg==","IDp8","44KM44Gm","IHBvc2xlZA==","44K544OG","0ZbQu9GM0Yg=","0LXQvdGC0Ys=","2K7Yr9mF","INio2KfYtNqv2KfZhw==","IHRoxrA=","w6F2w6Fuw60=","64qQ","INij2K0=","2LHYp9iv","INio2LPbjNin2LE=","5Yiw5LqG","Ijsi","5bCO","IMO2cg==","4LiK4Liy4LiV","Z2VudXM=","IHlha8Sxbg==","IMOtdA==","cmVnbnVt","IGZpeWF0","0L3RltGF","5Zyw5pa5","IGJpbGdp","0LrQsNC8","IHNwb2w=","2KfYptmK","INmK2YY=","4Liy4Lir4Liy4Lij","INio2q8=","6ZiF","INin2YTYtNix","woE=","INGW0L3RiNC40YU=","IHRy4bqhbmc=","54Gj","IGPhu7Fj","0LrQsNC9","6IuP","w5Q=","IGzhu51p","0Y/Rhw==","INmI2K0=","7Iic","xbg=","INCy0L7RgdC/","7KGM","xI1uw61jaA==","2K7YsdmJ","2KfYptmK2Kk=","IHN14bqldA==","5oeJ","2KfYrduM","IG7DoXo=","6L+Z56eN","INC30LDQsdC10LfQv9C10Yc=","INCn0LXRgA==","INC30LTRltC50YE=","5Y+m","5ous","4KWB4KS3","zrzPhg==","64OQ","0JXRgdC70Lg=","6aw="
,"IO2DnA==","IOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgA==","INC80Ls=","5bSO","2YHYuQ==","INmC2K/YsQ==","IHbhu5Fu","5aa5","INCd0LDRgQ==","4KWN4KSr","44K444Oj","IG3EsQ==","0LXQvdGB","0LHRg9C0","INit2KrZiQ==","IOyytA==","INGW0YHRgtC+0YA=","IGdp4bqleQ==","zrPOv8+B","65CY7Ja0","IO2C","INCe0LTQvdCw","INmG2YXZiNiv","INCy0LjQv9Cw0LQ=","IOyekOyLoA==","IGpzdGU=","IOuTseuhnQ==","ZWt0ZW4=","INGA0LXRhw==","cm9kbsOt","2LPYqtix","xLF0","5LmF5LmF","INiu2YTYp9mE","IOem","dWx1aw==","bGVuZW4=","aWxpcA==","6LSi","IOCkheCklQ==","IFnEsWw=","IOOAgOOAgOOAgOOAgOOAgA==","IOCknQ==","IELDrG5o","IG9sbXXFnw==","2KfZhNil2YbYrNmE2YrYstmK2Kk=","0LzQtdC90L3Qvg==","YWxuxLF6","INi02LHZg9ip","INiz2YbYqQ==","6LSf","5L2c5ZOB","IOyVvQ==","INC00YDRg9Cz0LjRhQ==","IGJhxJ9sYW50xLE=","0L7QtNGD","55qE5piv","4Lix4LiZ4LiU","INC60L7RgtC+0YDRi9GF","INin2YTZiNmE","6riA7IOB7JyE","IM+AzrXPgQ==","66as7JWE","aWJhcg==","IOiD","44Gf44GE","w6Fq","IOychO2VtA==","P+KAnAoK","IO2OmA==","INC90LXQuQ==","INCX0LDQug==","INCS0ZbQtA==","0LXQu9GW","6K++","5Ymv","bWFkYW4=","5pyr","IM+Az4HPjA==","INC/0YHQuNGF","INGC0ZY=","2YPYp9iq","IHZ5c29r","6rSA66as","w7xsdMO8cg==","IOC5gOC4rQ==","IO2VqQ==","552j","INGA0LjRgQ==","0LXRgNGM","INqp2YTbjA==","IOODng==","IHBow61h","5as=","2Kfarw==","IOmi","INmG2YHYsQ==","INis2KfZhg==","IHlhcw==","0LbQtdC90LjRjw==","INC70YPRh9GI0LU=","IOe6","INC80L7QvQ==","INiq2K4=","INi024w=","INC90LXQutC+0YLQvtGA","0LDQu9GM0L3Ri9C1","IG9iY2hvZA==","IO2VqOq7mA==","IHJpw6puZw==","44GV44KM44KL","0L7QutGD","INCh0KjQkA==","66eB","IE7hur91","IEHEnw==","INC00LLQtdGA","4KWL4KS3","IGtoaeG6v24=","0L3QtdCz0L4=","7LGF","4Lix4LiV4Lij","bWFsxLE=","INmK2Kc=","56eR5oqA","4Li34LiZ","4Lir4Lih4Liy4Lii","INiu2LU=","5Yac","w61tZQ==","INGN0YLQvtC5","IOyXhQ==","IOS5","5Lyv","J8K0","2YXZitmE","4Lit4LiH4LiE","a292w6E=","6L+Z5LmI","44CC5oiR","7JeQ7ISc64qU","IOyaqQ==","67mE7Iqk","IOymnQ==","SVRURQ==","IOuqqOuToA==","IHNwb2xlxI1ub3N0aQ==","INCy0LjQug==","IHTFmcOt","6bM=","INiu24w=","IHBvxb4=","INC40LzQtdC10YI=","IGTEm3Q=","
INmF2K/ZhA==","INC80L4=","5Y2P","ZW7DrW0=","6Yk=","2KfYuA==","IHRlxZ8=","IHZlxZllag==","TElD","7KeA64qU","0YvQstCw0Y7Rgg==","INC+0YDQs9Cw0L3Rlg==","bsOtbWk=","zrjOrQ==","44Kv44Op","44O844Oz","0LvQuNGB0Y8=","aW1kaQ==","5oY=","77qO","IOyatOyYgQ==","zrrOsc69","IOuztQ==","INCG0L0=","cGxpY2F0aW9u","dGFo","INCQ0LI=","IGPhu5luZw==","0LDQu9GM0L3QvtGX","INiv2YjYsdmH","4KWN4KSw4KSv","INiu2Yg=","INCy0YDQsA==","2KXZhg==","6IKJ","IG95bg==","IFTGsA==","INmH2YXYp9mG","INCx0ZbQu9GM0YjQtQ==","5oyv","2KfZhdip","5bqr","INGA0LXQtg==","INiv2KfYsdmG2K8=","0YDQuNC5","IOaM","IHNvbnXDpw==","IHThuqM=","4Lix4LiH4LiE","67Cb","INC80L7QvA==","0LLQuNGH0LDQuQ==","LuC4hA==","IOCkhuCkiA==","5YGH","IHBvc2t5dA==","INGB0YPQvw==","xLF5b3JkdQ==","0LDQu9C1","0LjRhg==","IM64zq0=","44KH44GG","INGB0LLQvtC5","4Lih4LiZ","IG7hu69h","dm/FmQ==","2KfYs9mK","6ZKx","44GX44Gm44GE44Gf","IMSR4bqneQ==","2KfZitix","IGFyYcWfdMSxcg==","7KM=","44Go44Gv","INGB0L/QvtGA","IOqwgOyepQ==","6LyJ","4pah","IOyZhA==","0L7RgNCw0Y8=","z4HOtc6v","INGN0YLQsA==","66m07KCB","7J207Iqk","5L2z","5pma","IGt2YWw=","IG7hu5Vp","0YLQsNC80Lg=","INC/0L7Qu9GW0YLQuA==","IMSwbmc=","0L3RltGB0YLRjg==","IOC5gOC4gQ==","IOuvvA==","6JQ=","z4HOr86x","5o6I","IOeC","INmG2YXYp9uM","IOyeoQ==","5p62","2KfYqNmC","0YHQvtC9","0LXQvdC90L7Qs9C+","INmF24zZhNuM","IGt1cnVt","4LmM4Liq","IOy0nQ==","IG7Em2tvbGlr","INmA","INC30LDRgdGC0L7RgQ==","4LiU4LiZ","2YbYr9in2YY=","IEphcA==","6YOh","4KWN4KSt","IOC5gOC4ig==","IOKAqw==","6aOe","b3ZhdGVs","INGH0LDRgdGC0Yw=","IGLhu5U=","44Kv44Oq","4Li04LmM","INCy0LjQtNC1","dmFpbA==","zIk=","xJ9pbmRl","44Go44KC","4oCM2qnZhtiv","IOuFhA==","INin2YLYqti1","772X","z4HOuc+D","0LfQtA==","6Jm9","IHRob+G6oWk=","INmI2LI=","IG3DrXQ=","INGF0L7Qu9C+0LQ=","INC60YPQvw==","0LDQvdC40YU=","IG5ow6xu","44GL44Gq","INCa0L7QvA==","z4TOtc+B","77yM5Y+q","IG9sdXA=","IGjhu49p","65E=","IG7Em2t0ZXI=","aXPDrQ==","INCy0LjQutC+0YDQuNGB0YLQvtCy","7J6h","IOCkleCksg==","IOycoOyggA==","INC/0YDQuNCx","6Ium","INC80L7Qsg==","IOC4q+C4mQ==","65CY64qU","0L7QutC+","INC+0LHQtdGB0L8=",
"IGtleg==","0LvRj9GF","INC/0YDQvtC40YE=","INC/0L7QstC40L0=","INCa0L7RgA==","7LyA","INGB0Lg=","IOS5iw==","IOKAlAo=","0YHRg9GC0YHRgtCy","57A=","IOCkoA==","0L3QsNGC","IHN1eQ==","INGB0Ys=","INmG2LTYp9mG","INC90LDQv9GA0LDQsg==","INGG0YzQvtC80YM=","5piv5LiA","IG3DvG0=","0ZTQvNC+","INin2LPZhNin2YXbjA==","IHphbWFuZGE=","2YjZhdin2YY=","2KfZhNit","xaF0xJtuw60=","INCa0LDQug==","pO2UhA==","INm+2LHYrw==","Q8OhYw==","zrXOuc6x","INis2Yg=","IMSRb+G6oW4=","IOCkh+CkpA==","INC30LDQvQ==","INmF2YbYt9mC2Yc=","INmF2LnZhA==","IGRva29u","5ZC4","aWNrb3U=","5bCB","INC60LjRgQ==","4Lix4LiH4Lir4Lin","aXNwZWNpZXM=","INC90LDQv9GA0Y8=","5rqW","IOCknOCksg==","4LmA4LiJ","TEFS","INGD0YHQu9C+0LLQuNGP","IFdpa2lzcGVjaWVz","4Lij4Liw4LiU","IG1leQ==","44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA","4LmH4LiI","5b6S","dGFjaA==","dW11eg==","zrrOtw==","w4o=","IMO8bg==","IEJJVFRF","INmF2LHYqNi5","44K344Ol","4KS/4KS44KSV","2LfZiNix","INCy0L7RgQ==","776f","IHlhecSxbg==","44GL44KK","0LvQuNGP","INC/0YDQuNC9","kW5n","INmG2K4=","IGx6ZQ==","4KWN4KS34KSj","INCx0L4=","IOq4gA==","IGdlbGnFn3Rpcg==","4Lib4Lij4Liw4LiK","5b2h","IOOCqg==","44GI44Gm","0L3Rg9GC0Yw=","IOe9","INC80LDQsw==","44Gr44Gk","0L3QvtGB0YLQtdC5","INmE2Yo=","5oCq","0Y/RgtGB0Y8=","4LiR","4KS/4KSv4KSu","IOOAjg==","0YDRjA==","IG3huqFuZw==","dMSxbQ==","INC/0LXRgNC40L7QtA==","0L7Qs9GD","INC60L7RgtC+0YDQsNGP","66as6rCA","IOOFoQ==","INis2KfbjA==","INC/0L7RgtGA0ZbQsQ==","xaFlbg==","4Lit4Liw","2KjYuQ==","2J8K","IOuwqeuylQ==","INCz0L7RgNC+0LQ=","INCY0L0=","INC+0LrQsNC3","2LHZiNiy","IGlsacWfaw==","5a6j","Zm9ybWFu","YWRhxZ8=","2YrZhNip","INCa0LDRgA==","IG3huqV0","5oWL","0LzQvw==","4LmC4LiZ","INit2YLZiNmC","INC00L3Rjw==","IOuSpA==","4KS+4KSV4KSw","7LKY65+8","4oCM2KI=","aGFuZ2k=","6KGM5pS/","YWxpeWV0","IOyynA==","IFlhcA==","4LmC4Lij4LiH","7KeA64W4","2Y7ZkQ==","zpHOmQ==","w6FuYQ==","YW5kxLFy","4Lij4Liw4Lia4Lia","b8SfbHU=","4Liy4LiI4Liw","4bqpeQ==","2KfZiNmE","INC80LDRgtC10YDRlg==","zp/OnQ==","IGluZm9ybWFjZQ==","2KrYuQ==","4Lia4LiZ","IMSMZXNrw6k=","IHRlbWVs","Ojo6Ojo6Ojo6Ojo
6Ojo6Ojo6Ojo6Ojo6Ojo6Ojo6Ojo=","IGNoaWE=","LdGB","0L3QtdGA0LM=","IOywvg==","0YDQuNC0","0LvQvtGB0Yw=","2LLZhA==","6rCA64qU","YW7DqQ==","INC90LDQstGW0YLRjA==","5LiT5Lia","IOqyveq4sA==","IHDFmWV2","0LXRgtC4","IO2UjA==","0L3Rj9GC","4KWB4KS2","0LvRjtC0","0LLQuNGJ","5bC+","55qE5LqL","IOuQnA==","2LHZiNmB","IOWlsw==","zrrOrg==","IFR1eQ==","IOqyg+ydhA==","IGJ1bnU=","INGA0LDQt9C70LjRhw==","IETDvG4=","44Kt44Oj","0YDRg9GB","INC80Lw=","bG92ZW4=","IG90ZXY=","bm9sb2o=","RVPEsA==","w7xw","IOiC","zrnOus+Mz4I=","2LbYp9ih","INC/0LXRhw==","xZnDrWtsYWQ=","44GT44KN","xaF0w60=","INio2LHarw==","44GM44GC44KL","0ZbRgdGC","4KWJ4KSV","z4DOtw==","INin2YTZhdiz2Ko=","INC30LDQuQ==","IGNoxrDGoW5n","0L7RgtGD","INCh0LDQvA==","xaFldA==","IOyeiOyXiA==","INmB2KfYsQ==","0ZbQvtC9","44OX44Ot","IG5oaeG7h3Q=","aW5pemk=","IGNvxb4=","IOCkhuCkqA==","IHN5c3TDqW0=","2LHZiNi5","YXlldA==","INmB2LHZh9mG2q8=","IOi2","6IG3","6KeC55yL","0L3QvtC6","4LiQ4Liy4LiZ","6rWQ7Jyh","a2xh","44KB44Gm","zpXOmQ==","5Z2X","IHNrdXRlxI0=","4KWC4KSc","44GR44Gm","TkdD","IOWA","INGA0L7Qt9C/","bsOta8Wv","44Oz44K5","INCS0LXRgA==","IHnDvHpkZQ==","IOuvuOq1rQ==","INmF2Yk=","0LTQtdGA","0LDQstCw","IG1lcmtleg==","jW5n","IOyCvA==","INGA0L7QsdC+0YLQuA==","INC90YzQvtCz0L4=","INC10LrQvtC90L7QvA==","INGH0LXQu9C+0LLQtdC60LA=","IOC4nuC4o+C4sA==","44OS","44Gj44Gm44GE","5LyX","INC/0YDQvtC00YPQutGC","IHlhbsSx","4KWA4KS14KSo","IGPhuq1w","IEF2cnVwYQ==","4KS+4KSt","IOyghOyaqQ==","5pWj","IOychO2VnA==","0YXQvtC00LjRgtGM","IHPEsW7EsXI=","w7xjcmV0","c3V6","5qiC","IOywvQ==","z4HOr86/z4U=","5Yia","2K7ZhA==","66CH6rKM","2KzYrw==","IM68zrHPgg==","4bqtbQ==","a2FyYQ==","44Kr44O8","IGt0ZXJvdQ==","7Juo","0YTQuNGG0Lg=","b8SfcmFm","INC90LDQv9GA0Lg=","44GR44Gp","IOma","2KrYqNin2YQ=","65+9","7JSo","7YyM7J28","z4fOsQ==","IHV6YWs=","IGTDsm5n","INCz0L7Qu9C+0YE=","z4PPhM6u","zrnOuw==","2LfZgQ==","IOq3uOuFgA==","44K/44Kk","2KfZhtqv","aW5vdQ==","0LvQvtC9","4LmH4Lih","IOCkrOCkpg==","IGtvbnVzdW5kYQ==","IG7Dom5n","44G+44Gb44KT","0YPRjtGC0YzRgdGP","5Z+5","0LXQvdC60L4=","7KCR","INGC0L7
Qsg==","IHTFmWViYQ==","2LLYp9mG","aXN5b24=","INCz0LXQvQ==","IFBva3Vk","4oCM2KfZhtiv","INCz0YDRg9C0","INiu2LHbjNiv","zrvOu86x","IHDFmcOtbQ==","IOazlQ==","INiy2YbYr9qv24w=","4bqhcA==","IO2KuA==","IMSR4buZYw==","IOq3uOumrOqzoA==","0L3QuNC3","INmK2YI=","bGHFn3TEsXI=","INC/0YDQsNCy0L4=","0YPRgdC6","5bC9","IOCkquCkoQ==","6ZOB","IOy3qA==","INin2YTYqNmK","wrg=","4Li04Lih4Lie","IHN2xJs=","INCx0LDQuw==","IG3DtG4=","IEThu68=","INi02K/Zhg==","INmB2YQ=","IHZ6bmlr","IGNo4bup","INGB0YLRgNGD0LrRgtGD","57ij","IEhvYQ==","7YyA","INGA0ZbRiA==","INCy0L7Qt9C00YM=","0L7Qu9GM0Yg=","zr/Phc68zrU=","4Li54LiZ","INC/0YDQuNC0","aWxtZWs=","INin2YTZgtix","jJM=","IHXDpw==","5aiY","ZWNla3Rpcg==","IO2FjA==","IM61z4U=","IGjDsmE=","z4HPhQ==","4Li24LiB4Lip4Liy","INGC0LXRhdC90L7Qu9C+0LM=","w7pp","IGJpbGdpbGVy","INmC2KfZhA==","ZWRs","em7DoW0=","w6FseQ==","5bqU6K+l","0LDQu9GM0L3QuNC5","0LDRgtC10LvRjw==","4LiZ4Lin4LiZ","INCf0L7Quw==","4Lie4LiZ","56S8","IHRhc2Fy","INGC0L7QuQ==","INC80LXRgdGP","INC40YHQug==","IOCkquCkpg==","zrPOrg==","2KfYrtiq2Yc=","6L+Z6YeM","IGNo4buJbmg=","INmC2LPZhQ==","2Y7Zhw==","ZXJsaQ==","5Zu96ZmF","aWxpeW9y","INi02YfYsdiz2KrYp9mG","IHZlbGs=","5Zu6","INCx0ZbQu9GM0Yg=","44O844OX","5p+Q","7Kec","IMSMUg==","INC00LXQug==","2LHYqNuM","0L7QstC40Yc=","IGthcHNhbQ==","INmE2KM=","INCw0L3RgtC4","IMO8Y3JldA==","6rKs","0L7RgNC+0LY=","24zZhduM","6KmV","IOunng==","INGA0Y/QtA==","INmH2YXYsdin2Yc=","w6Jy","2KfYqNiq","INC40YHQv9C+0LvRjNC30L7QstCw0YLRjA==","0LrRgQ==","4omh","IG9sYXk=","6I2v","IG9wcmF2","INiv2LHYqNin2LHZhw==","IOS4reWbvQ==","0LjQu9GB0Y8=","5Y2r","INin2YTYp9iz2Ko=","2YjbjNuM","0YDQtdGI","INmG2LM=","44CC5Zyo","INmE2K0=","IGtvcnVu","INmB2LHYrw==","INC+0LHQvtGA","0LXRiNGM","IHBvZG3DrW4=","IOusuOygnA==","IGRlxJ9lcmxlbmRpcg==","5LiN5ZCM","5ray","4KS+4KS54KSw","7ZqN","4KWN4KSg","0LjRgtC40YHRjw==","2KfZhNi5","IGR2xJs=","INC/0LXRgNC10Lo=","IOWFgw==","IGFyYXM=","IGFsdMSxbmRh","INCy0LfQsA==","5pKD","IG1pbHlvbg==","IOWtpg==","INCy0LDRgNC4","INin2YTYudin2YTZhQ==","J9GP","2YjbjNiz","INC80L7QttGD0YLRjA==
","44GR44Gf","7J207JeI64uk","zr/Pjc69","IOmf","IHBvc3R1cA==","w7x5w7xr","5YiK","INmC2Kg=","INin2LXZhNuM","2YjZiQ==","IHJlcHVibGlr","INCZ","gW0=","INCx0LXQuw==","4KS+LQ==","0YHQutC+0LU=","IGN14buRaQ==","6LK3","4Li14Lii4Lin","6YeN6KaB","4Li54Lih","INGA0L7Qt9Cy0LjRgtC60YM=","IOuwsQ==","5YO5","IOWJjQ==","4LmE4LiL","44CM4oCm4oCm","4KWM4KSk","2qnYsdiv","IHphxZnDrXplbsOt","4Liq4Liy4Lij","IGxldGVjaA==","bGVtZWs=","5Lq644Gu","IGTGsOG7oW5n","2KrZgg==","IOWT","5YW7","IOuPhQ==","IOujqA==","2LDZhNmD","IOydvOuzuA==","IEF5csSxY2E=","INm+2pg=","aXNpbmlu","IOyLtg==","2q/bjNix24w=","2K7Ytdi1","s+e0sA==","INC80LDRgtC10YDQuNCw0Ls=","a292w6k=","66eJ","44GV44Gb","INGC0LDQutC+0Lk=","IHRy4bqtbg==","INC70LjRhg==","IOWbmw==","0YfRgw==","IOawtA==","IGRvbGF5","5b25","0YDQuNCy0LA=","INCz0YDRg9C/0L8=","IG3DvG1rw7xu","0LvQtdC90LA=","652864qU","5Yip55So","IHJhaGF0","77yP77yP77yP77yP","5oGp","IO2VrQ==","IO2S","IOyKuQ==","IGNow6Ju","IOOCqA==","INC20LjQt9C90Lg=","55aR","44CC5LuW","66as7Iqk","0YfQuNGF","IOmmlg==","xJty","INC50L7QvNGD","IHRo4bqtdA==","IOyVng==","Y2lo","2LPZhNin2YU=","IHNpeWFz","IO2WiA==","INC60L7RiA==","z4POsc69","2YrYp9mG","IGTDtg==","4KS+4KS54KSk","0L7RgNC+0LQ=","0L7QstCw0Y8=","0YbQuNC+0L3QsNC70Yw=","2KfYptmH","IOCkluCksA==","IMSR4budaQ==","5LiN5Lya","2YPYsg==","4Li14LiE4Lin4Liy4Lih","bMSxeW9y","4KWL4KSm","IOy2qQ==","IGPhu5E=","4LmC4LiV","IM61z4DOrw==","INC/0YDRj9C8","5rOw","2KfZhNip","asOtbQ==","INCx0Lg=","xaFlbQ==","IEjhu5lp","4LiE4Lij4LiH","IGh1eeG7h24=","56+A","bGnFoQ==","INis2YfYqg==","56eL","INGG0LXQuw==","INC70ZbRgg==","IOa3","0LbRgw==","44GI44Gf","67SJ","IOuouA==","5aC05ZCI","6Z2p","44Oq44Oz","0LXQs9C00LA=","IGJlbmlt","55uf","44Gu5Lit","5Z2Q","IMOcbml2ZXJzaXRlc2k=","IGtvxZ8=","INC/0L7Qtg==","aeG7h3A=","IHDFmWlq","656o","INin2YTYo9iz","w6FybsOt","aeG6v20=","IOiK","IM60zrU=","5aix5LmQ","IMawdQ==","IOeEoQ==","INCz0YDQuA==","INC/0L7RjdGC0L7QvNGD","IMSRw7NuZw==","2KzYp9mG","IG5naGnDqm4=","INin2YTYp9mG","0YjQtdC5","4LmB4Lij4LiB","INqG2YfYp9ix","0Y7RidC40Lk=","z4zPgQ==","INix2YU=","
7LKg","INiv2LPYqtqv2KfZhw==","INiv24zYrw==","44OD44Kv44K5","4KS+4KSu4KSo","IFRow6BuaA==","IHRo4bqpbQ==","IGPDoG5n","IGTDtm7DvMWf","INC/0YDQuNCz0L7RgtC+0LI=","IGtpxZ9p","2K3Yqg==","IOuylQ==","6aOb","IGl0aWJhcg==","INCz0LvQsNCy","IG9ydGFt","IG1hZGQ=","INC+0YHRgtCw0LI=","INmB2YjYqtio2KfZhA==","IGFubGHFnw==","bGV5ZW4=","57SA","IOmj","L2xv","2YXZiNmE","INC00YPRhQ==","INmE2Kg=","0LvQtdCz","IGfDtm5kZXI=","2YrYtw==","IOC4quC4sw==","IHbDoXM=","INCf0LXRgg==","0LDQu9C+0YHRjw==","7L+g","6Zm9","5Zau","6Iie","0L3Rg9C7","xJ9pbmU=","IGdoaQ==","IOe1","2YrZhtmK","xb0=","IGjDvGvDvG0=","IETEscWf","IM6tz4fOtc65","INGB0LrQsA==","INGC0LjQvA==","INC/0L7RgdGC0LDQsg==","4LiZ4Liy4LiU","ZMO8bA==","IGR2YQ==","IOC4hOC4mQ==","IGNo4buLdQ==","IOiP","4LmB4Liq4LiU4LiH","5rCj","IO2IrA==","INGH0LjQvQ==","44Gr44GK","0LXQvdC90L7RgdGC0Lg=","0JDQnQ==","IGhlbWVu","IGFpdA==","IOCkig==","5omn","IEFCRA==","IM66zrHOuA==","5rSb","44Ki44Or","4LmJ4Liy4LiX","xZlleg==","ZMSbamk=","IHThu4tjaA==","0LXQvdC90Y/QvA==","INCy0YHRgtCw0L3QvtCy","INin2YTYqNix","2YjZhdiq2LE=","a8OhY2g=","5bqK","0LvRg9C2","INiq2K8=","5Li9","2LHYrg==","4KSC4KSW","6Ieq5bex55qE","5a6Y572R","LdGP","4LmH4LiU","6ISa","IOeV","IGnDp2VyaXNpbmRl","IGJp4buDbg==","IOC4geC4pQ==","IHlhxJ8=","IOa0","INCx0YDQsA==","2LnYp9ix","5oiw","4KWACg==","IGzDqcSN","YWxhcsSxbg==","IM6W","0LDRgNGP","44Gd44KT44Gq","xYh1amU=","44CAIA==","IHNhxJ9sxLFr","INC00L7RgdC70ZbQtA==","w63FoQ==","4KWN4KSw4KS2","4KWJ4KSo","IGdp4bqj","2KjZiNin2LPYt9ip","5a6B","IHNvdWQ=","INC60YLQvg==","ZXNlbA==","INC/0LDQvA==","IMKgIA==","IMSNbG92","5re3","4Lir4LiN","IE9zbWFu","5qaC","IOWL","77yM5YW2","IOC4hOC4ow==","IG3hu4Ft","INGB0L7RgA==","54ax","IHRodcOq","2LHYrA==","4LmC4Lil4LiB","IO2VmOqzoA==","2YrYr9ip","IGHFn2HEn8Sx","IGvhu4M=","4LiV4Liz","zrvOtc65","55qE6K+d","5rGg","INGB0YLQtdC9","IGluY2Vs","5bqt","0YLQvtGH","IHByb2Jsw6lt","z4TPgw==","4LmJ4Lit4LiZ","67O064uk","IOCkhuCklw==","zr3Osc+C","44GE44KL","IGThu6Vj","IHRvaG90bw==","65CY7JeI64uk","VEo=","INCy0LjQt9C90LDRhw==","IEJ1bnVu","4KSC4KSs4KSw","IN
mH2YXahtmG24zZhg==","INCx0Y7QtNC2","0YPRgNCz","5Lqu","IM68zrXOsw==","IHRvcGx1bQ==","44Gj44E=","0L7RgtC+","Onw=","6Z2e5bi4","4Li04LiX4LiY","6YGV","4oCM2b7Yr9uM","INC30YDQvtCx","4LmM4LiU","INC00L7Qu9C20LXQvQ==","IG3Em3N0YQ==","24zYtNmH","dmF0ZWw=","IHByb3Zveg==","IGluYW4=","4KSC4KSq","IHBhcsOn","0YDQsNGB0YI=","w7xtw7w=","IGdp4buRbmc=","5qyi","2KvZitix","IEJha2Fu","IOKIqA==","INio2KfZhg==","27HbuA==","44KC44GG","bGFuZMSx","IHllbmlkZW4=","0YbQtdC90YI=","INC00LXRj9GC0LXQu9GM","0Kk=","IHJvdg==","5a6M5YWo","IEvhu7M=","c2x1","IGzhuqV5","6aSQ","INGH0L7Qu9C+0LI=","5Lyd","IGJhxZ92","5bCI","6rOh","44CB44Gd44KM","IFDFmcOt","0LTQtdC8","INC/0YDQvtC10Lo=","4Lij4LiW","5bu66K6+","INC80L7QttC70LjQsg==","5q66","44Gh44KD44KT","5pWR","IMSNdHk=","6aaG","0L7RgNGD","IOaE","IGvDrWNo","zrvOv8+F","44GE44Gk","IGPEg24=","4bq1","IGVsZGU=","6bq7","xJ9l","IGRvYsSb","4KS+4KSv4KSw","IOODjw==","0L3QtdC9","IG3Fr8W+ZXRl","INC90LDRgdGC0YPQvw==","7Iuc6rCE","INGB0LjQvNC/0YLQvtC8","IM+Dz40=","INiz2YQ=","zrXOug==","4Lij4LiT","w6F0ZQ==","ZWtsZXI=","INCy0YDQtdC80LXQvdC4","4oCM2YfYp9uM24w=","44GK44KK","0LbQuA==","0YvQstCw0LXRgtGB0Y8=","2YXYp9mG24w=","4LiV4Lil","INi12K8=","INCy0L7Quw==","7IqI","INmD2YXYpw==","IG5o4bqxbQ==","6IGv","b3ZhY8Ot","IOunjOuTpA==","2YjZvg==","IOu4jA==","2KjZitip","dXlsYQ==","0LvQtdC90L4=","6Iy2","0YDQtdC5","IGtsaQ==","IMO8emVyaW5kZW4=","0L3QtdGC","cmHEjQ==","INC/0YDQsNGG0Y4=","IGVkaXlvcg==","44GP44Gg","IMSNYXN0","aXlp","6YqA","IGTDuQ==","2Y7YqA==","2YjZitip","5ao=","IHPEsW7EsWY=","INiz2KfYudiq","IOC4o+C4suC4og==","INC30LDRj9Cy","IGfhurdw","4Lit4Lin","INir2YU=","IFrDoQ==","INCy0ZbQtNC6","aXppaw==","IG3Ds24=","INC/0L7QstGL0Yg=","IOC4muC4suC4lw==","INGB0LjQuw==","5oOF5aCx","wqB0","INCc0L7RgdC6","IOqyg+ydtOuLpA==","IOeQ","INmF2K/bjNix24zYqg==","0L7QstC+0Zc=","zqTOvw==","57qq","0L3RltGI0LU=","INCb0Y4=","zrfPg863","INmG2LPYqNiq","bXV6","4Lij4Lin","44CB44GC","INCx0L7Qu9C10Lc=","IHRyw6FjaA==","44Om","4LmA4LiC4Liy","IOq3uOuKlA==","2KjYsduM","5qCq","65287J20","IO2MqA==","7Yq5","nLQ=","4KS/4KSh","0
YDQvtC80LU=","6K6y","INGC0L7QvQ==","0YHRlg==","IOeu","5Y+W44KK","7LCw","INmI2YTbjA==","INiz2LfYrQ==","6I+c","0L3QsNC80Lg=","VMO8cms=","5Y6C","IGZpbmFu","44Gr44Gq44KL","IG9ieQ==","VHJvbmc=","IHZ5cA==","4KWB4KSh","7J6Q6rCA","IOaJgA==","0JfQsA==","dW1sdQ==","65Od","INC80LXQvdGW","0L7Qu9C90LjRgtC10LvRjA==","IMO6xI1pbg==","IGJ1bnVu","INCg0L7RgdGB0LjQuA==","0LLRgdGP","INC90ZbQtg==","4Li04LiU4LiV","2LrYqQ==","xJo=","INiz2YU=","INCY0Lc=","4KWH4KSq","5aSn55qE","7Lmc","INC40YHRgg==","INC60L7QvdGB0YLRgNGD0Lo=","27Hbsg==","w6Js","INGI0LjRgA==","77yg","IGFydMSxaw==","5p+T","5Lmh","w610ZQ==","IE5o4bqtdA==","IM6Uzrc=","IMO2bMOn","6rW0","0L7Rj9C9","65Ox66Gd","IG5nw6Ju","INCx0YPQtNGM","zp/OoQ==","7LQ=","2YXZiNiv","zr3Ov869","zpXOnQ==","55Ge","IMWZZWs=","LeKAkA==","IE1lcms=","INC+0L/RgNC10LTQtdC7","z4HOuc69","0LvQsNCx","64Sk7JqU","INCx0LvQuNC3","IHBo4buRaQ==","INC00L7Qu9C20L3Riw==","INGN0LrRgdC/","4Lia4LiX","4Lib4Lij4Liw4Liq","INm+2pjZiNmH","IO2VnOuLpA==","z4TOv8+N","2YfZhg==","INC00L7QtA==","IGthecSx","n4E=","0YHQuNGP","4KSC4KSk4KSw","IHBvZG5paw==","ZXZp","24zbjNix","0KLQsNC6","0LrQvtC/","0L3QsNGF","2KfYs9mH","4LiT4LiR","IGtow6E=","IHlhcmF0","INin24zZhtqp2Yc=","2LfYqNmK","IHPEsXI=","INii2YXYsduM2qnYpw==","IOCkrOCksg==","a2HDpw==","IOWPrw==","IOWFtg==","LioqKg==","0LvRltC90L3Rjw==","5Lmx","b3E=","5qY=","44K8","IGbEsXI=","IGvDqg==","IOygnOqztQ==","IM+Dzrc=","0LDQvdGL","0L3QvtCy0LA=","4LiK4Liy4Lii","INi32YjZhA==","4KWI4KSv","IOy5nA==","7IK0","INC/0ZbQsg==","IGx14bqtbg==","IOCkieCkrg==","5bqD","4LmH4Lit4LiV","INiz2KfbjNiq","0LvRj9C9","IO2VhOyalA==","IGfDtnLDvGw=","INGC0LXRgNC40YLQvtGA","INmG2K0=","0LXQvNCw","IG1ub2g=","IOOBrw==","2LrZitix","INGB0LTQtdC70LDRgtGM","54G1","INCg0LDQtw==","INCz0LXRgA==","zrPOvM6x","7ZWY66m0","IGRlxJ9pxZ90aXI=","44Oz44OG","5biC5Zy6","5Liq5Lq6","7IOI","7Lmo","6Im6","2YLYqg==","INqv2LHZgdiq2Yc=","IOeOiw==","INin2YTYsNmH","zrvPhQ==","4KSc4KSw","INCy0L3QuNC8","66at","4Li04LiX","INi02KfZhw==","5oqV6LWE","5p2Q5paZ","INmG2YE=","6Kqs","5oqX","INCw0LE=","aXlldGk=","57
6F","0YDRltC3","IOC4quC4oQ==","aWPDrQ==","0LrRg9Cy0LDQvdC90Y8=","IOyVvA==","IOi9","4oCr","IM60zrnOrA==","INC00LXQvw==","44O844K/","IG9iamV2","bcOpbmE=","IGJlbGc=","IOal","IG7hu4Fu","INCz0L7Quw==","IHBvc3Rhdg==","INiq2qk=","0Ks=","INC/0ZbQtNGC","INC+0YLQvdC+0Yg=","INC/0YDQuNCy","IOWfug==","INC90LDQu9C4","xa/Fvg==","IHlhdA==","xZ9h","z4TOrs+C","0YbQtdC8","5qyh5pWw","IGLDoA==","2YjZgw==","IO2UhOuhnA==","IFBow6Fw","IOq1sA==","6LOe","IG9jaHJhbg==","IGdlcmVraXI=","IO2a","4Lia4Lil","w6FtZQ==","INio24zYsQ==","4LiC4Liy4Lii","0L7QstCw0L3QuNC5","IG1vxb5uw6k=","4pSB4pSB4pSB4pSB4pSB4pSB4pSB4pSB","w6FsdQ==","0L3Rgg==","puaDhQ==","4LmB4Lij4Lih","INGE0ZbQvQ==","IMSww6c=","4LmI4Lit4Lii","6rKo","IGhlZGVm","INin2YTZhdi0","4LmJ4Liy4Lih","5a+E","IOuLtQ==","IMO0","0LvQsNGB0Y8=","xLBU","4LiU4Liz","IGhlcmhhbmdp","IGdlcmVrZW4=","0LXRgNC10LY=","2YjYqQ==","IHDFmWVzdA==","56eR5a2m","0L7RgdGC0LDRgg==","w7xuZGVu","5YyF5ous","INiv2YfYrw==","0YjQuNGB0Yw=","0L3QtdGA","0ZbQtNC+0Lw=","IGJpw6c=","7Iut","IGhvZG5vdA==","IHplbcSb","INin24zYrNin2K8=","IHlpbmU=","4KS/4KSj","INin2YTYqNmE","IE7Emw==","IHBvbG/Fvg==","6ZiF6K+7","5biB","5byf","zr7OtQ==","IE3hu5l0","56M=","27Hbs9u5","INii2LI=","44Ge","INC80LXRhQ==","4Lii4Lih","IOao","IG90dXI=","IGThuqd1","IOuLpOyatA==","54yr","IEPDsw==","IGxpZMOt","IGFya2FkYcWf","IM6xzrvOu86s","6aG7","INmH2YXbjNmG","6Lui","IOKXiw==","64+E66Gd","woM=","4oCM2LTYr9mH","INit2YrYqw==","IG5ow7Nt","z4PPhw==","INGC0YDQsNC90YHQvw==","IHRhbsSxbQ==","57SN","IGJhaGlz","5Li+","INC40L3RhNC+0YDQvNCw","INGB0LvQvtC2","IGtyYWo=","INit2YQ=","IOODlg==","INmG2YLZhA==","INCg0L7Qtw==","IM6Rz4U=","bGFyZMSx","INm+2KfYsw==","IOyLnQ==","IOyghOyaqeuptOyggQ==","INin2YTYs9mK","2KjYp9i02K8=","4Lio4Liy4Liq4LiV4Lij","IGvDtnk=","IHJvaw==","IOyjvQ==","INGB0L7Qsw==","IGNow7o=","6Ziq","IMSNw6FzdGk=","INC30LLQtdGA","INC90LjQtw==","IMO2xJ9yZXQ=","IOODjg==","0L/QtQ==","55Kw","IOiq","2YjZhNmH","xLBN","L1JFQw==","5aGe","INCS0Lg=","L2xvb3Nl","INC/0L7RhQ==","IGdlbmnFnw==","IHRoaeG7h24=","dGnEn2k=","0YfQuNC1","0L7QvdC0","I
NC/0YDQuNGB","w6F6a3k=","IERldmxldA==","56aB","INCw0LM=","aWxlcmU=","0LjQvdC60YM=","IHZhcmTEsQ==","44CA44CA44CAIOOAgA==","IOuGkg==","4KSC4KSq4KSo","IMO2emVsbGlr","6Zqc","7Ja07ISc","2LHZitmD","2YjYqNuM","44Oz44OA","7Yyo","IOCkuOCkruCknQ==","776G776G776G776G","INmB2YY=","4KWd","IHV2ZWRlbg==","0YjQuNC80Lg=","IOC5gOC4pQ==","IOusuOydmA==","INit2LHZgQ==","INi52Kg=","44Os44OT","IOatow==","IOuYkOuKlA==","INqp2YbZhtiv2Yc=","IM6xz4XPhM+M","IOq4uA==","IGlmYWRl","IHlhcG1haw==","44OV44Kp","IG3hurk=","IHN0csOhbg==","IHN2b3U=","IHbFvmR5","IHRla3Jhcg==","4Li04LiN","IOyTsA==","b8SfdQ==","INqp24zZhA==","0LjQstGB0Y8=","IOunkO2WiOuLpA==","5Lid","4KSP4KS4","INGB0YLRgNCw0YU=","IHNvdcSNYXM=","IOq3uOufsA==","IG3DvMWf","zrvOv8+N","zrPPiQ==","IHTGsOG7n25n","IOW3pQ==","INin2LPZhQ==","0YDRltC8","4LmA4Lib4Lil","IMKgwqAgwqDCoA==","2YfYp9uM24w=","5a+6","INiz2LHbjA==","INC60LLQsNGA","INi02YXYp9ix2Yc=","INi12K0=","0L7RgdGC0LDQsg==","4KWo","IOC4hOC4p+C4suC4oQ==","7YOB","6YCC","2KjYrQ==","IGRlxJ9pxZ9paw==","6Yyy","0LXQtNC4","IG9rb2w=","INGB0L7Qvw==","IG9sbWF5YW4=","562R","27HbtA==","IGluY2x1","IOqyjOyehA==","24zYs9iq2YU=","IOep","INin2YTZiNmE2KfZitin2Ko=","aWxtZWt0ZWRpcg==","w4w=","2Y7YuQ==","IGHEn8Sxcg==","6KGb","IGVza2k=","6rCd","66C464uk","5Lq65ZGY","2pjbjA==","IOeo","INC80LXRgdGC0L4=","dsWv","4KWN4KSw4KS5","INi32LHYrQ==","INin2KjZhg==","IGhpc3M=","0L7RgNGP0LQ=","INiv2YE=","0YDQuNGB0YI=","4LiK4Lih","0LTQtdGC","4LmA4Lir4Lih","66eI7IKs7KeA","Oi46Ljo=","6YW4","IM6xz4HPhw==","IG7hu68=","INC/0L7RgdCw0LQ=","bHVt","7Lo=","44Gn44GN44KL","7Ja1","INin2YTZhdiv","0L3RltC8","2LHYp9mC","IOODiA==","IG9kcG92xJs=","IGJpcmJpcg==","IGjDo3k=","0L7QstC40Lk=","5q6L","6YO95piv","6L+q","IGFyYcOn","0LXQvdGC0ZbQsg==","5oqx","ZMOhbA==","IMSQw7RuZw==","IGhlc2Fw","INin2YbYs9in2YY=","INmK2YjZhQ==","INmG2YjYsQ==","5YmH","55eb","INmG2Yo=","0LDQu9GM0L3QsA==","2KrYqNin2Lc=","4KSy4KSs","IGtvbXVu","IHNuYWQ=","5Zuj","2LHZitiv","ZWxvcG1lbnQ=","INC40Y4=","4KWALg==","IGvEsXNh","IGRlxJ9pbGRpcg==","4LmJ4Liy4Lij","IHN2w6lobw==","IG9ibGFzdG
k=","0YjQu9C4","4LmA4LiX4Lie","0YDQtdGC0Yw=","0L7QstC+","IO2CpA==","w6F0a3k=","INin2YTZgdix","6Jit","z4TOv869","INGB0YLQvtC40YI=","2YXYrQ==","IOC5hA==","INGC0LXQsdC1","7YG0","IG3Em2xh","5o6n5Yi2","IENo4bun","7Iqo","0JDQog==","2KfYrNi5","7JmV","56m/","0L7Qu9C10LU=","4Lir4Lil4Liy4Lii","IGR2b3U=","IOOAgOOAgOOAgOOAgOOAgOOAgA==","4Li44LiC","IGJveg==","4Li04LiZ4LiE","5aSf","IGZhYWxpeWV0","IMSNw61z","44G744Gp","IDov","0LrRltGB0YLRjA==","IOykgA==","z4HOsc+C","INC+0LTQvdC+","5qKF","0YPQsdC70Lg=","0L3QvtC3","4LmM4Lih","IHbDvXJvYg==","IM66z4U=","xZlldg==","wqBC","xa/FvmU=","5Lya56S+","zrnOsg==","0YDQvtCy0LDQvdC40Y8=","IGNldg==","7JuA","w6FsbsOtY2g=","INGA0LDQsg==","57Sn","5YCf","INGf","2YjZhtmK","0L7Qt9GP","INC30L7Qsg==","IGtvbGVt","66+86rWt","57+S","IHphbcSbc3Q=","IOygkQ==","INiy2YY=","INij2YE=","IOuouQ==","IHRvbXRv","IOyyqOu2gA==","c2FnZQ==","5LiN6L+H","0LXQs9C+0LQ=","0YDQvtC2","INC/0YDQvtGG0LXQtA==","4LmM4LiZ","c2FuxLF6","4oCe2Lc=","5rS75Yqo","0L7Rh9C60Lg=","67O06riw","5Z+65pys","LdGF","0LvQvtGB0Y8=","INmH24zahg==","7JeU","0YfQvdC+0LPQvg==","IOCkl+CksA==","IOCkheCklw==","44WL44WL44WL44WL","IOOCuA==","2KfYs9ip","5YqH","4LmJ4LiH","IOy7pA==","bsO9bWk=","44Os44K5","5YuS","INC+0LHQu9Cw0YHRgtGW","INC00ZbRj9C70YzQvdC+0YHRgtGW","44Os44Kk","z4fOsc69","4LmI4Liy4Liq","INCk0YDQsNC9","2YfZhA==","bGFyZMSxcg==","2K3Yp9iq","xa9zdA==","INCy0L7QtNGL","INiv2YjZhNiq","INGB0L/QtdGG0ZY=","IHRo4bqldA==","4Lit4Liy4Lir4Liy4Lij","6aCY","IHRlcmNpaA==","IM+Az4HOv8+D","IMWZw616ZW7DrQ==","6KeJ5b6X","IGRuZXM=","0LXRh9C90L4=","44OY","INiv2KfYsdin24w=","IMWfYXJ0","67Kk","IOu2gQ==","0LXRjw==","0L3Rj9GC0Yw=","IGt2xJt0","INiq2LrbjNuM2LE=","6b6N","INix2Ybarw==","77yM5Y+v","IHBpeWFz","IHV5Z3VsYW4=","2Y7YqQ==","2KjZitix","0LjQstCw0YLRjA==","IO2XiA==","5Li2","6L+Z5Lqb","INqv2LE=","572q","5LiA5qC3","IOODqg==","INCy0L7QuQ==","IHNvc3lhbA==","4Li44LiX4LiY","4Lir4Lih4LiU","57ud","INin2YTYrNmF","INir2KjYqg==","INis2Ybarw==","0LvQtdC90LjQuA==","0LLQsNGP","INCy0L7Rgg==","5Lyk","IOC4q+C4pQ==","INmF2YLYp9mE2Yc=","0LzRltC90
ZY=","7Jis","0YfQuNC5","INmF2qk=","4LmC4Lib4Lij","a3J2","IMOtY2g=","z4nPg863","0LXQutGC0L7RgA==","0K/Qug==","IHDDrXM=","IMOWemVs","IHTGsOG7m25n","INCU0L4=","zrTOuc6/","4Li54LiU","IHTDvGs=","2LHbjNmC","LtCS","IOWQiA==","5L+C","IG9iZG9i","IGlzdGVkaQ==","0YjQu9Cw","5pyJ5LiA","INCy0LrQu9GO0YfQsA==","INiq2K3ZgtuM2YI=","INmI2YM=","IOiI","xpI=","zrzOtc+B","IOWB","IOyXhuuKlA==","wqBk","IELhuq9j","4LiB4Lil4Liy4LiH","INGH0YPQsg==","IGPhuqV1","IEjhu5M=","INmB2KfbjNmE","z4TOt86zzr/PgQ==","57GN","INio2Ko=","INC+0LHRgNCw0LfQvtC8","5rGJ","6ISR","IGdp4bqjbg==","zrXPgc6z","INCc0ZY=","6Jm954S2","IEtoaQ==","0YfQuNC90Lg=","IOCkheCkl+CksA==","7ZWY66mw","67KU","44GB","0LLQuNGF","INCy0YHQtdCz0LTQsA==","IOe2","0YHRgtCy0LXQvdC90L7QuQ==","IHnDvGtzZWw=","5ris","IHPEsXJhcw==","IM+Az4HPjg==","6ICz","2KfbjNix","2K/ZiNiv","IEFsbWFu","IHZlcmRp","INin2YTZhdis","INin2YTYqti5","2LXYqQ==","IHPEsXJh","xI1pbg==","INC/0LXRgNGI","5oqY","56mN","INGC0L7QsQ==","IO++iQ==","4Lis","5p2A","aXlkaQ==","4Li14Lie","55Om","INCw0LLRgtC+0LzQvtCx","5Lit5paH","4KWC4KSm","IGLEm2hlbQ==","IFDFmWVk","44GT44GG","4Lix4LiI","IO+9jA==","INmH2KfZig==","IHPhuqFjaA==","5pa56Z2i","55Ww","0YPRgNC9","IHbDvXNsZWQ=","IHRo4bqnbg==","77yM5omA5Lul","0YPQutCw","7ZWY64uk","IOCkrOCksA==","INC20ZbQvQ==","xI1uw61obw==","IOOBjA==","YWLEsQ==","dsOhbsOt","5rSX","INC40YHRgtC+0YA=","7J207YSw","INC10LvQtdC6","0LDQu9Cw0YHRjw==","IHpuw6Ft","INi32LHZgQ==","IHNla3TDtnI=","6rmA","2YjZgti5","INmF2YM=","0YDQtdC20LQ=","IGtuaWg=","INiq2LnYr9in2K8=","5Y2g","0YHRjNC60LU=","IOeUtQ==","5Lqs6YO9","INix2KfbjA==","Z8Sxbg==","INmG2LjYp9mF","IM6gzr/Ouw==","5LiA6Iis","IHN0w6FsZQ==","INC40YHRgdC70LXQtA==","IHpwcsOhdg==","INGH0LjRgdGC","44O844Oe","0J7RgQ==","0YHRjNC60L7QvNGD","IHDFmWlwcmF2","64yA7ZaJ","IGhhbGs=","54iG","44CB44GK","77yf4oCdCgo=","6YCP","56ue","0L3QuNGG0Yw=","55uY","4LmA4Lit4LiH","7J+B","4KWH4KS14KSy","5LmL5ZCO","44Or44OI","IHN0cnU=","IO+8vw==","zpXOmw==","aGxl","INmG2YjYtA==","7J21","INmF2YE=","5oiW6ICF","IMO2bGQ=","6YCU","44Oz44OX","7Zi8","IHXEnw==","IMS
Rw6E=","IHZsYXN0bsOt","INmF2KzZhNiz","5Y2U","z4TOuc66zq7Pgg==","IHBvdmlu","xa9s","INin2YTYrdmK","IHNtbG91","44OD44OB","INmD2YY=","IGNo4bqlcA==","6JCs","2KzYqA==","P+KAnA==","0LTQsNCy","4Lij4Lin4Lih","2Y7Yrw==","INin2YTYr9mI2YQ=","IOuEpOydtO2KuA==","IOCkhuCkuA==","2LjZitmB","44O844Op","44Gg44KN44GG","INmI2KfYrdiv","2LHZiNiz","IHrDoWtvbmE=","INC/0LXRgNC10LE=","4KWALQ==","4LmI4LmE4LiU","5Li65LqG","zpnOnQ==","IOyblOyEuA==","4Liq4Lit4LiH","IOaJiw==","INCS0YHQtQ==","4LmC4Lii","IGthbGTEsXI=","z4TOrc+C","IO+/ow==","IO2WiOuLpA==","44KB44Gf","IMSNZXI=","Y2VsYQ==","w7xzw7w=","6rOz","7JeQ64+E","2LLYqQ==","44Gq44KL","2YjbjNmG","54mb","IHZvag==","IOuKkA==","INmD2YU=","5rOJ","0LfRjw==","6KOd","INii2YQ=","IM6xzr3OrA==","wqDQkg==","IHlhcMSxbA==","5o+b","INGB0YPRidC10YHRgtCy","IG7hu5Fp","2YjYpg==","IOuEpOydtO2KuOyYqA==","IHBvbGl0aWs=","xaFrYQ==","ZWJpbGlyc2luaXo=","bGRrZg==","0YPQsdC70ZY=","IGVvcQ==","INmF2K3YtdmI2YQ=","a3J2bGRrZg==","IGVvcWtydmxka2Y=","z4POtc+Jzr0=","2KjZhNi6","jJPquIA=","INGB0YDQvtC6","IFV5","IE7Em2s=","INC00LjQsg==","44K144Kk","IOyCrOydtA==","IOmX","INCx0LDRgtGM","INC/0LXRgNGW","wpY=","5Lqk6YCa","0LXQvdC3","2YjYs9iq","4Li14Lii4Lia","IOC4iOC4sA==","66GA","w7xmdXM=","2ZHZkA==","57i9","4Lix4LiU4Liq","6rKA","INGC0LjRhQ==","INii2LLZhQ==","INin2LY=","7KG0","2ZLYqg==","5oi4","IOyeiOydhA==","IOeUtw==","0YnRlg==","0L7QvNCw","INin2YHYstin24zYtA==","IFRow7RuZw==","INin2KzYqtmF2KfYuduM","0LXQu9GO","INGF0L7RgNC+0YjQvg==","4Lig4Liy4Lip","IHLDoW0=","5b6h","44O844OE","IEzhu5tw","INi02Yo=","IGhp4buDbQ==","zrjOvQ==","zr/Phc+D","5b6p","IMO6emVt","4LmB4Lic","5beo","4LiI4LiZ","2q/Ysdin2YY=","INiq24zZhQ==","IGlsZXQ=","4Liy4LiC4Lit4LiH","INiq2YjYsQ==","INC00L7Qs9C+0LLQvtGA","IHRlbnRv","0LLRgw==","INC30LDQtNCw","IHN0b2xldMOt","wqAg","4oCM2KfZhA==","y5g=","xZ9pdg==","0L3Rj9GC0Lg=","44KJ44KM44Gf","IFNi","INin2YTZhdi1","INCj0LrRgNCw0ZfQvdGW","INi02qk=","aeG6v25n","0YzRgtC1","6LCi","INmF2KrZhg==","INGA0LDQtA==","INmF2YjYp9iv","7LGE","6aG2","IGJvxZ8=","2KrZiNix","IMSRw6FuZw==","IGtpdGFw","IGh
vZGlu","IHRhcmloaQ==","44KE44KL","0YHRgtC10YA=","INGF0L7QtA==","0LLQsNC90LjQtQ==","INC+0YHQstGW","INGB0LjRgdGC0LXQvNGL","4KS84KSo","z4fOvw==","IOWPsA==","b8WZ","57uP5rWO","IOS9nA==","IHRodeG6rW4=","m4g=","IHlhbG7EsXo=","YWxldA==","7Kad6riI","INC30LDRiQ==","INC10LrRgdC/","4oSW4oSW","IOOAgCDjgIAg44CAIOOAgCDjgIAg44CA","INqv2YjYtA==","44Gr5YWl","IHVkxJts","IOG6","4KSG4KSI","4oCM2K/Zhw==","5oKq","IHRyw7I=","5pqX","zrvOu863zr0=","INC/0YDQuNC30L3QsA==","INiz24zYs9iq2YU=","IOCkheCkpA==","w6hv","6L+O","INC30YPQsQ==","INC30LDRgdC+0LE=","INiz2YE=","INmF2KfZhtmG2K8=","2K7YtA==","dmFqw60=","bml0xZk=","5q+S","5qSN","IGdpcmnFnw==","IMSRw6Fw","QG4=","0L7QstCw0YDQuA==","INiu2K/Ypw==","IHbEm3TFoQ==","IM6jz4U=","2YHYqQ==","0LDQvdC90Y/QvA==","INGH0LvQtdC9","5pSv5oyB","5aic","bGFyYXJhc8Sx","zqHOkQ==","IHppeQ==","IOq1kOycoQ==","IGjhu5Np","4Liy4LiE4Liy4Lij","aW1sZXJp","6LO8","INis2YfYp9mG","INGA0L7Qt9C80ZY=","0YXRltCy","zrPOtQ==","5qiq","zpnOkc6j","57at","IGJpcmF6","INGC0LDQutC+0LPQvg==","7YOE","INCx0YPQtNGD0YI=","INGI0LLQuNC0","INC90LXRgQ==","INmF2LnZhNmI2YXYp9iq","4KWH4KSv4KSw","INC00LLRg9GF","5b+F6KaB","5aeG","IHBvaGxlZA==","7Iqk7YSw","IOWNgQ==","INij2Kg=","0LLQtdGA0LTQtg==","IOCknOCkrg==","4KSy4KSk","5Zyw5Yy6","IHxb","INCy0LzQtdGB0YI=","INqp2KfZhQ==","IOODkA==","44O844OW","44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA","IOyDge2SiA==","4LmA4Lil4Lii","xI1uw6k=","INGB0YDQtdC00YHRgtCy0LA=","INGC0LDQsQ==","INmF2KfYsQ==","IGhsZWQ=","0LTQsNGC","2YjbjNiv","IOODqQ==","INiu2K8=","6KSH","56eY","INio2LHYrw==","IM+DzrHPgg==","z47Pg861zrnPgg==","5p2v","zrvPjQ==","5a6/","IOuCnA==","77uf","IMO2emVsbGlrbGU=","INC60L7QvdGB","INmF2Lo=","2LnZig==","4LmM4LiB","INmK2Ko=","INmF2LTYp9mH","IFRoYW5o","4KS+4KSc4KSo","paQ=","IHZsw6E=","INmB2LY=","zqTOmc6a","INC90LDRg9C60L7Qsg==","0LXQu9C10Lw=","IGTDoG5n","INCz0L7RgdC/0L7QtNCw0YA=","wqBT","0LjRh9C10YHQutC40YU=","INiq2YbZh9in","4KSc4KSo","INC/0LDQvQ==","5Yag","IOuCmOuKlA==","dMOt","5LiA6LW3","IGzDo25o","wqB2","b3bDvW0=","2LLYqA==","INis2YXYuduM2Ko=","IOa1tw==",
"INC+0YHRg9GJ0LXRgdGC0LI=","w6Np","2KfYptix","IOuzkQ==","4buLbmg=","IHPhu61h","4KWH4KSC4KWk","xJtqxaHDrQ==","INC00ZbRgg==","IOaD","bcSxxZ90xLE=","2LHYrQ==","IOyngOq4iA==","5aa7","4peL","IOyngOyXrQ==","2ZLZhg==","IHVyxI1pdA==","2ZLZhQ==","esOt","6JU=","INi02YjYsQ==","IEtow7RuZw==","24zYstuM","INC30LM=","INCy0L3QtQ==","IHByw6F2xJs=","6KuL","2KfZitiq","4Lix4LiB4Lij","IG9sZHVrw6dh","44KB44KL","IFTDonk=","65287J24","6JmV","IHPGsA==","INC90LjQug==","2aA=","2KfYtNuM2YY=","ZWxlcmRl","7Iuc7JWE","INGD0LzQvtCy","IMOnYWzEscWfYW4=","IOu4lA==","INGC0LDQutC40Lw=","0YDQuNC9","INiu2YQ=","YXlk","IOODoQ==","0LXQudGH0LDRgQ==","IGRvcHJhdg==","44GT44Go44Gv","IOy2lOyynA==","5bu2","IGvEsQ==","5Y+2","0YDQuNCz","7YWc","55Sz5Y2a","INCy0LXRgg==","INC/0L7QvNC+0YnRjNGO","INin2YHYsdin2K8=","z4DOtc65","4LmA4Liq4Lij","IGdpw6Ft","6Y4=","aGxhcw==","bWFuxLF6","0LDQvdCz0Ls=","IG11xb4=","wqBL","0YDQtdC00LjRgg==","6K6+5aSH","zrnPg868","IGPhuqNp","IOmAmg==","INmD2KfYsQ==","INC/0L7QtNC+0LE=","INC80LXRgtCw0Ls=","INGB0LDQvNC1","0LvRg9GH","5YKz","INmI2YfZiA==","IOmHjQ==","0LLQuNC5","5rOB","IOadjg==","IGlsacWfa2lu","IM61zq/Ph861","54qv","xZllam3Emw==","6K2Y","56ix","zrzOvM6x","INmE24w=","2YfYp9mK","INC+0L/QuNGB","2q/Ysdiv","INCz0YA=","IEFuaW1hbGlh","0J/Qvg==","IGLDs25n","INC00LXRgtC10Lk=","IGzDonU=","IOaVmQ==","INC/0L7Rj9GB","INin2YTYog==","4Lix4LiZ4LiV","INC00LXQsg==","INGG0LXQuQ==","0YzQsg==","5oOg","bWFsYXLEsQ==","aW1sZXI=","4KWI4KWkCgo=","INC90L7QvA==","enY=","IOC4geC4ow==","IHBheWxhxZ8=","wqBz","4KS/4KS44KSu","0YHRgtCy0LXQvdC90YvRhQ==","c3RvdXA=","0L7QvdGW","c3TDrQ==","INit2qk=","INqv2LHZgdiq","4Liy4LiE4Liy","0LTRjw==","2YTYp9ir","IHpkcmF2b3Q=","5LiK44GS","44G8","ZWxlcmU=","2LjZhQ==","INGB0LLQtdGC","0L7RgNCz","56ul","INC/0LXRgNC10L8=","IOCkruCkpg==","0LDQt9Cw","5aaC5L2V","0YHRjNC60ZbQuQ==","IGLGsOG7m2M=","IGdlcmVrbGk=","5aSn5a62","IHRyw6Fp","6YGp","5Lit5aSu","IHBo4bqjbg==","INi52LHYtg==","INmD2KrYp9io","5oup","0YjQtdCz0L4=","5biu","INmG24zYp9iy","6L+3","4Li44Lib","4Li04Lib","INiv2K4=","z4TOuc66zq3Pgg==","
IFV6","INiq2YjZhdin2YY=","INmI2KfZhNij","xZllcw==","0ZHQvA==","IOW4gg==","INGC0L7QttC1","IHlhcGFu","5b285aWz","INmF2K/YsQ==","toE=","IOaXtg==","4LmA4LiY","INmF2KfZhA==","IELDvHnDvGs=","INmE2Ko=","5bCa","ZGVtZQ==","w7xi","INGF0YPQtA==","IGzDqWth","55ub","55u05o6l","0L3QuNGG0YLQstCw","INC/0YDQuNGH0LjQvQ==","0LXRgNCw0L8=","INGB0L7Qt9C00LA=","5qKw","IG3DvHo=","57O75YiX","b3V6","IOCkk+CksA==","0YDRg9GH","IOG9","zrzOrc69zrE=","INC/0YDQtdC00LzQtdGC","IOWy","44Oz44OB","zrzOrc69zrc=","0LvRg9Cz","wqBu","IFRhcmlo","IOOAiA==","IGJhbmE=","IGPDrQ==","IHbDvWtvbg==","5Zug5q2k","IHTFmWk=","4Liy4LiL","dmFpbGFibGU=","IGlzdGVt","44Ol44O8","0JXQnQ==","INCz0LDRgA==","zr/Phc67","4KWb","INmI2LbYuQ==","4Liq4Liw","6Led","INit2YE=","4Li04LiX4Lii4Liy4Lil","5aW555qE","0L3RltGI","0LbQtdC90LjQtQ==","6riw7JeQ","IOmYvw==","INmF2KfYsdiz","IMOnZcWfaXRsaQ==","IMWfZWhpcg==","w6F0b3I=","4LmJ4LiX","7J2064qU","IOiy","6aGN","55mC","INC90LjRhw==","IOqwgOyngA==","5Lym","csOhbg==","b3N0YXQ=","INmE2YM=","6Lo=","IE5nw6BuaA==","IOCkuOCkpg==","5pyX","54S25ZCO","44K444Kn","0LvQtdGA","INCe0L3QsA==","2LPZiNmG","z4HOv869","INiv2LHbjNin2YHYqg==","4Lit4Lit4LiZ4LmE4Lil4LiZ","IGTDoWw=","INC80ZbRgdGG0LU=","INC00L3QtdC5","INin2YTYp9iq","IOCksOCkueCkpA==","77yM5a+5","6LOH5paZ","5Lu75L2V","6YQ=","dGFq","zrLOrA==","INC90LDQtNC+","INGB0YLRg9C0","IMWfZWg=","4Lix4LiN4LiN","4KWL4KSs","44Op44O8","27HbtQ==","ZXB0","IGJpbGRpcg==","4Liq4LiW4Liy4LiZ","0LXRgtGM0YHRjw==","c2vDvW0=","INC+0LHQu9Cw0YHRgtGM","IOyeoA==","IEfDtnI=","IGRheWFu","INuM2KfYrw==","55Sf5Lqn","7ZiR","5b6B","INin2KzYsQ==","INC/0YDQtQ==","5LiJ5LiJ5LiJ5LiJ","5Z+O5biC","INC/0YDQuNC80LXRgA==","xI3DoXN0","6IGY","INmF2LHYqNmI2Lc=","5p6a","5YiA","5p+l55yL","IOuqqOuRkA==","7J6Q66OM","Le+9pA==","IOqwmeydtA==","IOyhtA==","0LXQs9C+0YA=","ZWRpaw==","0LjQvNGD","IEFydGg=","5bqU55So","bWnFn3Rp","IGto4buPZQ==","INGW0LQ=","zrvOu863","w6Jo","0LzQsNCz","6ZqG","INCy0L3Rg9GC0YA=","INio2Lc=","KOaXpQ==","xLBZ","0LvQuNC6","IELhuqNu","INiq2YjYsw==","4KS84KSk","YW1haw==","5ZWP6aGM","INGB0LDQvN
C+0YHRgg==","77y8Cg==","IOemjw==","2aE=","INGE0L7RgNC80Lg=","INGA0L7Qt9GD0Lw=","INmF2LfYp9mE","5Lmf5piv","576O5Zu9","65Oc66a964uI64uk","IGzEqW5o","INC/0L7RgtC+0LzRgw==","0Y/QsdGA0Y8=","5ryr","IG5nb+G6oWk=","4Lit4Liz","2YrZhtin","IG1sYWQ=","z4PPhM6s","2KfYqtix","7KO87J2Y","0LXQvdC90ZY=","0L7Qt9Cw","2YLYp9iq","INCS0LDRgQ==","6K6t","6ZA=","0YPRjtGH0Lg=","INqp2LE=","IC58","IGdlbsOn","6Kmy","5LuB","0L7QtNGL","INij2YjZhA==","IOyCrO2ajA==","IOC5gOC4qg==","IOuVjOusuOyXkA==","4oCM2Kg=","INC70LjRiNGM","INC40LzQtdC90L3Qvg==","bWFkxLE=","IOmC","INmI2KfYsdiv","IHRha8SxbQ==","IOC5gOC4qw==","IOC4reC4og==","IGtvbnVzdQ==","2K7ZiA==","INGB0LjQtA==","6LWk","0L7Rj9GC0LXQu9GM","64u1","zrXPiQ==","0ZbRhQ==","IOCkr+Ckpg==","INqp24zZgQ==","zrzOv8+C","IGFsZMSx","IO2ZjQ==","0LrRg9C/","INmG2YXYp9uM2LQ=","44Gl","IO2VqeuLiOuLpA==","IOuMk+q4gA==","0LHQvtGA0LA=","6YmE","IOC5gOC4iA==","4LmJ4LiB","wqfYtw==","2LHYqNmH","INGD0Lc=","INC80LDRjtGC0Yw=","IGJ5bGk=","4Li14LiV","IOyngOybkA==","6Ieq54S2","w7l5","IMOnYcSf","0LXQtNC40L0=","64m0","5Y2x","INC/0L7Qt9Cy0L7Qu9GP","2K3Yp9iv","INGH0LXQs9C+","4Li14Lii4Lij","IHnDtm50ZW0=","IGRlcnM=","INGB0YLQvtGP","INC60YDRg9C/","IPA=","INC00L7QvNCw0Yg=","0LXQvdC0","57un","IMSRw7Q=","IGNodMSb","6K6h5YiS","zq3OsQ==","IGRvYsWZZQ==","4Liq4Lit4Lia","0LXQu9C10L3QuNC1","IMSRw7RuZw==","44G+44KK","IGJveXVuY2E=","4KWB4KSX","INGE0LjQtw==","44Kz44Oz","IGRlbmV5","0YfQtdGB0LrQuNGF","zrvOv869","5Lul5Y+K","2KfZiNiq","wqDCoMKgwqDCoA==","IOykhA==","4KS/4KSr","INGC0L7Quw==","IOuCtOqwgA==","4paP","IHBow6E=","INGB0L/RltCy","INis2YXZiti5","IGJlenBlxI0=","IOaXoA==","IHbFoWU=","0YHRgtCy0YM=","ZHVzdA==","b8Wh","INiq2KfYsdmK2K4=","2KfYrdip","INmF2LTYp9ix2YPYqQ==","IM6xzro=","4Lix4LiZ4LiZ","6YGK","INGB0L7Rgg==","INC60LDQtw==","INGC0LXRh9C10L3QuNC1","6ri0","YWNha3TEsXI=","6rGw64KY","4Li14Lii4Lih","INGB0YPRhQ==","IOuEiOustA==","44GP44KL","INC60L7RgtC+0YDQvtC5","2KfZgtip","ecSxbA==","44K744OD44OI","INGN0LvQtdC8","5oGQ","2YbYp9ih","5YWp","IHRlxI8=","5Lil","IOyniOusuA==","IOS4ug==","7Iuc7ZeY","INC/0Y
DQvtC6","dWplbWU=","w7xjw7w=","INin2YTZhdi6","INit2LPYp9io","44GX44Gm44GE","0LrQvtCy0LA=","IMSRw6Bv","INC/0YDQuNC3","INmI2YXZhg==","INC+0YA=","4LiB4LiV","0LDRhA==","IOC4nuC4ow==","0YbQuNC10Lk=","5qo=","IHDFr3NvYg==","5a2p5a2Q","IGLDoW5o","INGE0L7RgNC80YM=","IOG7lQ==","INC80LXQvdC10LU=","4LmJ4Liy4Lir","0L3QuNGG0LA=","4Li1Cg==","INCy0L7Qu9C+0YE=","INin2LHYp9im2Yc=","56ys5LiJ","65CY7JeI","IGvEsXNt","44O844OK","bGVyaW1peg==","2YbZitmG","IE5nxrDhu51p","INC+0YLQtNC10Ls=","55qE5pe25YCZ","0L7QvdC+0LI=","xI1hbg==","aXpt","INGB0L7QsdC+0Lk=","4LmH4LiV","INGB0LvRltC0","IOCknOCkuQ==","77yM5oiR5Lus","44CC44Gd44Gu","z4DPic+C","54af","4Liv","64SQ","5pyL","IOu5hOuwgA==","642V","IG3DoG4=","7J206rOg","656c65Oc","6YKE","xLHFn8Sxaw==","IOS4qg==","IG7DoWQ=","0LHRgNCw","5oyH5a6a","bGFyxLF5bGE=","INCe0L3QuA==","IGhyYQ==","INGA0LXRhtC10L8=","INCg0L7RgdGB0LjQuQ==","5b2x5ZON","IEtkecW+","IMO2xJ9yZW5j","5Ym1","IGppc3Q=","6IiI","6Kem","5Y+R546w","4Lih4Liy4Lii","ZXJrZW4=","INC30LTQtdGB0Yw=","INmF2LPYpg==","QG5hdGU=","IOuCtOyaqQ==","IG5hYsOtZA==","24A=","INC80L7QvNC10L3Rgg==","44Gg44GM","zq/OtM6x","VGFr","IOuztOqzoA==","Ojo6Ojo6Ojo6","xJ9tZW4=","INC/0L7QvNC10Yk=","44Gr44Gk44GE44Gm","INmB2YjZgg==","INi52LbZiA==","INmF24zYp9mG","IG3DvGM=","INC/0YDQvtGP0LI=","0YfQtdGB0LrQuA==","44Gg44GL44KJ","6YKm","IOu2hOyEnQ==","6Z+p","jag=","IERhaGE=","IM66z4w=","INC90LDRh9C40L3QsA==","INCf0L7Rgg==","z4POus61z4U=","INGA0LDQvQ==","2YjZitiz","Ojo6Ojo6Ojo6Og==","27Hbudu5","IGFyZMSxbmRhbg==","4LmC4LiU","2KfYsdin2YY=","2K/Yp9iv","IHF1w70=","INij2YPYq9ix","4peG","INij2K7YsdmJ","IOuniOydjA==","66a0","INi52YTZiNmF","IGXEnw==","0LLQvtGA0Y4=","IOODlw==","0YPRh9Cw0YE=","INio2KM=","z4bOvw==","0L3QuNC60LDQvNC4","4LmD4LiV","xI1ldG7Emw==","4Lia4Liy4LiH","54mZ","44Oq44Kr","7ZI=","5Ye654mI","zrPOuQ==","44CC44Gd44KM","IHlhbmk=","bGVjaA==","IEx14bqtdA==","55qE44Gq","IG5lZGVuaXlsZQ==","ZGVq","INGB0L7QstC10YDRiA==","IHBo4buV","xLFzxLFuZGFu","IGNo4bqvYw==","ZGXFnw==","INC60L7QvNCw0L0=","5pu/","IHBsw6Fu","IGThu68=","IOq1reqwgA==","IHRh
a2lw","IHRo4buneQ==","0YHQu9GW0LQ=","4omn","IElJQw==","zrjPhQ==","w6F2YXQ=","INGB0L7Qug==","INCx0LDQs9Cw0YLQvg==","Ozo7Ojs6Ozo=","z4HOuc6/z4I=","aWxtacWfdGly","IHpuYW0=","IM6kzrE=","YW1heg==","4LmB4Lie","44OB44Oj","IGt1bGxhbsSx","5pS+6YCB","0LTQvQ==","INmI2KfYqA==","IHRy4bqvbmc=","0YHRj9Cz","INin2LHYqtio2KfYtw==","INCy0YXQvtC0","5bee5biC","IOCkuOCkpA==","0YfQsNC10YLRgdGP","7YyM7Yq4","IE5o4buvbmc=","5LiN5Y+v","5bGK","IOOCrQ==","2KfYsdmH2KfbjA==","IGFyxZ9pdg==","INin2YTZiQ==","4KS+4KSv4KSV","44GX44KH44GG","IHVsdXM=","YWxheHk=","6riw6rCA","446hKA==","zrzOrM+Ez4nOvQ==","w6hu","w7lp","INC90LDRgdGC0L7Rjw==","INCh0LI=","INC+0YHQvtCx0Lg=","0LrQvtCy0L4=","INGA0LXQsdC10L3QutCw","INGC0Y/Qtg==","IHh14buRbmc=","IOq2jA==","0L7Qs9C+0LQ=","IOG6pXk=","6LKg","4Lin4LiZ","IHN0YW5vdg==","IGtyw6Fs","IOCkh+CkuOCksg==","ZWJl","5a6+","INC00L7RgdGC0LDRgtC+0YfQvdC+","SUlJSw==","z4DOrA==","IGJpcmthw6c=","INin2YTZhdmC","44O2","IEJhxZ9rYW7EsQ==","IOyyqOu2gO2MjOydvA==","IHlhcmFy","5Lqh","IM+Az4w=","wqDRgQ==","zrTOrg==","ZWxlcmluaQ==","IHN1w6c=","INC00L7QvNCw","INC90LDRgNGD0Yg=","IM6v","IOq3uOydmA==","55S15b2x","2KfYqNmH","0LrQvtC80YM=","IOCkpOCkrA==","4KWI4KSg","IOuqqOynkQ==","IOaxnw==","IOqyg+ydgA==","zr/Ovc+EzrHOuQ==","INin2YTYsdmK2KfYtg==","6Kix","IGhhbGluZGU=","INin2LTYp9ix2Yc=","INC60YDRiw==","0LvQtdC90LjQuQ==","bHXEnw==","IGRvYnU=","c2lr","4KWB4KSf","INC60ZbQvQ==","44Go44GN","4KWC4KS4","5oWi","IGTEscWfxLFuZGE=","57eP","IGLDrQ==","IENMSUlJSw==","IElJQ0lJSQ==","IGhlcms=","44KP44Gb","IOOAgOOAgOOAgOOAgOOAgOOAgOOAgA==","wqDCoMKgwqDCoMKg","2KfZhNiv","IGRhdnJhbg==","xI1lcg==","INif","44GY44KD44Gq44GE","IGRhaXI=","IO6lpA==","4Lix4LiH4Liq","IOuLtA==","5b6e","INGN0YLQuNGF","6K+6","4bu3","0LXRgNC40YHRgtC4","0L7QstGL0YU=","IOODhw==","2LbZig==","IOCkieCkoA==","IG5hcMWZw61rbGFk","6LSd","IMWhaw==","INio2YjYr9mG2K8=","dsWvbGk=","6YGH","INC30L3QsNC5","IFRoYW0=","cmFuaQ==","2KfYrdiq","2LTZhw==","0LzRltC90ZbRgdGC0YDQsA==","4LmL","IM6Rzr3OsQ==","4KWL4KSa","57uE57uH","0YHRgtC40YI=","aW1saQ==","5ZCN54Sh44GX44GV4
4KT","2ZHYqQ==","zrjOvA==","0L7Qu9C+0YI=","4Lii4LiH","44KJ44KM44KL","INC70LjRhw==","0L7QstGL0LU=","6YCD","IOW5vw==","7Iqs","2YXbjNmG","IOyghOyytA==","IM6tz4c=","IOyxhQ==","IGhsYXM=","0LXQutGC0LjQsg==","IM+AzrvOtw==","bHXEn3U=","5aW955qE","INqG2YjZhg==","IEJlbGVk","IGVuZ2Vs","0L3Rj9GP","IHlhxZ9hbg==","0YfQvdC40YU=","2KfYsdmK2Kk=","4KSu4KSk","44OL44OL44OL44OL","5Yui","IOWGhQ==","IO2PrO2VqA==","INC+0LHRgQ==","IHRo4bqlcA==","IGTDonk=","44OW44Op","0LDRgtGL","INGB0LLQvtC10Lk=","44KJ44Gq44GE","5Y+R55Sf","ZXJlY2U=","IG9kYm9y","INCy0L3QtdGB","IMSQ4bqjbmc=","IOuPjOyVhA==","xJtsaQ==","xLFzxLFuZGE=","IOCkrOCkpuCksg==","dm7DrQ==","44Gu44Gr","INC/0L7RgtC+0Lw=","aW1kZQ==","YWxhbWE=","4oCq","IHN0ZWpuxJs=","0LXRgNC1","6ZKi","5py65p6E","IOiz","5ZSx","IOuFuOy2nA==","INC70LjQsdC+","4oCK","IGNleg==","cm9txJs=","zq/Pic69","z4bOrg==","IO2ZqQ==","IGRsb3Vo","6aqo","5YWs6YeM","5Ly4","IOODkQ==","5LuZ","IG9sbWFkxLE=","0LXQu9C40Yc=","0L7QttC00LXQvdC40Y8=","IHPDtnlsZWRp","w6F0ZWs=","7IO1","4Lii4Lin4LiB","IOmbuw==","INC/0LXQsg==","INC00YDRg9Cz0LjQtQ==","w6F0a3U=","INi52Yg=","b3bDoW5h","2LbYsQ==","IOuBnQ==","IO2Gte2VtA==","zpY=","IHZ1cg==","5Yay","INC/0YDQtdC6","IOCkquCklQ==","IOC5gOC4lw==","44Go44GL","2LnZhg==","5a6H","z4TOtg==","IG7hurFt","INGB0LLQvtCx","IM60z40=","55aX","LdC5","6aaZ5riv","2KrYpw==","z4POuc68zr8=","7ZWE","IOivpuaDhQ==","5Lih","2Y7Yp9mE","IFRyxrDhu51uZw==","ZW7DqWhv","INGA0LXQutC+0LzQtdC90LTRgw==","24zYsdmH","4Liy4LiW","INqp2KfZhdmE","2KjYtw==","2LLbjNmG2Yc=","INC00L7Qu9C20L3QsA==","IOunjuydgA==","4peP4peP4peP4peP4peP4peP4peP4peP","bGVwxaHDrQ==","0LDQu9C+0LM=","44Kq44Oz","IOuzhA==","xLFyxLE=","INis2KfZhdi52Yc=","5puc","b2rDrQ==","INGI0LvRj9GF","IGjEsXpsxLE=","INiu2LXZiNi1","0JDRgA==","5ZyY","INC20LjQstC+0YI=","6bE=","IG5n4buv","IHbDsm5n","6I6r","INC30LDRhdC+0LQ=","7JmE","INGB0LvQtdC00YPRjtGJ","6Ze7","0ZHRgA==","IGNodsOt","6IOc","44Gq44GX","IHRla25vbG9q","ZWptw6luYQ==","IOygiA==","7LOQ","5pmu6YCa","IHbDvXJv","IGF5csSx","INC/0YDQtdCy","IGfDs3A=","4LmC4LiB","4LiX4Liz4LmD4Lir","5Y+O"
,"5ZiJ","IHRlbGV2","44Go44GT44KN","64+M","cGh5bA==","4Lij4Liy4Liw","IOeI","0YHRgtC40YLRgw==","77yM6L+Y","IM6RzrM=","xI1rdQ==","5o+0","4KS+4KSv4KSk","5o+P","44KC44GX","INC/0LXRgNC10YE=","IOyYge2ZlA==","aWRsYQ==","5Y6F","77yPOg==","2KrYsduM","4Lib4LiP","INC90LDRgdC10LvQtdC90L3Rjw==","IGFtYcOn","IGtkbw==","INC40LfQstC10YHRgg==","0YjQuNGA","7KOg","xaFpdA==","IHThu5Fj","7J6Q7J2Y","0YfQsNGC","5Y+D","6Zu2","5bC6","IGluZGly","INC90LDRhtGW0L7QvdCw0LvRjA==","IHhhbmg=","24zYr9uM","INC40L3RgtC10YDQtdGB","INii2LPbjA==","6YKj5Liq","IGJpbG0=","0LDQvdC1","IHTEm2NodG8=","0YfQuNC6","INC00L7RhdC+0LQ=","6IKh5Lu9","5YWz57O7","44Gr44Gq44Gj44Gf","INC/0YDQtdC00L/RgNC4","IGdlw6dlbg==","INio2YI=","IHbDvXpuYW0=","IOC5gOC4hOC4ow==","INGF0YLQvg==","2LTZig==","5Y+C5Yqg","0YHRgtCy0LXQvdC90L7Qs9C+","0YLRgNC+0L0=","woDCgMKAwoA=","5qKd","0LHQsNCy","27Hbtg==","6aG6","IGpheg==","INin2YTZhdmE","INin2KvYsQ==","INC/0YDQuNCy0L7QtA==","0LDQvdGD","4KWB4KSt","5pen","0YzQtQ==","4Liq4Lil","0LvRj9GO0YI=","4Lin4LiU","xrDhu5tp","2YrZhdip","44Kv44Ot","0LvQuNC5","zrPPgc6s","IHBlcmZvcm1hbg==","6K+J","5L2g55qE","7IWU","0L3QtdC90LjRjw==","4butaQ==","2YjYstuM","6Z+/","4KWI4KSm","IOuquA==","IGVzZXI=","INmB2LnYp9mE24zYqg==","0L3RltCy0LXRgA==","zrrPgc6x","6Ki8","IG5lbW9j","IHlhcmTEsW1jxLE=","IOeJuQ==","INC60L7Qvw==","INCc0L7Qtg==","4KS84KSV","IOuc","INGA0LXQsNC6","IHBvem9y","wqDQkA==","INmK2YM=","INGB0LDQtA==","IOWFqw==","INC/0L7Qu9GM0Lc=","IHJhxJ9tZW4=","dGVybsOt","c2l5b24=","0YHRj9GH","b3ZhbsO9","IOuMgO2VnOuvvOq1rQ==","INCy0ZbQtNCx","INCQ0L3QtA==","c3R2YQ==","6YyE","IOuR","4Li04LiE","asOtdA==","IGt1bGxhbsSxY8Sx","IOafpeeciw==","2YHZhA==","INCv0LrRidC+","55yL5Yiw","0YDQtdGF","INin2YTYudix2KjZitip","66Gc6re4656o","IOCkrOCknA==","INC/0YDQuNC/","IHNjaG9w","INio2KfZhNin","5a6F","INin2YTZhdmH","zrHOvc6x","4KWL4KS1","5YG0","5byA5Y+R","2YXYp9mE","IOCkp+CksA==","IGRhaGls","44CB44GT44Gu","4Lix4LiI4LiI","0YHQv9GW0LvRjA==","IOCkleCkqg==","INCy0LXRhw==","INCy0LjQtNCw","INmF2LnZhg==","INC+0YLQu9C4","aeG7hQ==","0LvQuNGI","INCf0L7RgdC7
0LU=","44GT44GT","IGvDvGx0w7xy","INis2LE=","IOa8","6Ie6","IG1ldmN1dA==","2b7bjA==","INin2YTYs9mE2KfZhQ==","0LjRgtC10LvQtdC5","INGA0L7RgdGC","IGVkaWw=","IOW3sg==","57K+5ZOB","5LuF","4oCZeWU=","4KWI4KSCLg==","IOWGhg==","64iE","IOyZlQ==","5pit","IM6azr8=","bWVkZW4=","IG9sYWI=","INqp2YjYrw==","4LiE4Liy4Liq","0LXQvdC90LDRjw==","5oq8","eWzDvGw=","IHNldml5","IGTEm3Rp","4oCsCg==","INi52LI=","IHXhu5FuZw==","INiz2LHZhQ==","0LXQvdC1","INC80LDQu9C10L3RjA==","INCy0ZbQtNC+0Lw=","4Lix4Lia4LiX","IFRow6Fp","IOCkhuCkteCktg==","cm92ZcWI","55uj","INGP0LfRiw==","IE95","5aOB","0LLQsNGC0Yw=","0LvQsNC00YM=","2KfYtdmE","b3TFmWVi","2K/Zitir","7Y+w","zr3Ov868","0LPQvtGA0L7QtA==","IG11aA==","4oCZbA==","0YHRgtCy0L7RgA==","5YWE","0JXQoA==","2LfZhA==","6ZyH","2Y7Yqg==","IGJsw60=","IGVkaWxkaQ==","6Z2g","5LqM5Y2B","5peX","IMOnaXo=","IMSR4bqjbw==","IG9wYXQ=","b8SfYW4=","67KM","IOmg","IHNlYmVw","0YPRgtC4","5Yi6","2LfYqA==","ZXbFocOtbQ==","Y2hvcA==","55Sa","IG5naOG7gQ==","INC/0LDRgNGC","4Li44LiE","2qnbjNmE","ZHVt","IG9ydGFr","44Gf44GX","IG9ieXZhdGVs","IHbDvWNo","IHZlcmVu","INCy0LXRgdGM","INCU0LA=","IO2VmOyngOunjA==","5aaC5q2k","IOCkruCkueCkpA==","4Lix4LiH4LiB4Lik4Lip","44CC6L+Z","INCz0LDQuw==","IHNhbmF0","6aCG","INGB0LDQvNC+","5Zuw","4Li14Lit","IEJhxZ9rYW4=","z4TOv8+Fz4I=","IHlhcHTEscSfxLE=","xZlpdA==","INGB0ZbQu9GM","4KS+4KSo4KSk","INmG2Ko=","IGtoxINu","4LiK4LiZ4Liw","0LzQuNC90Lg=","44Os44O8","64Ks","6YWS5bqX","INin2YTZitmI2YU=","5LmX","4LiE4Lij4LiH4LiB4Liy4Lij","2YHYp9mC","IOCkj+CkuA==","IOah","2q/YsA==","IOCkh+Cksg==","0LXQu9C10L3QuNGP","4LiB4Lij4LiT","5Lic6KW/","zp/OnA==","IG3huq10","IHNuw60=","wpA=","4LmA4Lij4Liy","7ZW07JW8","IOyEnOu5hOyKpA==","INiv2KfYrtmE","IHRo4bqvbmc=","7YOI","0LDQstGB0Y8=","INGW0Lw=","2KfZhdiq","INmI2YLYqg==","4KWC4KSB","IOiQ","INiz2YTYp9mF","IHZ6ZMSbbA==","5biM5pyb","5a2Y5qGj","IOC4l+C4sw==","INCy0ZbQudGB0Yw=","0LDRgNCw0L0=","INGA0ZbQug==","INC/0LjRgdGM","IOG8kA==","6riw64+E","INC/0L7RgdGC0L7Rj9C9","IOWMl+S6rA==","IE7Em20=","2LTZhtin2YXZhw==","IGRhbMWhw61jaA==","INio2KfYuQ=
=","IHBvaHk=","2KfZhNmB","4Lie4Lin4LiB","6Ys=","IGNpaA==","2aI=","5Li0","44Kv44OI","0L/QvdGP","INC00LDQuw==","2ZLYsQ==","44CA44CAIOOAgCDjgIA=","5oql5ZGK","2YjYr9uM","4bujaQ==","0YbRltGU0Y4=","IOODgA==","INGB0YLQtdC/","cmHFvg==","IFNhxJ8=","IHR1eeG6v24=","IGFsbWFr","INC30LDQsdC+0LvQtdCy0LDQvdC40Y8=","IM+Dz4c=","IO2L","INCy0LjQvA==","56Gs","IOS6lA==","IGlraW5jaQ==","4Li44LiN","4Liq4Liy4Lin","IOyEuOqzhA==","INmF2K3ZhA==","4Lij4Liw4Lir4Lin","IGVsZWt0cm9u","IGjhuqFp","5pei","IO2WpQ==","IGppbsOp","IG5naGU=","5pGp","INGB0L7QsdGW","xq8=","0YLRg9GA","5rG96L2m","2LTYp9mH","IGTDoG5o","5Li5","5LuK5pel","44OQ44O8","0LLQsNC90LjRjw==","INiz2KfZhQ==","546v5aKD","INin2YTZhdmG2Ko=","INGB0LXRgNC0","6YGg","zrXPhA==","INCw0LLRgg==","4Liy4LiH4Lin","IHZ6dGFo","cnXFvg==","0LDQu9GM0L3QsNGP","INi32LHYp9it24w=","4LmC4Lij4LiH4LmB4Lij4Lih","IMSNYXN0bw==","IOq8","z4PPhM+M","IGJ1cmFkYQ==","IMSweg==","IOq3uOuemA==","5bKb","INi02YjZhtiv","xaFlaw==","IOydtOyVvA==","44KM44Gq44GE","6re5","bGFtxLHFnw==","5LuN","Y2jDoXpldA==","INGB0YPRgg==","5peg5rOV","5rWm","xJtsYQ==","4LmD4LiZ4LiK","IGPDom4=","zp/Okw==","IHp2w70=","INm+2KfYsQ==","INC60LvRlg==","IG5vdsOp","55SY","67mg","bcOh","INGB0L7Quw==","4KSV4KSw4KSj","0L3QvtGH","IGZpaw==","IOCknOCklw==","4LmH4LiZ4LiV","INmF2KrYrQ==","IHBoacOqbg==","IG9sc3Vu","INC60LDQsQ==","IGjDunQ=","6ISx","IOWW","IEjhuqNp","IHTEm8W+","IHRow6Fp","INiq2KfYqA==","LdCf","2KvYp9ix","54aK","INC90LjQvNC4","IHpwcmFj","IOCkpOCkuQ==","INC80LDQutGB0LjQvA==","bWV5aQ==","INGB0L7RhtC4","5rKS","IOyViuuKlA==","77y/Xw==","5ZWm","INin2YbZiNin2Lk=","5pq0","5LiK5rW3","5YW35pyJ","4KWB4KSs","7JWZ","IO2BsA==","IO2emA==","IHRyw6FuaA==","4KS/4KSv4KSo","44G+44G+","0L/QvtGH","bcSbcg==","5bOw","INmF2LXYsQ==","INGN0YTRhNC10LrRgtC40LI=","IOeP","bGVyaXlsZQ==","4oia","IOy2lQ==","IOqyjOyLnA==","7J2R","IHBvxZnDoWQ=","INi02KjaqdmH","2KfZh9i0","INiu2K/Zhdin2Ko=","IG5hxaFl","zr3Ov8+N","IHnDtm5lbGlr","IGtvcms=","2KfZh9mF","6LCI","IM68zrc=","IGRvbGFy","57Wm","IM6Vz4U=","IG9iZG9iw60=","IM68z4w=","4LmA4Lit4LiB","INm+2K
fYs9iu","6KGl","2KfYudiv","44KJ44GE","zq3Ouw==","0LjRgtGL","IOuFvA==","IF57Ww==","zq/Osw==","5qCR","bMSxbmRh","IOyXrOufrA==","wqPCow==","xZlpbA==","INCw0LLRgtC+0YA=","z4TOuc66z4zPgg==","dWR1cg==","IGPGsA==","IGvEsXk=","0YHQtdC8","INij2KjZiA==","z4TOuc66z47OvQ==","27Hbtw==","6LK4","INC/0YDQvtC2","w7xuY8O8","INC90ZbRhw==","IOCkruCkpA==","44GV44KM44Gm44GE44KL","2KfYtdix","INi52YI=","INC60LDRh9C10YHRgtCy0LU=","INCT0LXRgA==","5bqG","2bk=","YWxhcmRh","INm+2LHYsw==","0LjRh9C10YHQutC+0Lk=","IHBoaW0=","zq/Ovc63","5LiH5YaG","aWxlcmluaQ==","44CB5aSn","IG9sc2E=","5qC55o2u","4oCM2LM=","IFRo4bun","cm9qZQ==","0L3RjNC+0Zc=","IHNsb3U=","4Li14Lis","xLF5b3J1bQ==","xJtq","INiu2KjYsQ==","6K6K","IOebuA==","ZWxlcmluaW4=","7ZWZ64WE64+E","0YfQtdGB0LrQuNC1","IMWfZWts","INiy2YXYp9mG24w=","IHhpbg==","4Lix4LiB4LiH4Liy4LiZ","IEVraW0=","5oS/","INC+0LTQvdC+0Lk=","zr3Org==","5pyA5paw","h7w=","INC90LjQtg==","IOuzvA==","6LeR","INC90LDQv9C40YE=","6IGW","IOKAjA==","5qCH5YeG","IHZyw6F0","IFbDrA==","INmB2LHYp9mG","5p2l55qE","5ae/","0YXRgw==","INio24zYsdmI2YY=","INC00YPRiA==","0LLQsNGO0YI=","IHNlYmU=","6buY","IGthecSxdA==","z4HOuA==","44Go44Gu","INC/0YDQvtGG0LXRgdGB","5oyB44Gh","0ZbQvdCw","INGC0L7Rgg==","INGC0LDQutC40LU=","VGhlbw==","INmG24zYsQ==","0YbRgw==","IGF5YWs=","4LiZ4Lit","IHNpdGVzaW5kZQ==","INqp2YbbjNmF","INGB0L7RhQ==","IOCkruCknA==","IG9sdXlvcg==","572R5Z2A","INm+2LLYtA==","IEV5bMO8bA==","ZMO8xJ8=","INio2LHYrtuM","INmF2LnYsdmB","IG9iZWM=","IMOnYWzEscWfbWE=","7IS87YSw","INGB0LLQvtGU","0L7RgdGC0LXQuQ==","Ojo6Ojo6Ojo6Ojo=","INCw0LvRjA==","56uf","INio2KfYtNmG2K8=","2KfZhNir","INC90LDQudCx","INC/0L7QutCw","zp4=","INmI2KU=","INiu2YjYp9mG","4KWB4KSq4KSv","IOC5g+C4qw==","INCx0YvRgdGC0YDQvg==","IHRo4but","64G8","IOWkmg==","5Lik5Liq","4Lih4LiV","2LLYp9ix2LQ=","IOuf","4KSv4KS5","0YnQuNC90LA=","4bqnbmc=","772X772X","4LmA4Lie4Lil4LiH","dHZydA==","INGW0L3RiNGW","zrvOtc6v","IHZp4buHbg==","kbg=","IOeZvQ==","2Y7ZiA==","IGNo4bupYQ==","c3R2bw==","IGRvxJ9y","IGlsZXI=","4KWLLA==","4LmD4LiZ4Lib","INix2YjYs9iq","2YjZh
NmI","xaFsbw==","0LDQu9C40YHRgg==","5YWx5ZKM","4Lie4Lii","IOyZgA==","2YTZitmE","INGP0LrQvtCz0L4=","0LXRgdGC0Yw=","INGE0LjQvQ==","INij2YbZhw==","IE3DvGTDvHI=","IM6UzrnOsQ==","INGC0LXQuw==","4KS/LA==","0YPQutC4","INCg0KQ=","IE1hecSxcw==","4LmI4Lit4Lih","YXJrZW4=","5oCV","2KjbjNmG","0YLQsNGF","ZWJv","67O07Kad6riI","INm+2YQ=","INCz0YPQsQ==","INCy0LrQu9GO0Yc=","5pS/5rK7","IM61z4DOuc+D","INmB2KfYsdiz24w=","6K2J","z4bOtw==","KOmHkQ==","4Lio4Lij","5Ymn","4oCZeWE=","5bm05bqm","INmG2LHZhQ==","2YPZiNmF","6KKL","IG5lZGVubGU=","4LmJ4Lit4LiH4LiB4Liy4Lij","44CM44GC","INC/0L7RgdGC0YPQvw==","7JyE7JuQ","5Y2Y","6I6x","IHVtb8W+","cG9r","0YPRgdGC0Lg=","IOmF","INGE0ZbQtw==","5buj","4Li04Lir4Liy4Lij","INC20YPRgNC9","INC00ZbRgtC10Lk=","0YPRjtGJ0LjQtQ==","5LuK5aSp","7J2065286rOg","57KJ","6JKZ","IETDvG55YQ==","0LXQs9C+0LTQvdGP","IG1pbW8=","INCy0LjQvQ==","44Gd44GT","5q+V","INij2K4=","IOWQjA==","2LPYp9mG24w=","IGthaA==","4KS/4KSv4KSw","z4DOv8+C","amV6","2YrYrA==","IHNhxJ9sYXk=","2KfYrNmH","IOeg","75w=","INis2LPYqg==","IHThu6lj","xrDGoWk=","2LTZgQ==","4Liq4LiV","INGA0LXRgQ==","IOWj","IGJpemlt","IOq3gA==","4KS/4KSs","66Gc7Jq0","INGB0YLQsNC7","INGA0YPRgQ==","IE9jYWs=","5Zyj","IMO6xI1hc3Q=","aXZlcno=","64KY64qU","0L7RgNC+0YI=","0YfQuNC90Ys=","IGlodGl5YcOn","0J3Qng==","INCd0L7Qsg==","4Li14Lii4LiU","INC/0L7RgtGA0ZbQsdC90L4=","2q/Ysg==","INGB0LrQsNC30LDQuw==","IEdpYQ==","bWVzaW5p","IGJ1bHVudXI=","5rih","0LPQvtGC","IGh1a3U=","64S3","44Y=","INin2YM=","INiv2YTbjNmE","INin2LPYp9iz","7Jew6rWs","IM6YzrU=","INiz2YjYsQ==","IOyigA==","INin2YTYr9ix","INGB0YLRgNC+0LjRgtC10LvRjA==","INGD0Lo=","IOyZnA==","0LXQu9C40Lo=","T1ZJRA==","IHRlbWl6","5Lqm","IHRoaeG6v3U=","INC/0YPRgg==","0Y7RidC10Lk=","IHVyxI0=","IMSQw6J5","5qW1","zrzOv8+F","IOC5gOC4mQ==","0LXQstC10YA=","wqDQlA==","7LSd","6Laj","IOCkheCksuCklw==","xrDhu51u","IOODrQ==","IOqzsw==","6bKB","INix2LPbjNiv","6Lqr5L2T","4Lix4LiT4LiR","eW7DrQ==","2KzYp9iq","7KeA66W8","4KSo4KSy","7JWM","0ZbQvw==","IHbDoG5n","INC/0LvQvtGJ","0L7Qt9C80L7Qtg==","5Ymy","IHRo4bqjbw==","0L
vQsNC00Lg=","IOWd","INCc0Lg=","INC00LXQu9Cw0YLRjA==","6ZE=","IGh1eQ==","2KfbjNi3","INC/0L7QstGC0L7RgA==","w7xsZW4=","INmI2YE=","INmK2KrZhQ==","INGA0LXQttC40Lw=","IOy6kA==","IMOHw7xua8O8","2LnYr9iv","0L3QuNCy0LXRgA==","INCd0LjQug==","5biW","z43PgA==","YW5sYXI=","2LPYqtuM","IGJ1bHVubWFrdGFkxLFy","4LmB4Lia","dmVr","INCz0LvQsNC30LA=","5bmF","IMO6ZGFq","INCz0YDQvg==","INC60L7QvdC60YPRgA==","IGTFr2xlxb5pdA==","INi32YjYsQ==","4LiY4Liy4LiZ","INmE2YPZhg==","2LHZgg==","0JrQkA==","IOmdkg==","IOyCrOuekQ==","INGF0LLQvtGA","c3VudXo=","INmF2LTYrti1","6Zm4","IOCkog==","IHZheg==","5Lqk5piT","INGC0LXRgNGA0LjRgg==","0YfQtdGB0LrQvtC5","4Li14LmC","cm9wb2Rh","xLFsZMSxxJ/EsQ==","IOuJtA==","7ZWZ6riw","67O07ZeY","INC30LDRgtC10Lw=","wqDQsg==","44O844OG","INCe0YHQvdC+0LI=","44aN","INiv2Lk=","0J/QvtGB","5rKJ","INC70L7Qtg==","55S15a2Q","INix2K8=","INGB0YDQsNC30YM=","ZWp0ZQ==","IOCkkeCkqw==","IHTDoHU=","w61r","bGFubWFzxLE=","0LrQsNGC","4Liy4LiB4Liy4Lio","44Ki44Kk","z4TOuc6/","IOWn","4KSq4KSk","RVk=","IGptw6k=","IG9ka2F6eQ==","IOqwnOyduA==","6YG/","YsSbaA==","0KDQng==","54OI","IHphcmFy","2q/ZiNmG2Yc=","IHRyw6w=","IG3huqFp","0LXQvdC90YvQvA==","INGN0LrQvtC90L7QvA==","6Zuj","IO2E","5o6J","IHNvcnU=","INCk0LXQtNC10YDQsNGG0LjQuA==","INGB0LjRgdGC0LXQvNC4","5paZ54Sh5paZ","IOCkleCkrQ==","INmH2YbYrw==","4Li44LiH4LmA4LiX4Lie","IE9zbWFubMSx","INC/0YDQvtC00L7Qu9C2","INmI2YTYpw==","IMSNbMOhbmt1","IGFkxLFt","IM+AzrHPgc6s","IHrDocWZw60=","IOC4iOC4s+C4gQ==","INC/0LXQvQ==","bWVuaW4=","IOyYpOuKmA==","ZW1peg==","zr/Pjc+C","LeCkuA==","7ZWY7Iuc","INGF0LLQuA==","44Kw44Op","INC/0L7RiA==","INCe0LTQvdCw0LrQvg==","0ZbQtNC90L4=","7Zic","0YnQuNC80Lg=","6IO4","IMSwbGs=","bWV5","INC30LTQsA==","zrrOu863","0LDQu9C+0Lw=","4LmA4Lio4Lip","2KfZhtin","IM6fzrk=","IOWPjA==","4Li14LiC","INio2LM=","6KeE5a6a","aXNheQ==","dWthcsSx","5rWB6YeP","dsOtbQ==","zrvPjg==","5LmZ","IOCksuCkoQ==","INmG2K/Yp9ix2K8=","0LXRgNC+0Lw=","IHPEsXJhc8SxbmRh","IHLEg25n","xqFt","IGzhuqFuaA==","4KSD","4KWB4KSj","dXpleQ==","INGD0LLQsA==","dsSbZA==","0YvRgQ==","IM6
6zrk=","0ZU=","24zYpw==","4LiH4LiE","cGh5bHVt","IGJlcmFiZXI=","4Li14LiU","5rWu","4KS+4KS44KSo","b3ZpY2U=","6Kan","IOCkuOCkqw==","5bCR5aWz","0LDQvdGC0Lg=","6aiT","IHNvw6F0","6ay8","bGFubcSxxZ8=","IGLhur9w","2ZDZhA==","IHNhecSxc8Sx","INmC2K/ZhQ==","4KWI4KSu","4KS54KSu","INGA0YPQutC4","INi12YHYrdmH","xaFreQ==","6buS","6IGa","44GL44Gr","IHPDonU=","0LXQtNCw0LM=","INGB0YLQvtGA0L7QvdGL","IHJ1aw==","4oCM4oCM","INii2YjYsQ==","INi52K/ZhQ==","w7Vp","44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA","INio2KfYstin2LE=","IGVkZWI=","IHbEjWV0bsSb","0L7Qv9Cw0YE=","INC90LXQsw==","bWF5YW4=","0LrQvtGB0YLRjA==","IHN2xa9q","xJ/EsW5kYQ==","2LDbjNix","TeG7mXQ=","0IQ=","IHlhcHTEsQ==","4KS/4KSl","INmF2YfYsQ==","INC00L7RgdGC0Lg=","INi12YjYsQ==","bWVzaW5l","IETDom4=","5LiA5LiL","542O","INCc0LjRhQ==","INC+0YfQuA==","44Km44Kn","INGW0YE=","IGdpw6Fj","5Zyo57q/6KeC55yL","INin2K/Yp9mF2Yc=","0YbQvtCy","INC60L7QvNGD","IMSwbmdpbGl6","INCz0YDQsNC2","44Gm44KC","IGNo4buv","0L7Qu9GM0LrRgw==","bcSbdA==","0Y/Qs9C+0Lw=","0YfQsNGB0YI=","7Ja8","IGtow7Nh","INCQ0LQ=","INii2YI=","IGt1cnVsdcWf","zqzOtg==","INC20L7Qsg==","INCy0YHRgtGA0LU=","INmI2YTZgw==","IHR1eeG7h3Q=","ecSx","INCS0L4=","IHbhu41uZw==","2LnZitip","IG9wxJt0","2KfZitiv","4KWILgo=","INGB0LDQvNC4","5aqS","IHN2w71jaA==","IOuCmO2DgA==","7IaQ","INmE2Lk=","IGV0a2lu","IE7DoQ==","IHNvdXTEmw==","7Li17J2Y","IOetiQ==","INix2LPZhQ==","INiu2KfZhtmH","IOWutg==","aeG7gW0=","64WQ","6rCI","7LCp","xb5pbA==","0YHRgtC40YLRg9GC","b3J1xI0=","INil2LDYpw==","4LmE4LiC","4Li14LiK","0YDQsNCx","7ZWZ7IOd","IOyJ","cm5law==","INin2LPYqtiu2K/Yp9mF","44CAIOOAgCDjgIAg44CA","INCy0YHQtdC8","IOygleuPhA==","IHZ5ag==","6YCx","0LDQu9GM0L3QvtC1","IGNodXnhu4du","7KeA7JuQ","aWxlcmluZQ==","IOyVhOustA==","INC+0LrQvtC70L4=","4KS+4KS14KSo","4LiZ4Liy","0L7Qv9GA0Lg=","ZHLFvg==","INGB0YPRgdC/0ZbQu9GM","INio2YM=","dWt5","IM+Hz4k=","IHR14bqnbg==","bmljdHbDrQ==","INmH2K/ZgQ==","IGNoaeG7gXU=","zpfOnQ==","5bCP5aeQ","7ZWY7JiA","IGtsYXM=","4buZbg==","IOydtO2bhA==","2YbYp9mF2Kw=","xI1hc3Q=","INin2YTYrtin2
LU=","bMSxxZ8=","INi52YXYsQ==","44CNCg==","0LjQsdC+0LvQtdC1","44KK44Gu","44Wg","5Lmf5LiN","0LrRgNC10YI=","IOyU","z4TOuc6x","INGD0L/RgNCw0LLQu9GW0L3QvdGP","5rKi","IGtlc2lu","7KGM64uk","66i464uI","55yf55qE","IGJha8SxbQ==","5p2x5Lqs","vrg=","2YXZhNmD2Kk=","0L7RgtGA0LXQsQ==","ZMSxbg==","IFDFmWk=","IG3Em2xp","IM60zrfOvM6/","5a+4","INmI2YPYp9mG","IOCkquCkog==","INCy0LXRgNGF","INC10ZE=","Q8OhY2g=","5L2c5Li6","INCa0L7Quw==","INCy0LU=","INC00LXRgNC2","ZW1vYw==","44G444Gu","INCw0YDRhQ==","IGtp4bq/bQ==","IOaYjg==","INC70Y7QtNC40L3QuA==","67c=","INmI2KfZhNiq","IOiw","54Gv","7ZmV","IOq1rOunpA==","IOenkQ==","aXRuw60=","0LjRh9C10YHQutC40LU=","INmG2YHYsw==","INiq2YTZgQ==","2KfZgduM","INit2LPZhg==","4pah4pah","w712w6E=","xJ/EsW4=","xLF5b3J1eg==","IENow60=","INm+2pjZiNmH2LQ=","IM+Ezq0=","IM+Dz4fOtQ==","0L7Qu9C10YI=","zrHOuc60","IGjhuqF0","4Lig4Liy4LiE","5Yaw","IHJ5Y2hsZQ==","aXRlbGk=","wqB6","4Lii4LiB","5qi5","INis2YjYp9mG","5piM","IMO8cmV0aW0=","4Lij4Liw4Lia","4Lib4Lij4Liw4Lih","zqzPgw==","5bKp","INGD0YHRgtGA0L7QuQ==","IHZlcmlsZW4=","aWNobmk=","IHDFmcOtbW8=","INin2YTYsNmH2KfYqA==","7L2c","5pyx","INiz2K4=","0ZbQu9Cw","0YPQvNCw","4Lir4Liy","24zYr9in","5bK4","5LiA5a6a","IOS8mg==","INCf0ZbQtA==","INGH0LjRgg==","0LjRjg==","INCX0LDQvw==","0YLQuNGP","IOqwnOuwnA==","INGC0LXQvtGA","0Y/RgdGM","IHDFmcOtcHJhdg==","KOWcnw==","2YXZig==","IHDFmWVkZXbFocOtbQ==","IFRlbW11eg==","INC/0L7QtNC00LXRgNC2","INC90LXQtNC+0YHRgtCw0YI=","IOydtOycoA==","IGto4buPaQ==","INin2YTYqtit","INmF2YXaqdmG","IHZob2Q=","0LXQstC+0Lk=","0L7QstCw0Ls=","INC90LDQu9C10LY=","77y8Og==","4Lii4Liw","INmF2KfYtNuM2YY=","IGfhu61p","YWzEsW0=","IOy1nOyggA==","2ZHZhw==","4buZcA==","4KWA4KWkCg==","INC/0LjRgQ==","INCy0YHRjw==","0YfQtdC8","b3plbsOt","IOS6mua0sg==","0LXRgNCw0LvRjA==","6riw64qU","INC/0YDQtdC3","INi52YXZiNmF24w=","0LjRh9C90LjRhQ==","IOaysw==","b2Ruw60=","5Y+q5piv","IHBvZHA=","4LmJ4Lit4LiH4Lie","4KS+4KSv4KSm","4KS+4KSH4KSy","4Lil4LiU","INGA0ZbRiNC10L3QvdGP","INGC0YPRgA==","0YHRjNC60YM=","IHNhbGTEsXI=","INCb0YzQsg==","44CBCg==","
INm+24zZiNmG2K8=","5a2m5Lmg","zrvPiQ==","b3ZpdA==","w7xsZQ==","5aWz5oCn","wp8=","ZW1leg==","IGhhbGU=","4omm","IM6Vzro=","z4TOt86zzr/Pgc6vzrE=","a8O9","7ISx7J2E","IHTDvW0=","4KWHLQ==","IHplam3DqW5h","5pm2","IG5nb24=","44CPCgo=","6L2v5Lu2","6YKj5LmI","INC60LLQsNGA0YLQuA==","INmF2YbYuA==","b25lYw==","INCz0LvQuA==","4KWB4KSw4KSV","IFNva29s","IOS/nQ==","0LTQuNCy","w6FsbsOtbQ==","YWNhxJ/EsQ==","YcWfYQ==","INmF2KfZhNuM","IMOWbg==","0LjRgtC10LvQuA==","INiu2LHYrw==","IGt1bGxhbsSxbA==","INmF24zZhA==","IO2aqA==","w6Nu","IHJvc3Q=","IOuWoA==","dWJhdA==","IOWPgg==","INio2LHYp9mK","INC80LXQvdGM","4Lix4LiE4Lij","INC/0L7QvNC+0LM=","INit2LbZiNix","IHRo4buLdA==","5Lmz","IOyLoOyyrQ==","IO2YhOyerA==","IOu5oA==","0LLRgNC+0L/QtdC5","IG5lamVu","0ZbQutCw","IOyauA==","INmF2KjYp9ix","IMSNZWs=","IGthbGs=","IGFtYWM=","2KfYr9iq","INmF2KfYs9mH","IGFyYXPEsW5kYWtp","INCx0LXRgQ==","INC+0YLQtNC10LvRjA==","4b22","IM6kzrY=","dnlr","2KzZhg==","u+qyjA==","INC90LjRh9C10LPQvg==","INi02KfZhdmE","INGD0YHQu9C+0LLQuNGP0YU=","bGFtYXPEsQ==","6L2J","5769","INC20LjQtA==","INC+0YLQvdC+0YE=","INC30LTRltC50YHQvdGO","IFbhu5tp","2YjZhNuM","IHRpc8Ot","IM+Hz4HPjA==","IHByYWNvdm7DrQ==","INmK2YPZiNmG","IGJlxZ8=","2KzYsg==","4Lix4Lia4Lij","IFnDtm5ldA==","INi02LHYp9uM2Lc=","INiq2YjYs9i52Yc=","55eH","4LiH4LmA4Lib","5LiA5qyh","INCg0L7RgdGB0LjQudGB0LrQvtC5","5pyA6auY","IHNwb2x1","0LTQsNC10YLRgdGP","0ZbRgtGD","INC+0LHRgNCw0YI=","ZW5law==","IG1law==","5aaI","INC00L7Qv9C+0LvQvdC40YLQtdC70Yw=","IOey","INmE2YTYqg==","IEhhemlyYW4=","5riI","4LmM4LiC4Lit4LiH","INGE0L7QvQ==","IOqyg+ycvOuhnA==","IG5ow6k=","IGJ1Z8O8bg==","b3bDqW0=","INC30LDQstC10YA=","INC00LLQuNCz","5LyZ","IG51w7Rp","0LzQtdGA0LjQug==","INmG2YXZiNmG2Yc=","6I23","0YPQstCw0LvQsA==","57+7","IHPDom4=","0L7Qs9C+0Y4=","2KfYs9mK2Kk=","0YPQvdC60YI=","w6Fuw61t","0LXQvdC90L7QtQ==","IHBow7p0","IOCkruCksA==","INin2YTZiNi3","INC70LXQs9C60L4=","IOOAiw==","66Gc65Oc","IEthc8SxbQ==","2YrZhNmK","IGJhxJ9sYW50xLFsYXI=","INGC0YDRg9C0","2LfZhw==","IGt2xa9saQ==","0YHRgtC+0Y8=","IHNhdMSxxZ8=","IG
jhuq11","INio2YfYqtix24zZhg==","INGB0LXQu9GM","4Lix4LiZ4Lin","b3N1","4KSv4KSo","5Zuz","zrnOtA==","24zYqtuM","IFF14bqtbg==","INC10Lk=","4LmA4Lin4Lil4Liy","7Iqk7YOA","7IKs66W8","INin2YfZhA==","zrfOsw==","IGvhu7c=","INC90LDRgg==","4oCh","0ZbRh9C90LjRhQ==","INGA0LDQt9Cy0LjRgtC40Y8=","ZWNpYWw=","INGF0L7Qt9GP","0LLQsNC10YI=","IMSQ4buZ","IOmT","IG9rYW0=","INCy0YHRltGF","IFByYXpl","66Wg","zrnOus6x","5qyy","IGdlcsOnZWtsZcWf","56WW","INC+0LTQvdC40Lw=","wqBN","IHJlbms=","IOCksuCklQ==","44OV44Kn","INmG2LLYrw==","5bm7","IMO6emVtw60=","5o+h","0LDQu9C40YHRjw==","IMOU","IHlvcnVt","IM+Az4HPiQ==","44Oz44OH","6ZaL5aeL","44O844Oq","IOyWvOq1tA==","27HbsQ==","bMO8xJ/DvA==","2YbYtA==","4LmI4Liz","6JuL","INij2K8=","IFdpbGxp","6Kqy","IHPDvHJkw7xy","IEV4dGVybsOt","IHDFr3ZvZA==","INiu2KfZhtmI","INC60L7RgtC+0YDQvtC1","IG1vaGw=","IHN0xJs=","5YeP","7IK8","YWJhbmPEsQ==","4LmB4LiZ","4Liq4Liz4LiE","5oKj","YWJpbGVjZQ==","6Ziz5Z+O","zpHOmg==","IGNo4buvYQ==","IOyVhOuL","2LfYqNmK2YI=","zpnOn86l","0YDQvtCy0LDQvdC40LU=","5Ye9","IOy8","0YDQvtGE","4LmH4LiZ4Liq","IOOCpg==","77ya44CM","4buLYQ==","IGhQYQ==","bWFuxLE=","w6FsbsOtaG8=","2YjYqtuM","INC70LXRh9C10L3QuNGP","anRl","LdC0","5YWo5Zu9","INCx0YPQtNGW0LI=","IHphdMOtbQ==","IMO2eWxl","7J206rCA","c3RhbA==","aXZhdGVs","IOacqg==","IHBvxb5hZA==","INGB0L3QuA==","IHBvc2xlZG7DrQ==","INGB0YLQsNC90LQ=","4KWA4KSP4KSu","INi52qnYsw==","0YDQuNGP","w6N5","4buLcA==","IG9rdWw=","4LiH4Lir4Lih4LiU","INCy0L7Qt9C90LjQug==","bcOt","56ef","IMSR4buRYw==","IHBvZMOt","IMWZw61q","INGC0LDQutGW","4Lia4Liy4LiX","IOuztOq4sA==","4Lil4Liy","0LXRgdGC0L4=","IOeUqA==","0LjQvdGL","INGA0YPRhQ==","INGA0LDRgdC/0L7Qu9C+0LY=","0YnQtdC90L3Rjw==","IGPhu60=","4LmJ4Lia4Lij","4KWN4KSv4KS14KS4","776a","INC00LDQu9GM","INi22K8=","2YTZitip","INC60L7RgtC+0YDQvtCz0L4=","IGR2ZQ==","IG5o4bqhYw==","0YTRltC60LA=","4KWI4KSf","6Ieq55Sx","INC/0L7RgNGD0Yg=","5pyL5Y+L","IGTDtnJ0","INGA0LDRgdC/0YDQvtGB0YI=","44Gn44Gv44Gq44GE","INC/0LXRgNC10LM=","IMOhbmg=","IFbDrQ==","2LjZuQ==","4KWN4KSw4KSj","IGJpbGlt","IGxpZMOp","IGTD
rWt5","IMSQ4buTbmc=","IM61z4HOsw==","IHpub3Z1","z4POuc6x","0Z4=","4KS44KSt","ZWtr","IM68zrXPhM6s","0YHRgtC40Yc=","24zZhtqv","INGP0LLQu9GP0Y7RgtGB0Y8=","IOW7ug==","z4PPg86x","0LDQstC70LjQstCw","4LiB4Lij4Lih","56yU","INCz0LU=","INix2Yc=","INC80LXQuw==","INC90LDQv9GA0LjQvNC10YA=","INC80LjQug==","INin2YTYs9mD2KfZhg==","5qSc","INCa0YDQsA==","IHbDoGk=","2KfYptmF","IM+Hz4HOrg==","bGXFn21l","IGphcw==","6rKM7J6E","IG1hw6c=","IOynhO2WiQ==","4KWH4KSm4KSo","IHbFr2JlYw==","INmE2YY=","6KuH","4omh4omh","0LvQtdC90LjQtdC8","2LnZhtuM","44Oe44Oz","xLBa","IMOWxJ8=","IOyXrOyekA==","ecWh","INGB0YLQsA==","IOC4quC4s+C4q+C4ow==","IOCkqOCktQ==","44CC5L2G","0L7Qu9GM0L3Qvg==","IHlhbsSxbmRh","6LK0","IGplZG5vdGxpdg==","IOWOnw==","6aCF55uu","IOCkruCkpuCkpg==","66as7JeQ","INmF2KfZig==","INGH0LXRgNCy","IGTDoXY=","2YTbjNmH","PyM=","xI1uw61t","0YDQtdCz","INC/0YDQuNC80LXQvdGP","44KK44Go","6rCZ","IHRvcGxhbQ==","aWxlxZ8=","IGthdGVnb3I=","0YLQsNC7","44Gr44KI44KL","IGRvbcOhYw==","IOq3nA==","INmH2LLYp9ix","IHDFmcOtc3R1cA==","xLFsxLF5b3I=","0LbQtNC4","IETGsMahbmc=","IFBo4bqtdA==","IMOnw7xua8O8","6rWs6riA7IOB7JyE","b3ZhbsO9Y2g=","INi52LQ=","IOCkleCksOCklQ==","xb7DrXQ=","IHbEm3TFocOt","INin2YXaqdin2YY=","IG7DtG5n","IHrDoW0=","4KWM4KSo","0LXQutCw0YA=","wqDQog==","a2FtaQ==","INGA0LXRgdGD0YA=","0L/QvtGB","2Y7Zgg==","zq/Ouw==","INiz2KfYstuM","IMOnxLFrYW4=","IGTDrXTEmw==","INiq2LXZiA==","56+H","0L3QtA==","IHLDoW1jaQ==","aG9uZw==","INGB0ZbQvA==","c2Fr","0LrQtdGC","0LTRltC7","57mU","IHRoxrDhu59uZw==","INC90LXRlw==","0LfRlg==","xZnDrWQ=","4KS/4KSk4KSo","4KSP4KSV","IHPhu69h","INmF2LHYrQ==","6Z4=","IGPGsOG7nW5n","Oi46","0YLQtdC9","6Imm","IGto4bufaQ==","IOq4sOykgA==","bGFuxLFy","5b2p56Wo","2LbbjA==","IHV6YXY=","IGJvaA==","w6ht","IOaj","bmljaQ==","KOeBqw==","5YWz5LqO","0ZbRh9C90ZY=","4LiB4Liy4Lij4LiT","IOyyqw==","0YDRg9C10YI=","IGFyxZ9pdmxlbmRp","0YLQuNC8","4Liy4Lig","INio2LHYp9io2LE=","IOC5gOC4iw==","IMSRw6pt","6Lez","IHnDtm5ldGlt","IOmVtw==","44OG44Os44OT","0LzQsNGC0Lg=","6LSj5Lu7","aWNrw71t","6Lg=","4LmA4Lir4LiV","66C
M","INix2Yo=","INCy0YvQtNC10Ls=","5Ye6546w","INC/0LXRgQ==","IOyii+ydgA==","IOCkieCkuOCkqA==","IEFyYWzEsWs=","INGH0LDRgdGD","bGF2YQ==","IO+9ng==","5oGL","2K/bjNiv","4oCZZGVu","IOWInQ==","2YjYr9ip","0YfQuNC70Lg=","INGF0LDRgNCw0LrRgtC10YDQuNGB0YLQuA==","2KfYs9iq2KfZhg==","4KSm4KSw","INio2YjYr9mG","INC/0LDQu9GM","INGC0YDQsNC00Lg=","INC00LXRjw==","INiu2LQ=","IHBva3JhxI0=","IOq1rOq4gA==","0LrQvtCy0ZY=","IHTEsWs=","IGjhuqVw","IHphbG/Fvg==","4KWn4KU=","IOuLteuzgA==","0LzQtdGI","7Zqo","IHNwb2x1cA==","y4Y=","6L6m","IGfhu5c=","IOWumg==","k24=","YXPEsW5kYW4=","LcSx","INCx0LXRgNC10Lc=","5aSn5a24","INC30L3QvtCy","IEhvw6BuZw==","INiv2YjZhg==","IGFubGF5","INmI2LLYp9ix","INi52YTZhduM","6KOc","IGTDvG55YQ==","INC30LDQu9C40Yg=","0LTQsNC10YI=","zr3OtQ==","0LjRh9C10YHQutC+0LPQvg==","7Iqk7YWc","INCR0LXRgA==","INC00LY=","INC+0L/QsNGB","z4bOsQ==","IHp2bMOh","IHTDtA==","0LHQtdGA","IM6czrHPgQ==","dGnEn2luaQ==","44Os44Oz","IEtobw==","INGW0L3RiA==","IO+/pQ==","7LCs","772h","INC90L7Rhw==","6KiK","xJt0aQ==","5b+Z","INqp2LHYr9mG2K8=","IMSR4bqpeQ==","INGB0LrQsNC30LDQsg==","64Ol","5bGs","IOCktuCkueCksA==","INqp2YXaqQ==","wqDQnw==","xLFuY2E=","0L3RltCy0LXRgNGB0LjRgg==","INqv2YjZhtmH","IFRvcGxhbQ==","IGnFn2FyZXQ=","5L2g5Lus","IGRlcmVjZQ==","IOyCrOyLpA==","IOyekOq4sA==","5a6e546w","55Sf54mp","44Gu5LiA","INGA0L7QvA==","2YjYstmH","IOOBqA==","7ZmN","2YrZgg==","IOWQjeeEoeOBl+OBleOCkw==","INm+24zYsQ==","INC/0L7Qu9C10Lc=","7Lap","INC60L7RgNC/","kOuLpA==","4burYQ==","zpXOpA==","INC20LXQu9C10Lc=","44Gj44Gx","IHh1ecOqbg==","IOul","4KWH4KWkCg==","INGB0YLQsNC70Lg=","IHBvbW9jw60=","IGR1cnVtZGE=","INC/0YDQvtGI","bGVuw60=","zrLOv867","IOaWh+eroA==","dMSbeg==","ZMOtbA==","IGRydWjDqQ==","INGC0L7Qs9C00LA=","IGhyw6E=","0L7RgtGM","4Liy4LiB4Lij","INiq2LXZhQ==","INmF2K/Yqg==","0LrQsNC00LXQvA==","IHBhdMWZw60=","5LmL5YmN","2LPYqNip","INC/0L7QutGA0Ys=","IG7DoXA=","IF97fQ==","65Ox7ZWZ6rWQ","INil2YTZig==","IMO2emc=","55qG","IGhheXZhbg==","IE5pc2Fu","2LrYp9iy","INiq2Ko=","INC00YPRhdC+0LI=","INCf0L7RjdGC0L7QvNGD","0YzQvtCz0L7QtA=
=","IGt1xZ8=","IOCkh+CkuOCkrg==","2KzbjA==","IOOCvw==","INCy0LrRg9GB","54A=","INCy0YvRiNC1","4oCZZGFu","INin2K3Zhdiv","IHRhbGVw","IM+I","IGRvbGF5xLE=","INqv2LLYp9ix2LQ=","0LHQvtC7","INin24zZhtiq2LE=","0YDQvtGH","KeKAjw==","IOuQoA==","IGtvdXA=","KOaciA==","6bG8","INC+0LPRgNCw","INGA0LDQt9C8","INiq2LPYqg==","IHDFmcOtc2x1","7ZuI","IOuMgO2VtA==","4LmB4Lib","0LDQvdC90YvQtQ==","IOyduO2EsA==","IGt1bGxhbsSxbGFu","IHp0cg==","5oqA6KGT","4KS/4KSb","INin2YTZhdik","b3ZhbHk=","dXN0b3M=","IMO2cmc=","IOWkqg==","zrXOuc6/","IHXEjQ==","INi02qnZhA==","5bu6562R","IGNo4bqheQ==","IM+Hz4HOtw==","0L3Rg9GC","INio2KfYudir","IE7Em2t0ZXI=","0YPRgtGC0Y8=","44Gn44GZ44GL","IHNhecSxbMSx","0LjQvNC+0YHRgtGM","INC/0LjRgtCw0L3QvdGP","IGvDrW5o","IGhyYW4=","b2tyYXQ=","IGVkaWxpcg==","IOCkleCkueCkpA==","IHBhY2k=","4KS+4KSy4KSo","INC40LTQtQ==","IFplbQ==","IHNsdcW+Ynk=","0YHRgtCy0LXQvdC90YvQuQ==","INii2YbYp9mG","INGC0L7QstCw0YDQuA==","INiq2K3ZhdmK2YQ=","IFnDvGs=","INC60LDRgtC10LPQvtGA","7YuA","INC60L7RgQ==","INC+0LHQvtCy","IHByb3N0xZllZMOt","INGB0L7RgQ==","INCQ0LvQtdC60YHQsNC90LQ=","IOC5gOC4guC4lQ==","5b+F6aG7","4Lix4LiK","INmE2K8=","44CB5LiA","IM6czq0=","0YPQstCw0YLQuNGB0Y8=","5pWP","44O844OQ","2KfZhNmE2Yc=","INio2YfYpw==","5Za2","6LS1","5pa55ZCR","IOy4","INmG2KfZhdmH","0YzQutC+","IHZvZHk=","dsOtYw==","4LmB4LiI","INi52YTbjNmH","4LmB4Lij4LiH","zq/Ovc6x","44Gs","INCe0L8=","IHNheWY=","77yM55Sx","5Ly0","INGD0LTQvtCx","44G+44Gg","INC90LXQv9GA0Lg=","wo4=","4KS+4KSc4KSq","cGxuxJs=","IOyXhA==","IHLFr3pu","IHjhur9w","44OW44Or","INC30LDRhdC40YHRgg==","INmF2LXYsdmB","IHbFoWVjaG5v","44Gu44GK","IFRo4buL","IG3DuWE=","v5A=","INC/0YDQuNC90YbQuNC/","INin2YbZgtmE","0LPQsNGA","IG1vxb5ub3N0","2YLZitmC","IG90ZXbFmQ==","IGZhaw==","IG5ndXk=","0LHQvtCy","bGFjYcSf","2KfYt9ix","44Gr44KI44KK","5piv5Zyo","IHThuqduZw==","7J247J20","YcWZ","56Kw","z4zOvM61","IOqwiA==","INij2K3Yrw==","2LrYsdin2YE=","INmK2K0=","772n","INin2YTYrdmK2KfYqQ==","IGxlcA==","IOC4rg==","dGFl","IGzGsMahbmc=","6L2u","INC30LzRltC9","INCa0LjRl9Cy","INC80ZbRgdGP","0LrQ
sNCy","4LiV4Liw","IG1ub2hv","IE5naOG7iw==","6JmO","IOODnw==","IHByw6FjaQ==","IGfhu5Fj","IFllbmk=","2KfYttmK","IOiR","INC60LvQsA==","j25n","z4TOtc6v","IGJlbmk=","INi52K8=","IGFrdHU=","INmI2YLYrw==","INC/0L7QtNCz0L7RgtC+0LI=","IGdpYWk=","KOawtA==","IHNhw6c=","INmF2YbYp9iz2Kg=","4paL","2ZDZhw==","6Y0=","4Lit4LiX","INiz24zYp9iz24w=","b2xpdA==","INin2YTYrNiy","2LfZhNio","IHNleQ==","ZXJlbmNl","7LSM","INCy0L3Rg9GC0YDQtdC9","IOC4meC4suC4og==","IOyViuyVmOuLpA==","b2xpaw==","5pyA5ZCO","5Luq","INGA0ZbQtA==","6LyD","INio2KfYqA==","0YPQtNC4","INGB0YLRg9C/","IMSR4bupbmc=","IMWfw7Z5bGU=","IO2VmeyDnQ==","INCy0LvQsNGB0YLQuA==","IGjDo25n","4LmJ4Liy4Lin","INqp2KfZh9i0","IOuTrw==","INis2YXZhNmH","INiv2qnYqtix","YWRvbHU=","INiq2KjYrw==","2LjYp9mF","IHpuYcSN","INiv2YbbjA==","IHPhuqFu","5byx","z4DOuQ==","IOeQhg==","INmB2LXZhA==","0LjQvdCz","0JrQng==","INCh0L7Qsg==","IHppeWFyZXQ=","INiv2YU=","56u5","IHNhaGliaQ==","aXNheWFy","xJ9h","INC/0LXRgNGW0L7QtA==","IHNuYQ==","KOacqA==","INC90LXQtQ==","INGE0LDQutGC0L7RgA==","0LzQtdC2","5bqE","csOhxb4=","0L7QutGA0LXQvA==","IMW+YWw=","4Li04LmA4Lio4Lip","6LGq","b3Vjw60=","IFVsdXM=","IHRha8W+ZQ==","2KfZiNmG","0L3QuNGC0Lg=","0L3RjNC+","6424","INmD2LHYqQ==","5Zyz","IEFydGhyb3BvZGE=","INGC0L7QtNGW","INiv2LHYtdiv","4Li44Lij4LiB","INGB0LLQvtCz0L4=","6K+06YGT","IGPDoW5o","5pOK","IOS4i+i9vQ==","6Im+","IG5pa2R5","2K7Ytw==","INGB0LXQudGH0LDRgQ==","2YjZitmE","YW1ldA==","66y47J2Y","IEXEn2l0aW0=","5aSn5Lya","IGLFmWV6","0LfQsNGG0ZbRjw==","IHR5dG8=","0L3QsNC5","2LrZhQ==","IOmp","6K6h566X","VMO8cmtpeWU=","IG1ub8W+","5ZCI5L2c","5pyN5YuZ","IGthxb5kw70=","INGO0YDQuNC0","IM6yzrE=","4KWC4KSa","5ZCM44GY","IOeL","zq/PhA==","2YjbjNmG2Ko=","2KfZhtiz","5pyA5aSn","IFThu6s=","6a2U5rOV","INCx0LvQuA==","INGC0LDQutC+0LU=","44Gc","44CAIOOAgCDjgIAg44CAIOOAgA==","7J2066mw","INmC2LPZhdiq","INC+0YbRlg==","0L3QuNC60YM=","IELhuqFu","INC+0YDQs9Cw0L3RltC3","w7xwaA==","IGl6aW4=","IO++ig==","zrXOr8+C","4LiH4LmB4LiV","44Gh44KJ","0LLQsNC20LA=","IOaspw==","zrnPjg==","z4DOrQ==","INC60YDQtdC/","INGG0
LjRhQ==","5oSf44GY","55Wr","2YPZiA==","0LXQvNC+","xb5lbg==","5bmz5pa5","INmF2KzZhdmI2LnZhw==","INGB0LLQvtC4","IOOE","4Lib4Lij4Liw4LiB4Lit4Lia","INC/0YDQvtGC0Lg=","2YjbjNmH","6K6k5Li6","z4bOrQ==","0LjRh9C10YHQutC40Lk=","5qWa","INC/0LDQvw==","zrTPgc6/","IGt1bGxhbsSxbQ==","IHpibw==","IMO6c3DEmw==","INmF2LI=","IEZhaw==","0LXQu9GM0LfRjw==","5rS75YuV","INCf0YDQsNCy","pnk=","5YOV","5pGY","INix2KY=","IM+Gzr/PgQ==","0LzQuNGC","IHRpY2FyZXQ=","5rOV5b6L","5bm05Luj","7IiZ","5b+g","4LmH4LiZ4LiX","INGD0LY=","INmF2KrYrdiv2Yc=","IHRy4budaQ==","INix2K0=","INqp2Yjahg==","INC+0L/RgNC10LTQtdC70LXQvQ==","INiy2YXbjNmG2Yc=","IG7Ds25n","IG5n4bun","Tmjhu69uZw==","INC60LjRiA==","IGpkZQ==","IOS4iua1tw==","5YuH","IHRhbsSx","4LmM4LmB4Lil4Liw","INGA0LDRgdGC0LLQvtGA","INGB0YDQtdC00YHRgtCy","IGPDoW4=","IHN5c3TDqW11","24zYtw==","INGB0LjRgdGC0LXQvNCw","IOue","INGH0LXRgg==","6YOo6Zeo","5biw","IG1pbGxldA==","IM6VzrvOu86s","4KWH4KSW4KSo","IHJlcHVibGlreQ==","0YDQsNC80Lg=","IOCkuOCkruCkuA==","IGHDp8Sxc8SxbmRhbg==","2KfYr9mE","INCx0LXRgdC/","44O74pSB","5Zyt","b2N1","a8OhbsOt","2YjYsdi0","6561","IOeB","6LCB","IHPDoW0=","IM69zrXPhg==","YmlsaXI=","IG3DrXN0xJs=","IMW+ZW4=","IGlsw6c=","IOungQ==","44CRCg==","INmF2YjYp9ix2K8=","INin2YTYtNmK","IOq4sOuhnQ==","IHRhZHk=","4Lit4Liy4LiI","INGB0YQ=","IHNwb2xlxI1ub3N0","IHTDqW1hdHU=","2YXYp9mF","2YXYuQ==","INC70LXQtg==","INqG2LTZhQ==","IGnFn2xldA==","INmG2LPYrg==","5Lyw","44GN44Gq","44CD","5bKX","IOWtkA==","IGLhuqNuZw==","54yu","IGPhu6luZw==","INC60YDQsNC5","IOiLseivrQ==","0KDQkA==","2LLZhg==","6IOe","IHPDvHJlw6c=","44OV44OI","INC60ZbQu9GM0LrQsA==","bmXEn2lu","b3bDoW55","0LvRltC9","IHbDvXJheg==","INGB0YfQuNGC0LA=","INC/0YDQsNCy0LjQu9C+","INC40YHQv9C+0LvRjNC30YM=","IGvDqW8=","IHlha2xhxZ/EsWs=","INmI2KfYqNiz2KrZhw==","0L7QstCw0YLQtdC70Yw=","IOyyoA==","INin2YTYudin2YU=","5Z2P","IOC4iQ==","IFPGoW4=","zrvOuc6/","7LaU7LKc","IHNsdcW+ZWI=","INC00LXRj9GC0LXQu9GM0L3QvtGB0YLQuA==","0LfQvA==","INC/0L7Qt9C4","LjsuOw==","INC/0YDQvtC40YHRhdC+0LTQuNGC","4Liy4Lii4LmD4LiZ","55qE44Gr","IOC
kh+CkuOCkuA==","0L7QvNC10YI=","IM6xz4E=","4KS+4KSX4KSw","aWPDrWNo","IHBvbG/Fvmt5","6rOo","5oOK","IMO2bmVy","IHjhuqN5","INmG2LjYsduM","IG5naOG7iQ==","IOC4nOC4pQ==","INGA0L7Qu9GM","INGA0LXQvNC+0L0=","2LXZiNix","VsO9","IFPhu5E=","INGB0YPRh9Cw0YE=","4Lir4Lii","INin2YLYr9in2YU=","IGVya2Vr","IOiN","IMSRw7Rp","INC60L7QvdC60YDQtdGC","5qyK","IOebrg==","2YjaqQ==","bMSxa2xh","IHBhemFy","zqzOvc+J","0YPRgdGC0LA=","44Gq44Gf","INmH2Ybarw==","0K7Qmw==","INCy0LXQu9C40Lo=","IG5o4bub","IOyLnO2XmA==","KeydmA==","2YPZhw==","IOC5geC4pQ==","27LbtQ==","INin2LHYs9in2YQ=","INC+0LrRgNC10Lw=","zqzPgg==","INCy0YvRhdC+0LQ=","dsSbdMWhw60=","INi32LHbjNmC","INC60L7RgNC+0YI=","0L3RlA==","44KK44Gr","IOS5nw==","2K3YtQ==","2LnZhdin2YQ=","b2xvamlr","INix2KfYqNi3","56qX","IGdpeg==","IGNo4bq/dA==","5qij","4Liq4LiH","2YjYqtix","INGP0LrRgw==","54++5Zyo","INC+0YLRgdGD0YLRgdGC0LI=","IOq0keqzoA==","0ZbQutC4","5YCk","6K6i","IGRsZQ==","IOWg","5qip","6K6v","5ZSQ","IOKWsg==","IGxpc3RvcA==","IGRhdG92w6k=","z4TPjM+C","INC+0Lc=","zrTPjA==","6JKC","27PbsA==","44Oq44O844K6","INmF2LHaqdiy","INC/0ZbQtNGC0YDQuNC8","INGB0LXQtw==","6aGY","IG9sYWNha3TEsXI=","5rqA","IM+AzrXPgc65zr8=","0YTQsA==","z4TOt8+Dzrc=","57uD","0J7QtA==","zrTPhQ==","4oSD","IGzhuq9w","IOuEmA==","2LfYp9mG","INm+2YbYrA==","2KrYp9mG","aWxlcmluaW4=","w4g=","INiu2YjYtA==","IOyKrA==","INin2YTYsdim2YrYsw==","4bq1bg==","INi02KfYsQ==","ZXJ1","0LbQuNCy","4LiZ4Liy4Lii","IHPhurs=","IOCkieCkmg==","44Gr44GL","56GA","IHnDvHLDvHQ=","INCh0LXRgNCz","INC60LDRgQ==","INCR0L7Qsw==","IOyWtOuWu+qyjA==","IOefsw==","IMO2bGTDvHI=","0LvRltCy","IGhvw6BuZw==","IGLhu5l0","556s","IOy5qA==","TuG6v3U=","IG5ldnk=","IOycpA==","IHNvdcSNw6FzdA==","xLFzxLF5bGE=","IHTDvGtldA==","Ym91","INC00LLQvg==","2LPYtw==","5b2T54S2","44Oo","INiy2KfYr9mH","IOmDqA==","INix2YjYrQ==","IO+8jQ==","INC80ZbRgdGG0LXQsg==","zrjOtc69","4LiG","0LvQtdC90ZY=","542y","IEhPSA==","c8Sxbg==","4Li04LiV4Lij","6LKh","IHDFmWlk","4LmA4Lir4LiZ","bMO9","6KiA6JGJ","4KST","4paN4paN4paN4paN4paN4paN4paN4paN","2KjYp9io","44O844OV","0L
zQvtGA","6L+H56iL","IOODmw==","IEtpbmg=","7ZWc6rWt","IOyWtOuWpA==","INCy0LvQuNGP","IGZheWQ=","INi12YbYuQ==","IGFsxLFy","IGV0dGnEn2k=","zqzOug==","aW1pemlu","4Lix4Lia4Lic","INC30LXQvNC10LvRjA==","2YrZhNin2K8=","5rao","54+g","INij2Lo=","IHprdQ==","4oCeQQ==","4Liy4LiV4Lij","YXlp","44Op44K5","0LjQu9C+","IMSR4buN","Ls6V","65w=","IM68z4DOv8+BzrXOrw==","5bi2","IGFydMSxcg==","4Liy4LiN","5b+Y","dGFseWE=","IHBvemTEm2pp","IG5lcMWZ","IOa5","2KfZh9uM","IHNhdMSxbg==","IOuyjA==","2KzZiA==","5LiA55u0","7JWE7JqU","wqBQ","INib","INC/0LDQuw==","6KGo5oOF","IGNhbmzEsQ==","5oiQ5Li6","2YjZhtin","IOKArw==","4LiB4Liz4Lil","5Y2W","IM6xz4M=","0LjQvdC+0Lo=","0LDQvNC/","4Lil4Lit4LiH","2YLZgg==","INC/0YDQvtGF0L7QtA==","44KE44KL5aSr","z4fOtw==","6LKo","INmB2YrZhw==","2YrYsdmK","INCy0L3QtdGI","IGthcmFr","2KvZhA==","2YfZiNix24w=","2KfZiNix2b4=","IMSR4buP","amnFoXTEm27DrQ==","2KrYqNix","IOq3uOqygw==","IGfDvGw=","INC/0L7QutGD0L8=","bGlsaWs=","IHpkYQ==","5YmN44Gr","INmF2YfZhtiv","IM6Rzp0=","INqp24zZhNmI2YXYqtix","IHDFmWVo","0LDQu9C10LY=","IGtheW4=","6K6/","IOykkeq1rQ==","INGI0LjRgNC+0Lo=","INmF2LTYp9ix2qnYqg==","4oCC","IO2XpA==","IOygnO2SiA==","INi024zYsQ==","ZXNpbmRlbg==","0YDRltGH","6I+y","0YHQutC+0YA=","ZXRpaw==","4Liy4Lic","INi32KjbjA==","zrrOrQ==","IOyeiOyWtA==","IGRlaw==","0YDRltC5","5YaS","bsOtY2k=","rqQ=","INmF2LHYqtio","IHlhesSx","w7xzbMO8","7Jy864KY","ZWxlcmluZQ==","IHlvxJ91bg==","INCx0LDQug==","zpnOnw==","zqzOu8+F","57SZ","INGA0YPQutCw0LzQuA==","IMOnw7Z6w7xt","7KCV7J2E","IGfDvMOnbMO8","zrvPjA==","IGJlbGxp","w63FoWU=","IM+Mz4DPic+C","IG5hxaE=","IHDDoXI=","0YjRgg==","IOyGoQ==","4KWC4KSw4KSk","IM+Azr/Ou8+N","57Ch","6IKv","5rm+","IOS6iw==","IOCkrOCkuA==","IOustOujjA==","0LTQuNC90LA=","6Kqw","0LvQtdC2","IMO6xZlhZA==","INC+0YHQstGW0YLQuA==","INCy0ZbQtNGH","INC/0YDQuNC30L3QsNGH","55Sz6K+3","J3lh","5L+K","INmK2YjZhg==","INiz2Lk=","INCU0LDRgtCw","6KiA44GG","INit2KrbjA==","IEppxZnDrQ==","INCl0LDRgA==","6ZmI","4LmI4Liy4LiI4Liw","IHNheWVzaW5kZQ==","INGC0YDQtdCx0LA=","6rCA7KeA","IHllbWVr","6Kaa","4bq3bg
==","44CA44CA44CA44CAIOOAgA==","IOS4nA==","INmI2Kc=","INmF2YjYsw==","INC60L7QvNCw0L3QtA==","IHNlw6dpbQ==","0YfQtdC90L3Rjw==","IHRvdGnFvg==","IHLhu61h","j2E=","2KLZhQ==","0YbRltC+0L0=","Ojo6Ojo6Ojo6Ojo6","0J3QkA==","xLF6YQ==","aGVuZA==","IOCkq+CksA==","4Lix4LiU4LiB4Liy4Lij","IEPDoWNo","INC/0L7RgtGW0Lw=","IOG8gA==","2KfZhNin","4buh","2LHbjNmF","5a6r","INiy2YXbjNmG","0YDQtdGB0YI=","0LHQsNGH","2YfYsdiz2Ko=","0L3QvtCz","77yM5aSn","IOuYkO2VnA==","IHrFr3N0","INCS0L7QvdCw","5aSH5Lu9","INin2YHYqg==","b2pl","0YHQutGW0LvRjNC60Lg=","IG5o4bq5","INC60LXRgNGW0LI=","4b+m","5pa55qGI","0LfQsNGG0LjRjw==","INCy0ZbQtNC/0L7QstGW0LTQvdC+","44Kk44K5","0LPQsNC7","INC+0LHRi9GH0L3Qvg==","2KfZiNix2b7ZiNuM2YbYqg==","5a6c","bG9zdGk=","6L+b5YWl","dXlvcmR1","67Kk7Yq4","5omL44KS","0J/QvtC0","INmF2K3Yr9mI2K8=","INii2YXYrw==","YXJha3Rlcg==","55qE5aSn","IHPEsWNhaw==","bGFudA==","IGThuqV1","INmG2qk=","6ICF44Gu","IGtlbmRpbmk=","INC/0LDRhtC4","IOq4sO2DgA==","INCy0LzQtdGB0YLQtQ==","0LLQsNC10YLRgdGP","IOuniQ==","IGNodsOtbGk=","2K7bjA==","2YTYuQ==","bsOta3k=","772kOg==","65Cc64uk","7KeV","INC60LLRltGC","qOyWtA==","bGnFvg==","IOu5hOuwgOq4gA==","IGto4buRaQ==","IOuwqeyGoQ==","ZWNoYW4=","INC30LDQutC+0L3QvtC00LDQsg==","INCw0LrRgg==","66y47KCc","IE7Dsw==","IOeCuQ==","aGxlZGVt","INGB0LLQvtGX0YU=","INix2YLZhQ==","5pu8","4KS/4KS14KSw","5Y6a","INCa0L7QtA==","4KSt4KSX","7J6Q64qU","4LiZ4Lih","0YPRgdCw","IGfDvG7DvA==","IMSRw61jaA==","IHRy4buv","5be7","6ZO26KGM","2K3Zhg==","6K6o","zrPPhw==","4b24","YWxhcsSxbmRh","IGthZg==","2YjYp9is","INC40YHQutC70Y7Rhw==","IG5oaeG7hQ==","4buNdA==","IOybuQ==","IOmdog==","44Gu44GM","INC80LDQu9C+","0ZbQu9GW","IGJpw6pu","bsOpbXU=","0L/RgNC40LzQtdGA","4pag4pag","IGthbXA=","INCy0LXRiQ==","xI1lbQ==","4KWB4KSn","5p+7","2KrZiNmG","5Y+q5pyJ","44Gv44GE","IOC4o+C4p+C4oQ==","44Ke","44GZ44KL44Go","5b6I5aSa","4LmI4LiV","IHN2xJt0YQ==","IOqwgOqyqQ==","2q/Zhw==","YW5kYcWf","44Oq44K5","z4nOvM6x","INiu2YjYqA==","57SF","0YfQuNGB","7KKM","INit2LbYsdiq","INCy0LjRgNGW0Yg=","2b7YsQ==","IHTDvWQ=","IGtvbnRybw==",
"0LTQtdC50YHRgtCy","44Gf44KB44Gr","7Ik=","0LzQuNC90LjRgdGC0YDQsA==","4oCv","5YmR","0L3QuNGG0ZY=","5aaH","INC70LjRiA==","44Gj44Gm44KL","0L3QsNGA0YPQtg==","0YnQuNGF","z4TOv866","b3bDoW5v","2KrYsdmE","0YDQtdC6","2LrYp9iq","IG9tZXo=","7JOw","IMOcbA==","772S","bMSxxJ/EsW7EsQ==","IHbGsOG7o3Q=","IGLEm8W+","w5xS","IOODvg==","IGRvxJ9hbA==","IGhhdMSxcg==","IHN2w71t","7KeA64+E","4LmA4Lig4LiX","IHZheQ==","IOaZgg==","4KWN4KS14KSq","IHBsbw==","6aKE6KeI","IMOnxLFrdMSx","INiv2YY=","bsOhbsOt","6reA","7ZiA","4Lie4Lia","bXXFn3R1cg==","5a6Y5pa5","IO2UhOuhnOq3uOueqA==","6YCf5bqm","bGVyZGly","0YfQtdGB0LrQvtCz0L4=","IMSwbnNhbg==","4pSD","IOCkh+CkpOCkqA==","0KHQodCg","INin2YXYsQ==","IGvDtnTDvA==","2YHYtA==","IGJvag==","INGG0ZbRlNGX","IHPDtnlsZW0=","0L3QuNGG0Ys=","44CC5aW5","4oCdLgo=","IG1pbGlvbg==","IHNvbnVuZGE=","0LfRgw==","4KWN4KSu4KSV","5Lq65Y+j","bsSbxb4=","INGB0LzQvtGC","INC60L7QvNC/0LvQtdC60YE=","INC30LDQstC40YHQuNC8","INC40LzQtdGO0YI=","IGzhuqFj","IGhhbmdp","65Sp","5Yqz","IHbEm2Np","0LXRgNC+0LI=","zrrPgc65","IGR1cnVtdQ==","INio2YjYp9iz2LfYqQ==","INij2KjZig==","IEHEn3VzdG9z","zrXPhw==","INC00LjRgtC4","0YTQuNC60LA=","IE7Eg20=","IOq4sOyIoA==","IGhsYXZuw60=","5L+D","IOCksuCkl+CkpA==","IE9icg==","LuC4og==","0LrQvtCy0L7QtA==","b3Bpcw==","IOODiQ==","INio2LTZg9mE","0L3QuNC10Lw=","IHTDqW3Em8WZ","INin2YTYrdix","INmE2KfYstmF","IG3DoWk=","aWxpxJ9p","67O8","IHnEsWs=","572y","0YDQsNCy0LA=","0YnQuNC9","44Gr5a++","57K+56We","4LmJ4Liq","IHRlbXNpbA==","w4Y=","7JWU","INC/0YDQsNCy0LjQu9GM0L3Qvg==","0YDQvtGO","27Hbs9u4","6Kme","2KfYodip","2YjYp9ix2Yc=","77yF","INCc0LjQug==","5oG2","5o+S","4KS+4KSq4KSo","INqp24zZgduM2Ko=","IFTDoGk=","IHRp4buDdQ==","b3ZhbG8=","552h","0YfQuNC7","INC70LjRgg==","zrvOtc+Fz4TOsQ==","INC+0LrQvtC9","Ojp8","0LLQsNC70LA=","INmF2LHaqdiy24w=","IGFsxLHFnw==","INC00L7Qu9C20L3Qvg==","5pmC5Luj","IHNlcnQ=","0LXRgtC+0Lw=","4Lix4LiZ4Lii","5YG3","IHbDrWM=","INGF0L7RgtGP","YWxhcsSxbsSx","bGVubWVzaQ==","44Oz44OQ","IOuqhw==","IOG7pnk=","INin2YTaqdiq2LE=","dnnFocWhw60=","6LKs","7KO87Iuc","w6HFm
WU=","IHllcmU=","44Ki44Oz","INin2YTYs9i52YjYrw==","INii2LQ=","IGNow7NuZw==","IOi7","0LPQsNGU","IOOBgg==","56iz","zrTOtc+C","55uu55qE","IGNldmFw","0YHRgtC1","6aG/","4KSu4KSo","6aG+","INC60YDQtdC00LjRgg==","INmF2LPYqtmC","INC80LjRgg==","IHThu5Nu","INis2YQ=","xKlh","INin2YTYudmE2YU=","w6Frxa8=","IO2Vmeq1kA==","4LiX4Lit4LiH","4Lir4LiZ4LiU","INC70ZbRgtC10YDQsNGC0YM=","65Cg","zqzPgc+H","INmC2K/Ysdiq","4LiZ4Liy4LiH","IGFyYWM=","IGrDrWQ=","IHTDvHJsw7w=","7ZS9","ZXJzaXo=","0LXQvdC40Lw=","IHnDvHp5xLFs","IOOBhA==","IM6az4U=","IOaa","IHDFr2o=","IHThu5lp","IHRoacOqbg==","xLBT","IHRow7pj","5peB","7J6Q7J24","IMO2bMO8bQ==","2LHbjNmB","0YDQtdC2","2LXYp9mE","2LHZgdip","aeG6v3A=","0Y/RgtC40Y8=","IHBvdcW+aXQ=","w6F0dQ==","5Li65LuA5LmI","7IE=","IGtyw6F0","INm+2LHZiNqY2Yc=","IHJvemhvZG51dMOt","INGD0L3QuNCy0LXRgA==","0ZbQudC90L4=","IOWRqA==","IGtp4buDdQ==","55uu5YmN","5L+E","z4TOv865","0YTQtdGA0LXQvQ==","dcWfdHVy","IG7DrW0=","4oCM2K4=","IOG7p3k=","INGB0YLQsNGC0Lg=","0YfQtdGB0LrQuNC5","IGplc3RsaQ==","INm+2YY=","IG9iY2U=","INis2YfYp9mG24w=","0LXQtNCw0LPQvtCz","44Gn44Gu","IGJ14buZYw==","7Lm07KeA64W4","4LmH4LiE","IMSNdHZydA==","INC90LjQutCw","INCy0L/Qu9C40LI=","INC00LjRgA==","INGB0L7QsdGB0YLQstC10L0=","IOunjuydtA==","5r6z","0YDRg9Cx","5qOL","5aOw6Z+z","5LmD","2KrbjNis2Yc=","5bm8","b255YQ==","IFBsYW50YWU=","0KfRgtC+","5pCt","5L2c55So","7IWo","INC60YDRg9Cz","INmI2YHZig==","IO+8ng==","0YjQutC4","wqDQnA==","2KfYtNuM","IMWedWJhdA==","INi52LTYsQ==","bGlm","IHBvdcW+aXTDrQ==","7Yah","INCx0LvQvtC6","6IC2","4Li54Lij","IHbDvGM=","2LTZiNiv","0LjQvNCw","0L3QuNGG0LjQvw==","7J2065Oc","IOKAkA==","INC90LDQt9C90LDRhw==","IHN0cmFueQ==","5q6/","INin2YTYsdmI","57q4","5YiR","77yM5LuO","IOuptA==","INC/0YDQvtCy0LXQtNC10L3QvdGP","IGhhdmE=","IOyXhuyXiOuLpA==","5aKe5Yqg","2r4=","57y6","INi52KjYp9ix","IHThuq9j","IGluxZ9h","ZXJzZQ==","2LHZitio","IOG7lW4=","2KPYqQ==","IM+Azr/Ou865","IG3huq9j","0YHQvtC7","5rSe","LdCz0L4=","56iL5bqm","INCy0LjQutC+0YDQuNGB0YLQsNC90L3Rjw==","4oCe2Lg=","ZWxlcmluZGU=","IE5oxrBuZw==","c3TFmWVk",
"IGhhc3RhbMSxaw==","4LmJ4LmA4Lib","IGRlZmE=","INiy2Yo=","2KfYt9mC","INC/0YDQvtC5","INC+0LrRgNGD0LM=","zr3Ouc6x","bGFkdQ==","a29saQ==","IG/Enw==","INCy0LjRgdC+0Lo=","0Ic=","55uW","44KP44GR","44O844OB","5qGl","IMWha29seQ==","aXRvbQ==","INiq2K3YtQ==","YWxhcmE=","INC60LDQuw==","INC/0YDQuNGF0L7QtA==","IOmmlumhtQ==","wo0=","INuM2LnZhtuM","IHTDuXk=","gOuhnA==","66Ck6rOg","w6F6ZQ==","INC10Lo=","6IW5","IEZha2F0","0L/Qvg==","IMSR4buNYw==","5YiY","w6F6YWw=","0YLQvtC9","2q/ZiA==","5LiI","7Je8","INmE2YTYow==","IEXEn2Vy","5YWx5ZKM5Zu9","2LDYsQ==","IGRhxJ8=","6KGM5Lia","6rGw656Y6rCA","6LSf6LSj","Q8O0bmc=","INGE0LjQu9GM","INCw0YE=","IGNo4bqzbmc=","0L3QuNC80LDRgtGM","IGlmYWQ=","IOyF","54i1","IMWZZcWhZW7DrQ==","5Zu95Lqn","INC60LDQutC+0Lk=","IOCkruCkpw==","IFlhcg==","b2JyYXo=","IG9uZW1vYw==","IOKC","5Y6f5Zug","INmD2LHYrw==","INii2LLYp9iv","IGFkbMSx","IEhpem1ldA==","44O844OR","2YbYs9mK2Kk=","INCy0L3Rg9GC","IGTDoWxl","zpXOpQ==","INGD0YU=","INGA0LXQsg==","INC80LXRiA==","IGtvxZ91bA==","INin24zYsdin2YbbjA==","6Zi1","IOuPmeyViA==","4LmA4Lif","66CI67Ko","6Kit6KiI","cHJhaw==","cG/EjQ==","2KfYudiv2Kk=","IGFza2Vy","INmI24zamNmH","INCi0LXRgA==","bWFrdGE=","IMSNdHnFmQ==","wqDQoQ==","4oCM2qnZhtmG2K8=","77yM5Lim","INGA0L7RgdGW0Lk=","IHVudXQ=","6L+Z5LiA","b3Bhaw==","6ICQ","INC30LDQvNC10YI=","4LmM4Lil","2KjZhg==","IOuqsA==","IGluc2FubGFy","5Y+v5piv","5qKm","0LrQvtC0","6Jub","a2xhZG7DrQ==","0YDQvtCy0L7QtA==","INC80ZbRgdGC0LA=","5Ye65LqG","INC/0LDRgQ==","0L7QsdC+0LI=","2q/Yp9mH24w=","0LLQuNC9","4KWN4KSw4KSn","INC60L7QvNC/0L7QvQ==","INCw0YI=","IGFkZXQ=","IOODgQ==","INiw2KfYqg==","INit2Yg=","IHRyb2NodQ==","4LmB4Lir4LiZ","INC30LDQstC20LTQuA==","IFBhcnRpc2k=","IFNhdmHFnw==","IHPDrWQ=","INGB0L7QvQ==","2LHZitmB","IHpjZWxh","5Zi0","INGE0YPRgg==","aWxlcmVr","bWFsxLFkxLFy","IGThu7Fh","4LiX4Liz4LiH4Liy4LiZ","INmI2YTZg9mG","44Gq44KT44Gg","INqp2YXbjA==","IGzDqWthxZk=","z4HPjQ==","2KzZhdi5","xLFuxLF6xLE=","IEFuYWRvbHU=","44Gr44KI44Gj44Gm","IOq3uOufrOuCmA==","IO2MlA==","0ZbRgtGM","IMKm","5LiN6KaB","4LiW4Lih","INmK2K
8=","IHDFmWVw","IOimgQ==","INC/0YDQvtC10LrRgg==","INGA0LXQs9C4","IGThuqF5","0LrQvtCy0L7Qs9C+","IMSxcw==","IEvEsQ==","INmB2YrZh9in","24zYp9iq","INGB0YLQsNC70LA=","5oqc","0YPRgNCw","INm+2KfbjNin2YY=","IGl0aWJhcmVu","0LDQvdGW0Zc=","INC+0YTQvtGA0Lw=","0LvQtdGH","zrXOvg==","5pS/562W","IOe9kQ==","5YKs","IOydtOufsA==","IGthcmRlxZ8=","0Y7RidC10LPQvg==","0LvQutC4","INin24zYp9mE2KfYqg==","2KrZh9in","INC/0L7QtNGF0L7QtA==","INit2YjZhA==","INGB0L7QstGA0LXQvA==","7Z2l","IOips+e0sA==","xLF5xLE=","INiq2YLZiA==","5q+U6L6D","IM6xzr3PhM65","IM6jzqQ=","amnFocWl","eW7EsQ==","IHBvY2jDoXpldA==","LdCa","INC30LDQstC0","2Y7Ysw==","57uT5p6E","2YXYp9ix","zr3Ov865","IM6gzrXPgc65","6Iej","IG5hY2jDoXrDrQ==","z4TPjg==","4KWN4KSv4KSk","dXl1","5pWX","ZWJp","IOuwlOuhnA==","INCz0YDQvQ==","INin2YTYp9iz","IG9yZ8Ohbg==","IGVkaW4=","5Z+D","4LmB4LiE","INit2K/ZiNiv","INC00YDRg9Cz0L7QuQ==","0L7RgdC60L7Qsg==","IFPhu58=","IHDFmWli","5L+d5oqk","2YXYqNix","IOODhg==","IGRveg==","b3B0ZXJh","4Li04Lil4Lib","2K/Yp9ix24w=","5oSf6KeJ","5Luj55CG","2YbYr9in","2KfZitin","2LXZhg==","IGNlbMOp","IOiplQ==","4LiH4LiZ","IGxlaA==","6I635b6X","44CA776J","IOyEoOyImA==","66W064qU","4KSG4KSw","5aeU5ZGY","5peg56CB","IOi3","IHphasOtbQ==","ZWNrw6k=","5rWc","INGD0L3RltCy0LXRgNGB0LjRgg==","INCx0Y7QtNC20LXRgg==","4KWHLg==","IHZzdHVw","INC+0Yk=","IOWciw==","5LiB55uu","INCy0LXQtNGM","IOunkOydhA==","IHRla25paw==","44CA772M","INC/0ZbQtNCy0LjRiQ==","INGB0LLRj9C30Lg=","INiq2LHYrNmF","wok=","IMSRw6J1","0ZbRh9C90L7Qs9C+","5bCR5bm0","ZWN0YQ==","4KS/4KSy4KSk","zrnOv8+C","44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA","dGVn","4buJbmg=","r78=","IG5lYnU=","2YrZitmG","0L7Rj9GC","6aSK","INin2YLYqti12KfYr9uM","4oCZbnVu","INCS0ZbQug==","IG5nxINu","64yA7ZWZ6rWQ","6Y8=","4KS84KSw","2KfYqNin2YY=","2Y7Zgw==","IGV0a2ls","IGNo4bqvbg==","IOuwnOyDnQ==","IHRhbWFtZW4=","INmF2K3bjNi3","w7xsw7w=","5YGl5bq3","INGA0LDRgdGC0LXQvdC40Y8=","z4DOv86v","IOi2hQ==","w6HEjQ==","IOyZuOu2gA==","INiu24zZhNuM","INiv2YjYs9iq","4LmA4LiC4LiV","IGthbGFu","66i8","YXbEmw==","6
6y47ZmU","INC00LjQsNCz","INmG2YjZitiz","7ZWR","4Lie4Liy4Liw","64uk6rCA","IG5p4buHbQ==","INiz2YjZhQ==","LdC8","4LiU4Liz4LmA4LiZ","4LmH4Lin","44CC44GT44Gu","56+J","V2lkdGhTcGFjZQ==","WmVyb1dpZHRoU3BhY2U=","2KfYptmF2Kk=","4LmE4LiL4LiV","5LiL6L295qyh5pWw","5Ly85LmO","INGC0LI=","IHrDoWtheg==","INis2K/Ypw==","IGdpZGVy","44O844OT","bsWv","IOunge2BrA==","IGTDvMWfw7xr","0YPQvdC+0Lo=","IHTDs2M=","INGC0YDRg9Cx","0L7QutGB","IHRy4bqjaQ==","IG1p4buFbg==","IHRoxrDhu5tj","IG5o4bqtdA==","wqBE","bWFzxLFuxLFu","6Lyq","IM6dzr8=","ZXLDpw==","IGRva29uY2U=","IEfDvHZlbg==","b3ZhbsOh","0LXQt9C0","0ZbQvdGM","6IGy","2KfZhNij","77yM5L2G5piv","INC/0L7Qu9C90L7RgdGC0YzRjg==","UG9ydMOhbHk=","INit2KfZgdi4","4KWC4KSV","0YDRg9C9","5Lq654mp","IGHDp8Sx","IHBvcnU=","ZXJpb2Q=","IEFtZXJpa2E=","54ef","INix2YjYrw==","INC60YDQvtCy0Lg=","2YjZgtiq","6Zi2","44O74pSB44O74pSB","2LHZitmF","5Y6G5Y+y","5Li4","INC30L3QvtCy0YM=","INGB0LLQvtC10LPQvg==","0LHRg9C00Yw=","INit2KzZhQ==","IM60z43Ovw==","7JWI64K0","IOOBpw==","4LmI4Liw","2ZHZjw==","57WQ5p6c","4oCZaQ==","4LmMLA==","5a2Y5LqO","IOCksOCkluCkqA==","INiz2LHZhdin24zZhw==","INCz0LvRg9Cx","INGA0LDQt9GW","0YfQvdGW","77yM5Y+I","Y8Sxc8Sx","5pyJ5YWz","44K744Oz","6JCo","IEdpw6Fv","INin2YTYq9in2YbZig==","INGA0LDQt9C+0Lw=","INGC0YDQvg==","IGHDp8Sxa2xhbQ==","5Yaz5a6a","4Lit4Lib","5ZSv","IMWfYXJr","IHNpc3RlbWk=","IHRvcHJhaw==","6ICD44GI","INC/0L7Qv9GD0LvRj9GA","IOuGjQ==","2KfZitmG","4KS44KSu","IMKA","IGVkZXJlaw==","IGdlYw==","7IKs7JeF","INGA0L7QutC4","INCx0LXRgNC10Lw=","INiu2KfZhtmI2KfYr9mH","IOi1tw==","INCn0YLQvg==","IG9ixJs=","0LjQvdGW","7J207JeI","IEluZGk=","INC00LjRgg==","44O25pyI","INC90LXQvNC90L7Qs9C+","IHrDoWtsYWTEmw==","4LmC4LiE","INGB0LDQvNC+0LPQvg==","INio2K3Yqw==","IOa2","0L7QstC2","INC+0LHRgNCw0Yk=","w5I=","4Lin4Lij4Lij","4KSC4KS2","INC+0YfQtdGA0LXQtA==","INmB2LHYsg==","64yA7ZWc","IHNpemlu","2LHZgdiq","0Y7RidC40Lw=","5ruR","YXZpcg==","INmI2LXZhA==","IHF1YXk=","INCz0LjQvw==","0YDQtdC90LjRjw==","4KWN4KS14KSk","zrnOvc+Jzr0=","4KSc4KS5","IGjGoWk=","IHBvdmHFvg==","INi52L
HYqA==","0LzQtdC90YLQsA==","INC+0YHRgtCw0L0=","5LmL6Ze0","YWPDrWNo","INGB0LrQsNC30LDQu9Cw","7J20652864qU","INi02KfYrg==","IOuLueyLoA==","YXJsYXI=","INC80LvQvQ==","5Yas","LjouOi46","IM64zrU=","IGhlcmtlcw==","0LvRj9C0","2KfZhdin","IOutkA==","z4POuc68zr/PgM6/zrk=","IG9icmF6","2LrYp9mE","Qsaw4bubYw==","5bCK","7J6Q66W8","5oCS","zr/Phc+BzrM=","5byV44GN","IGtvbnVkYQ==","INin2YTYqtis","IGtyaXQ=","5b+N","IOyghOyEuOqwgA==","0LPQvtCy0L7RgA==","IGlzdGl5b3I=","0L7QutC4","INC+0LHQtdGB0L/QtdGH","IGF5csSxY2E=","4LmA4Lic","0LDRgNC+0LQ=","xLDFng==","INis2YXZh9mI2LHbjA==","INGB0LLQvtC40YU=","IHByb3bDoWQ=","INGA0LDQvA==","INmC2LY=","0LvQuNGC0LXQu9GM","44Kx44OD44OI","0L7RgdC+0YQ=","IOCksOCkueCkqA==","a292w70=","7LC4","zrPOus6x","zrvOv865","zrzPgM6/","IMSRYXU=","0L3QuNGO","IG1hbsW+ZWw=","IO2YvA==","INGC0LjRgQ==","44OG44Or","YWJpbGVjZWs=","0L3QuNC9","4LiB4Lij4Lij4Lih4LiB4Liy4Lij","6aCQ","IHBow6o=","amVkbg==","5Lqk5rWB","INCy0L3QuNC80LDQvdC40LU=","0L7QsdGA0LXRgg==","INC20LjQt9C90Yw=","0YDQuNGB0YLQuA==","4KWI4KSa","IHnDvHpkZW4=","IGdpeQ==","6ZqU","5Luy","IOiZ","IFBhcnRp","IOmWog==","4Lix4Lia4Liq","IG5lamxlcMWhw60=","2Y7ZiQ==","IOydtOygnA==","IGPhuq90","0YDQvtC30YPQvA==","IG5lanNvdQ==","bMOtZA==","zrjOvw==","4LmH4LiH","INGB0L/RgNC+0YE=","bWFtxLHFnw==","IOyqvQ==","2KfZgdmC","0YbRltC50L3QuNGF","IOmmmQ==","INmF24zZhNuM2YjZhg==","5aSi","INmB2YfYsdiz2Ko=","csO9","INC/0L7QstGW0LTQvtC8","ZWNlxJ9p","INC30LDQsdC10LfQv9C10YfQtdC90L3Rjw==","wpQ=","44GX44Gq44GE","5Z+656GA","INqG2YbbjNmG","INGA0L7Qt9GA0L7QsQ==","5LiA5Lqb","44Oz44Gu","INC/0YDQsNGG0ZbQsg==","5b6X5Yiw","IHThuqVu","5a2Y5qGj5aSH5Lu9","IO2ZiA==","IOC4lOC4suC4pw==","7Iux","0LvQuNC90LA=","INCy0L7RgdC/0LDQuw==","xJ9pbmRlbg==","0LDRgtC10LvQtdC5","csW+","INGE0YPQvQ==","INCQ0Ls=","INC/0L7Rh9GC0Lg=","0L7QstGW0LQ=","2KfYudio","4Liy4Liw4Lir","INCy0L7Qt9GA0LDRgdGC","4Li04LiH4Lir","INmB2YTYsw==","IMWhZXN0","4LiK4Liy4Lin","IOqzqA==","IG/EjQ==","44K444On","0LrQvtGB0YLQuA==","6ZuG5Zui","5rGH","IHDFmcOtbGnFoQ==","IOydkQ==","0LTQuNCy0Lg=","INC00L7QutGD0LzQtdC
90YLQsA==","IENow6J1","IG3DoXU=","IGtow7Q=","w5U=","0YnQuNC5","IHPhurVu","INC60L7QvdGE","INC30YPRgdGC","5Zue562U","INC60L7RgNC40YHRgg==","IM+AzrXPgc6v","5Liw","IG3huqFjaA==","0LDQvdC6","5LiL5p2l","6LWE5paZ","4Lii4Lit4LiU","IM+AzrnOvw==","4LmJ4LiH4Liy4LiZ","IHVtw61zdA==","5r2u","56qB54S2","IGt1bHR1cg==","INin2YTYtdmB","YWxhcsSxbsSxbg==","IM6UzrfOvM6/","INCy0LjQutC+0L3QsNC90L3Rjw==","772/","INCx0LXQt9C+0L/QsNGB","INGB0LDRhQ==","IG5vaA==","4LmD4Lia","6YO95biC","xZ9hbQ==","0LHRg9GC","IOuqqOyKtQ==","INCy0LDQsw==","55CG6Kej","IGVrb25vbWlr","IGto4bqvYw==","IHN2YXQ=","0LvQuNGI0LrQvtC8","4Lix4LiH4LiI4Liy4LiB","aXp5b24=","6IO95aSf","zq/Ovc61zrk=","woo=","7KaM","INmH2KfbjNuM","IGtpxZ9pbGVy","INC60LvQtdGC","7ZiB","4KWD4KSm","acWh","65SU7Jik","2YrYsdin2YY=","INCd0YM=","4Lit4LiZ4LiX","INGB0L7Rhw==","IGlzdGV5ZW4=","IFNleg==","IOOCuw==","IEHDpw==","4oCM2YY=","INGC0L7Qvw==","INGC0LXRgNGA0LjRgtC+0YA=","YWPEsWzEsWs=","INC+0LTQvdGD","IHZlcmk=","INqp2K8=","INqv2YHYqtmH","IGNpbnNlbA==","0L7Qu9C+0LPQuNC4","IHDFmWVkbcSbdA==","4KSC4KSY","IOepug==","zrPOsQ==","J3ll","2KrYsdip","IGTFmcOt","IEjDoG4=","INix2LTYqtmH","IHZpZGVh","INC90L7Qsw==","5re7","6L+Y5pyJ","2YbYr9ix","IHllcmRl","IGtlbnQ=","4Lia4Liy4Lil","INC00LXRgdGP","5Lia5Yqh","INC+0LHRitC10Lo=","INCy0L3Rg9GC0YDRltGI","a29sYQ==","ZWJuw60=","4Li14Lil","ICwu","INC80ZbQttC90LDRgNC+0LQ=","44Gq44KT44Gm","IFPDtno=","IGNob2Q=","IHRyw7pj","7JqU7J28","IHBo4bqtbg==","0YHQutCw","INGF0LvQvtC/","0YHQutC40Lw=","IGthcGl0","65Ok7JeQ6rKM","IGLDoG8=","bMSxxJ/EsW4=","xLDFnw==","xI1uw61r","IE5nb8OgaQ==","INio24zYp9mG","IHByb8SN","INC/0YDQvtGC0Y/Qs9C+0Lw=","5YCJ","0LXRjg==","IM69zr8=","652864+E","7Leo","INCy0LjRj9Cy","INC/0L7QvdCw0LQ=","INC20L7QstGC","IOavlA==","IGRvYnk=","0LvQsNC8","0ZHQuw==","INGA0LDRhQ==","INCy0L7Qt9C90LjQutCw","0L3QuNGG0YLQstC+","5bGk","INC+0YLQu9C40Yc=","54KO","6aOv","IMW+aXZvdGE=","YXTDtnI=","IGNlbMO9","IGFkYXk=","2LHZitmD2Yo=","INio2LU=","bWV5ZW4=","7Jqw7Iqk","2KjZiNmE","INC+0LfQvdCw","6bq8","5pOa","IHprb3U=","64KY7JqU","IGtyeQ==","IG5l
bW9o","IHZ5dcW+w60=","IOacqA==","INCw0LTQvNGW0L3RltGB0YLRgNCw","2KfZh9in","4LmD4LiB4Lil","77y/77y/77y/77y/77y/77y/77y/77y/","INCz0L7Rgg==","INiv24zar9ix24w=","INC70LXQutCw0YA=","6KeA","IO2YkQ==","IELDtnlsZQ==","aXN0cm92","5aWz5a2Q","INC/0L7Qv9C10YDQtdC0","INmG2YjZitiz2YbYr9mH","2ZLZhA==","INCf0LDQsg==","IMO2cm5law==","INC/0YDQuNC6","INGI0Lg=","w7xzbMO8bWFu","INmF2YLYp9io2YQ=","5Y2B5LqM","IGJla2w=","IHZlcmly","2YjYsA==","2LbYqQ==","0YDQvtGC0LjQsg==","5oyR","Li46","INiu2KfYsdis2YrYqQ==","YWTEsWs=","INCf0L7Rhw==","INGF0YPQtNC+0LY=","5a6i5oi3","zrzOv869","ZWt0aXY=","IHR2w6E=","27Lbsg==","IGzhu41j","INC+0L3Qvg==","0YbQuNGC","INCS0YE=","IOWi","5rWq","0LDRgNGW","IHPDvHJla2xp","IHN0cmE=","IGJpemU=","IHRlc3BpdA==","IGNow6J1","INin2YTYtg==","4LmJ4Lit4LiH4LiB","IOiAhQ==","IEjhuw==","INC60LDQttC00YvQuQ==","0LDRjg==","4LiZ4LiE4Lij","4LiX4Liw","INmF2LHYp9is2Lk=","IGhhbGluZQ==","zrTOv8+C","ZcSfaQ==","INmF24zYstin2YY=","INmH2YQ=","IGJvbGVzdA==","IOWcnw==","IHV6bWFu","0YDQvtCz","56K66KqN","INGA0ZbQt9C90LjRhQ==","INC30LDQutGA0Ys=","0LvRg9Cz0Lg=","INGB0L7QstC10YI=","aWRkaQ==","5ZCI44KP44Gb","IOWQiQ==","IGtp4buHbQ==","67K9","INmF2LnZhdmI2YQ=","INC+0L/RgNC10LTQtdC70Y8=","IG1pa3Rhcg==","IOyekOuPmQ==","IGlsYcOn","0LvQvtGH","IHnEsWzEsQ==","IMSQ4buD","IGFieWNo","IHJla2xhbQ==","IHZ5cGFk","INC90LDRg9GH","4LmA4LiE4Lij4Liy4Liw4Lir","IOS7lg==","cG92xJs=","77yM6K6p","56Wd","2KfZiNmG2K8=","IDp8Og==","IHJlxb4=","IHZ5YmF2","7Jyk","5q20","0L7Qs9GA0LDRhNC40Y8=","ZXpwZcSN","wrFu","0L7QstGD","INC00YPQvNCw","IGplZG5vZHU=","0L7RidC4","INmF2LTYqtix","6Kaz","IHlva3R1cg==","IG9ixI1hbg==","IFRy4bqnbg==","xLFtc8Sxeg==","zrHOuc69","wow=","2LHbjNin2YY=","IEplaG8=","INin2YTYotmG","0YHRjNC60LjQvA==","IGtkeWJ5","IGJhxZ/EsW5h","IHByZXppZGVudA==","IFZp4buHYw==","5YW8","4KWM4KSc","IOunpOunpOqwgA==","5qih5byP","bsOtbXU=","IOWC","IGRlbml6","mOibmw==","IOiAjA==","2YjYrQ==","0YvQvw==","IOKWvA==","bnVs","IFNldg==","IHJ1aA==","IGjhuqE=","INGP0L0=","IOq4sOuzuA==","IHZlbGlr","IFTDom4=","0LjQu9C40YHRjA==","INGF0YDQsA==","5YK3"
,"IOCkhuCkjw==","IG55bsOt","wrvYjA==","INi02Lk=","5p2C","INC80YvRiA==","44GZ44GQ","IOqzteyngA==","IHThu5lj","44O844OH","INGB0LXQu9C+","INin2LnZhNin2YU=","IMWfaW1kaQ==","INin2YTZhdmK2YTYp9iv","INin2YbZgtmE2KfYqA==","INi02K7YtdmK2Kk=","IEvDvHI=","INCy0ZbRgg==","INin2YbYr9in2LLZhw==","INC80L7RiQ==","dGVybmV0","IM6xz4XPhM6u","INGA0L7Qt9GC0LA=","INCy0LjQsg==","bGVq","IOihqA==","z4PPg8+Mz4TOtQ==","INmK2LPYqg==","INC80LDRiA==","5Z2a","INC60L7QvNC90LDRgg==","4Liy4Lir4Lil","IOeZvA==","INin2YjZhNuM2YY=","6L+Q5Yqo","INC/0YPQvdC60YI=","INC+0YHQvtCx0LXQvdC90L4=","INC80LDQvA==","57up","77+j77+j77+j77+j77+j77+j77+j77+j","0LDQu9GM0L3Ri9C8","INCm0LXQvdGC","LdCc","57eS","IOCkueCknA==","0L7RgtGL","44Kk44OJ","2K/Yp9ix2Kk=","44Go44GX44Gf","4Lix4Lie4Lii","IG90w6F6","INC00L7Qv9C+0LzQvtCz0L7Rjg==","4LmB4Lil4Liw4LiB4Liy4Lij","INGC0YDQsNC90YHQv9C+0YDRgg==","INmC2LHYotmG","IOesrOS4gA==","INC80LjQuw==","IG5nw7Rp","IGxpbmg=","IE5ow6Ju","0YzQvtCz0L7QtNC90ZY=","5oCA","4LmJ4Liy4Liq","Ljo6Ljo6","IGJpcmV5","5oCd44GE","4LmD4LiU","0LLQtdGA0LQ=","IGxpc3RvcGFkdQ==","IOC5geC4oQ==","0LPQtQ==","INC60YPRhQ==","IO2ZnOuPmQ==","IOiO","INCQ0LvRjA==","7ZqM7J2Y","IM+Az4HOsQ==","IHZ1aQ==","4Lin4Lij","4KSC4KS1","IGdlY2U=","56u2","IGt1dg==","0LzQtdGJ","INGC0LXQv9C10YDRjA==","4Lit4LmA4Lih","5Yi25bqm","INGC0YDQtdGC","INmG2KrbjNis2Yc=","5LuY44GN","IO++ng==","INGH0L7Qs9C+","4oCQLQ==","IMWZw61rw6E=","4LiH4LmD4LiZ","IG7Em2tvbGlrYQ==","IGJ1bmE=","77yM5a2Y5LqO","4Lil4Liz","44CB44Go","IG7hu5lw","INin2YTYrNmG","IM6gzrHOvQ==","0J7QoA==","INiv2K7Yqtix","IMO6ZGFqZQ==","IOW8oA==","cmV0aW0=","c8SxbsSxeg==","INmH2YbYp9mD","0JvQrA==","5pWs","zpHOnA==","6aG16Z2i5a2Y5qGj5aSH5Lu9","7IKs6rCA","IHRyZXN0","dmnEjQ==","INm+24zYr9in","zrbOtQ==","INCf0L7Qsg==","2YTZhdin2Ko=","b3JleA==","6Kyb","INCy0ZbQtNC60YDQuNGC","0LzQsNGF","INGH0LjRgdC70LU=","2KrYqNin2LE=","IM6tzro=","7JWE7YyM7Yq4","cmF2ZWw=","zrHPg86vzrE=","YcSN","IOCkj+CkqA==","4Lil4Liw4LmA4Lit","INC30LDQu9C10LY=","IOaB","INC80L7QttC10YLQtQ==","INC/0L7QstC10LQ=","INio2LPbjNin2LHbjA==","I
HBvxI1ldA==","2LHYqNi5","ZWxleg==","2KfZiNix24w=","IGJhxZ9r","5bCC","IGhhbGRl","5ouf","U2F1","0L7RhtC4","4Li14LiE","INCy0LvQsNC00Lg=","2ZDZhQ==","a3Vk","4KWC4KSs","5aeU5ZOh","4Liy4Lij4LiT","b3LFrw==","INmF2YjZhA==","IGJ5dA==","IHDFmcOtc2x1xaE=","6Iux6K+t","6YCQ","IHZlbGvDqQ==","IOCkhuCktg==","IHBoaeG6v3U=","4LmD4Liq","INin2LPZvg==","IHpib8W+w60=","44GT44KT44Gq","INmI2YfZig==","INGD0YfQsNGB0YLRjA==","4LiI4Liz4LiZ4Lin4LiZ","INiq2LHaqQ==","5Y2B5YiG","zp/OoA==","zrrOv867","IGZha2F0","IGNo4buX","6YCa55+l","INCy0L7QtNGD","IM6azrHPhM63zrPOv8+Bzq/OsQ==","YWNhxJ/EsW7EsQ==","0LvQvtCz0L4=","IG3DvMWfdGVy","IGplZG5vdQ==","INCx0LDRgA==","aWRhZQ==","ZMSxbQ==","6L6y","5ZC5","65Cp64uI64uk","IMWfZWtsaW5kZQ==","ZW7DvW0=","65Ov","aXTEmw==","INC60L7Qu9GM","64yA7ZWZ","IMOWcg==","IOq9","IFVCTkQ=","IGhpaw==","44KJ44GX44GE","5Ye65ZOB","Q8Oz","IM6e","IOWFpQ==","IE5ndXnDqm4=","INm+2YjYtA==","0LvRj9GU","INii2LrYp9iy","IG5oaeG7hW0=","ZGl2aWQ=","55g=","2KfZgdiq2Yc=","0LDQvNC10YI=","0L3Rg9C70YHRjw==","5LyB5qWt","0YDQvtCx0ZbRgg==","ZMO8xJ/DvA==","INqp2KfZhg==","4Lit4LiH4LiX","0LnQvQ==","IHBvaHli","IGJp4buHbg==","IO+8mw==","2YXZhtiv","IOCkhuCklQ==","IMSNbG92xJtr","44KS6KaL44KL","67ew","INGD0LLQtdC70LjRhw==","IOq0","IHlhbmzEscWf","6bqm","IOWklumDqA==","z4TOv8+Fz4HOsw==","INC/0YDQvtGH","INGA0YPQutC+0LLQvtC0","55uk","6JyY6Jub","5a6J6KOF","INCj0LrRgNCw","IHRhcnTEscWf","0YLQsNC2","IG9sdcWfYW4=","IFJ1c3lh","INC60LvRg9Cx","IM6gzqE=","YWzEsWTEsXI=","a8Sxbg==","INC30LzRltC90Lg=","bGXFn2lr","0LXRgNC/","0L7QsdGJ0LU=","IHF14bqtbg==","IOCkquCktg==","44KS5Y+X","4LmA4Lil4LiC","2KfYttix","IHXFvml2YXRlbA==","zrvOr86x","INCS0L7QvdC4","4Li44LiU4LiX","IFbDoA==","44Oz44K/","KeuKlA==","5rib","IM68z4A=","5ben","INGI0LrQvtC7","IOyymOydjA==","4Lix4LiB4LiU","5q6K","IG5o4bud","IM6/z4DOv86vzrE=","4LmB4LiZ4Lin","0LzQtdGA0LjQutCw0L0=","bsOta2E=","IO2YuO2FlA==","2LPYqNio","4LiH4Lih","7J6I64qU","2LrYtw==","2Y/ZhA==","ueaenA==","0YfRltCy","0YjQsNGP","INil2YTYpw==","2K7YtdmI2LU=","bGxsbA==","INGN0YLQuNC8","IHp2w60=","IHF1w6Fu","4Li
Z4LiB","INC/0L7Qu9C+0LI=","IOa3sQ==","IG1p4buBbg==","5Lq66ZaT","INC30LjQvA==","IG1leWRhbmE=","0LXRhA==","IGLhu4Fu","2LLZitiv","INCg0LXRgdC/","zpnOo86k","IOaUtg==","cmF5YQ==","INiq2YjYp9mG2K8=","IGlzdGVy","IOuwgA==","INC80LXRhdCw0L3QuA==","IOC4leC4sw==","INC00LXQutCw","4KSC4KSX4KSy","44O844Kr44O8","IG5lcMWZw60=","INGB0YfQuNGC","IM6/zrzOrA==","IMOnaWZ0","2KjbjNmG24w=","bWVsZXJp","INCy0L7Qt9C00LXQudGB0YLQsg==","ZG91","7IOB7J2E","INCS0L7Qu9C+0LQ=","zrXOsg==","0J3QmA==","0Y/Qug==","z43PhM61","0LfQsNC90L4=","bGVuaXI=","Y2VsaWs=","INGB0L7RgdGC0LDQstC70Y/QtdGC","zrnOsc+C","INCT0L7RgA==","5LmL5LiA","z4POvM+Mz4I=","44Gr6Zai","INCy0Yc=","INC/0L7RgdC6","6Lyv","4KWA4KS2","INii2KvYp9ix","4LiE4Lin4Liy4Lih4Lij","INC10LTQuNC9","7YWQ","5bmz5oiQ","IGtpxZ9pbmlu","44Ky44O844Og","4KWN4KSk4KS1","IGthcHNhbcSxbmRh","IGFrdGFy","IHRy4bur","INix2LTYrw==","INC90LDQutCw0Lc=","2LHZitmE","4Lit4LiE","INqv2LDYtNiq2Yc=","IOawkQ==","INGC0LXQsdGP","c3Bvcg==","0Y7RidCw0Y8=","0L7QutGA0LXQvNCw","0LLQsNC0","IENow7puZw==","INiy24zYp9iv24w=","0LXQvdC+0LPQvg==","INqp2LPbjA==","w54=","IGFkxLFuYQ==","0YPQtNCw","0ZbRlA==","0LDRgtC10LvQuA==","IG7DoXbFoXTEmw==","55So5LqO","INm+2LHZiNmG2K/Zhw==","INmG2KjZiNiv","2LPYp9iq","7JeY","44Gj44Gm44KC","IOeJqQ==","0JjQtw==","5Yi3","IO2ctA==","INC+0YHQvtCx0LvQuNCy","44GX44G+44Gj44Gf","YXlkxLE=","5Ye655qE","IOyVhOuLiOudvA==","xLFzxLFuxLE=","4LiX4Liy4LiH4LiB4Liy4Lij","IHp2dWt5","IOeuoQ==","4paL4paL","INGC0LXQu9C10YQ=","INC90LXQu9GM0LfRjw==","44Or44Gu","z4PPgA==","IOez","5aCh","0YbRg9C3","2LHZitmC2Kk=","4KS/4KSb4KSy","6LKp","INCj0LrRgNCw0ZfQvQ==","INmF2LPYptmI2YQ=","INC+0YfRlg==","5pyA5b6M","INC30L3QsNGO","4LmJ4LiZ4LiX","INGC0LXRgNCw0L8=","INGB0L/QvtC6","INiu2YjYr9ix2Yg=","6Zi7","IGTDvHpleQ==","5LiA5YCL","2KfZgdmH","4KSC4KSv","6LWE5Lqn","57un57ut","INGB0LvQsNCx","5oSP5oCd","IO2ZmOyCsA==","INGP0YA=","IGTFr3ZvZA==","552b","2KrbjNio","INmI24zYsQ==","INmH2LLbjNmG2Yc=","IGJlbnplcg==","INmF2KfYr9mH","4KWM4KSV","IOC5gOC4lQ==","44KI44GP","0LjQtNC10L3Rgg==","6Iux6Kqe","0LXRgNGL","IOq4iOy
VoQ==","IOODvA==","IOuNpO2UhA==","0YDQsNGC0Yw=","IOWNlQ==","4LmA4LiJ4Lie4Liy4Liw","IOaUvw==","IOCkhuCkrg==","INC30L3QuA==","IOudvOydtA==","5o6M","55CG55Sx","INin2Lo=","INGB0LjQsw==","INC10YTQtdC60YLQuNCy","INCf0YDQtdC0","44O044Kj","INCy0LjQutC+","IHR2cmQ=","64K06riw","44OL44Ki","INmF2LTYp9mH2K/Zhw==","IOCkuOCkmg==","bMO8xJ8=","6K+B5Yi4","IHNpw6p1","INC+0YLQsg==","IHZ5dHZvxZk=","INit2YXZhA==","INGE0YDQsNC9","4LmJ4LiU","5Yy76Zmi","INCy0LvQsNC0","2LrZhA==","5bu656uL","b3Nsb3Zlbg==","0LjQu9Cw0YHRjA==","2LnZhNmI2YXYp9iq","INiq2LHbjNmG","zq3Pgc61zrk=","IGLhuq10","INmF2LTaqQ==","INix2KbZitiz","IOygnOyekQ==","zrPOtw==","INC90ZbQug==","IOq1rOyEsQ==","IMSRZW4=","IOCkmuCksA==","IGdlw6dtacWf","5LqG6Kej","INC70LXRgQ==","IHF1YW5o","44CM5oiR","IE7Em2t0ZXLDoQ==","656N","w4XFuA==","4KSC4KSm4KSw","7JWE7J20","5bCR44GX","INi02YfYsduM","zrrPhM63","IOKXhA==","INmD2LM=","6LeM","w48=","5bel5YW3","5YqD","cG9t","INC90LDQstGH0LDQvdC90Y8=","INix2Kw=","0YDRg9C10YLRgdGP","IM69zq0=","24zZhtqp","4LmC4LiL","5Yuk","44GX44G+44GG","INGB0L7Qs9C70LDRgQ==","6YeR6J6N","57u/","INCh0LDQvQ==","5pW1","INC/0L7QstGW0YI=","INC/0L7QvNC+0YnQuA==","44Oh44Oq44Kr","44K344Ki","IM+Az4HOv8+C","6Iiq56m6","INCy0LDRgNC40LDQvdGC","IHlhbG7EsXpjYQ==","57O757Wx","INmB2YjYsQ==","0L7Rh9C90L7QuQ==","4LmA4Lin4Lit4Lij","INC60YPQu9GM0YLRg9GA","z4fOuQ==","xI3DrXRh","wpM=","5Lq644GM","zrrOv8+N","INGA0LXRlA==","INCy0YHRjg==","6bqX","INiy2YbYp9mG","54uC","IOC4q+C4oQ==","IHjDumM=","5YWS","xJ9pbmlu","5Zac5qyi","INGB0YLQsNC0","aXllc2k=","7Jqx","6J0=","IGt1cw==","z4TOv867","0LPRltCy","0ZbQu9C4","44GE44KE","6amX","b250cm9s","2KfZhNmD","0LrQvtCy0LjRhQ==","INGB0YLQsNC70L4=","IM6gzrHPgc6x","IGNoeQ==","IGNpaGF6","h7Q=","7J6l7J20","YWNlYWU=","2LTZh9ix","0LjQu9Cw0L3QvdGP","55qE5bCP","IHRo4bul","2YjZhtiq","0LvQvtGA","44KS5oyB","IM6Uzrk=","IOecnw==","0JvQng==","6b2Q","546E","2KfZiNmH","INC40L3Rgg==","4KWA4KSf4KSw","INC+0LHRidC1","INC00LXQv9GD0YI=","zrzOrc69zrXPgg==","INmD2YrZgQ==","2LnZhdmE","77yM5aaC5p6c","INC40L3RhNC10Lo=","aXRlbGU=","IOOAgOOAgCDj
gIA=","44Kk44Oz44OI","0LvRltGC","INGB0Y4=","IHphc2U=","ZGVjaA==","0LXQutC+","6K6T","5Y+s","0LfQtdC8","zqDOkQ==","IHZ6ZHU=","4Liy4LiI4Liy4LiB","a29saXY=","emt1bQ==","6IGK","IOyxhOyaqQ==","4LmN","IGFzcA==","27LbtA==","7J24642w","IGthcsWfxLFsYcWf","77yM5Y+v5Lul","IOCkh+CkqOCklQ==","IOyKpO2DgA==","6YOo5bGL","5Yi25L2c","44O844K344On44Oz","zr/Ovc+EzrHPgg==","zrPOvw==","IOyekeyEsQ==","6JGj","b3rFmWVqbcSb","INGA0LXQt9GD0LvRjNGC0LDRgtC1","IEluc2VjdGE=","IHNrb24=","b3R1","IHDEm3Q=","0YHRjNC+0LPQvg==","IMSwc2xhbQ==","IGzhu4U=","5Lit5ZyL","INCc0ZbQvdGW0YHRgg==","5ZCI5ZCM","YXN5b251","0L7QttC10YI=","6Ieq5Yqo","0YHRjNC60L7Rjg==","IGtpxZ9pc2Vs","z4TOuc66zr/PjQ==","INGD0YfQsNGB","xLFsbcSxxZ90xLFy","INGP0LrQtQ==","0YnQuNC90Ys=","0LzQsNGA","IHNvdWR1","wqDQrw==","INC00YDRgw==","44Gh44KH","4KWL4KWc","776R","IM+Ez4w=","INi22LE=","bMOhxaE=","INC00ZbQsg==","INis2K/Zitiv","INC90LXQsdC+0LvRjNGI","6YGt","57uN","IEt1cnVsdQ==","0YHRgtGA0YPQvNC10L3Rgg==","6L+Z5piv","7JmU64uk","0LzQtdC70Yw=","IOS8ig==","4bunbmc=","INC30LDQstC40YHQuNC80L7RgdGC0Lg=","642k7ZSE","54eD","6L+H5Y67","INC30LDRgdGC0L7RgdGD0LLQsNC90L3Rjw==","INiv2KfYrtmE24w=","0YnRkQ==","IMKgIMKgIMKgIMKgIMKgIMKg","77qu","INin2YTZhdmF2YTZg9ip","c8SxbmRh","6LOA","5bGP","IOq/","IGRva3Rvcg==","INmC2KfYqA==","IFNpc3Q=","INC80LXRgdGC0LU=","INGB0L7RhdGA0LA=","2KfYtNiq2Yc=","IOacnw==","INC/0L7RgdC60L7Qu9GM0LrRgw==","IHBldg==","2Kfar9ix","2YXYsg==","INi22YXZhg==","4KWp","Z2VzaQ==","YcSfYQ==","6Kej5Yaz","64W47Lac","IGx1eeG7h24=","INC60L7QvdGC0LDQug==","4Li6","IE5nw6B5","IHbDvXN0YXY=","IHRodXnhur90","2KfbjNi5","IDovOg==","IHBo4bqhdA==","IM6Rz4DPjA==","IG11eg==","IOyDiQ==","IMOHaW4=","INqp2KfYsdio2LHYrw==","2KfYptiv","2KjYp9iv","4KWN4KSk4KSu","IOuRmA==","INC80L7Qtw==","xaHDrWNo","IOC4oeC4qw==","INii2LM=","INGB0LvQuNGI0LrQvtC8","6IOh","6KOB","5oi7","IOyEpOuqhQ==","IG90b20=","IOCksuCkl+CkreCklw==","4LiH4LiB","2KfYqNiv","4LiZ4Liy4Lih","6IKp","INi02K/Zhtiv","44Gd44Gu5LuW","YWRsbw==","xJtu","INmE2YfYpw==","INC80LjQvdC40Lw=","IGTFmWV2","IFRoacOqbg==","656Z
","ZW5naW4=","4KWA4KSu4KSk","INGD0L/QvtGC0YDQtdCx","4oCM2KrYsQ==","IOelnumprA==","b3bDoW7DrW0=","INC00LXQu9C+","IOe8lg==","INin2YTYuA==","INCy0LjQuQ==","0LDRgtC+0Lw=","5YWs5ZGK","IMSRZW0=","44K344Oq44O844K6","5LiL55qE","bGFzxLE=","INCy0YvQsdC+0YA=","0YLQvtGC","64+E67OE","INGD0YHRgtCw0L0=","IO2eiA==","0LvRg9Cw0YLQsA==","IHRow6Fj","0LDQvdC40LXQvA==","0L7QstCw0YLRjNGB0Y8=","0YLRlA==","0K3RgtC+","77yM6KaB","IFZ6","INit2YjYstmH","LdC6","VuG7m2k=","ZW50xa8=","IGJ1bHVuZHXEn3U=","2LHZiNi3","INGX0Lk=","IMOnZXZy","IMWZZWQ=","INiz2KfYrtiq2Yc=","5Yqe5rOV","INmC2YQ=","acWfaQ==","77yd77yd","2LPYp9iz","IMO6ZGFqxa8=","5aw=","5o2f","w6FjdA==","IM6Rz4A=","54i3","IMWZw6Fk","IGzhu5dp","b250ZW50","INmF2LA=","b2xvamk=","INm+2LHYr9in2K7Yqg==","4LmJ4Liy4Lie","INC00LXQudGB0YLQstC40Y8=","IG1ub8W+c3R2w60=","7JWI66eI","5YG2","IMOUbmc=","IGRha2lrYQ==","aGVuZGlz","IGLDoWM=","5a+2","4LmH4LiB4Lir4LiN","bm9jZW7DrQ==","IEVyZG/En2Fu","Ojo6Ojo6Ojo6Ojo6Og==","0LDRgtC10Lw=","ZMSxeg==","INij2YrYttin","INGN0YTRhNC10Lo=","44KM44Gm44GE44KL","IGJhxZ92dXJ1","zqzOvc61zrk=","IM+EzrXOu861z4XPhM6x","IOqygOyDiQ==","INqp2YbYqtix2YQ=","IOCktuCklQ==","5by5","IG9sbXXFn3R1cg==","INCy0YHRgtGD0L8=","0YfQuNC70LA=","4Lii4Liy","INij2K3Zhdiv","b3NsYXY=","INGH0LDRgdC+0LI=","IHrDoWtsYWRuw60=","IOCkuOCktQ==","0LTQvtC9","IMWZw61qbmE=","zrrOv8+F","6YCB5paZ54Sh5paZ","z4POr86xz4I=","44K044Oq","INCy0LjQsQ==","5b2S","INC90LDQt9Cw0LQ=","IOeZvuW6puaUtuW9lQ==","4buG","IGthbGTEsQ==","7Lyc","IO2PrQ==","INGH0LjQvdC+0Lw=","6Lk=","0Y/Quw==","INGA0LDQt9C00LXQuw==","ZEc=","IFRlbnRv","0Y/RgtGM0YHRjw==","6Z2i55qE","IM6Vz4DOuQ==","6rCR","IGvDqG0=","0L3QuNGG0Y8=","55ar","6ZuZ","INmF2LHZg9iy","INC90LDRg9C6","5aKX","INGC0LXQv9C10YA=","4KS+4KSg","4LmH4Lia4LmE4LiL4LiV","zrzOss+Bzq/Ov8+F","INGE0ZbQvdCw0L3RgdC+0LI=","0ZbRlNGO","z4HOr862","7KSE","INio2KfZhtqp","dHVs","bGnEn2luaQ==","INC/0L7Qt9Cy0L7Qu9GP0LXRgg==","z4POrw==","IOybgw==","4LmM4LiE","IHBvbG92","7J6l7J2E","aXN0w6k=","INCh0KHQodCg","w6FobA==","6KU=","INC60L7QvNC/0LvQtdC6","4LiC4LiZ4Liy4LiU","4L
ix4Lio","zr3Osc69","IOelnumprOaUtuW9lQ==","7Iuc7Jik","IOmmlumhteesrA==","IOeZvuW6pua1gemHjw==","5ZGo5pS25b2V","IGhhdHRh","0JLRltC0","INCy0YvRgdGC0YPQvw==","2qnYp9ix24w=","S2hp","IOywvuyVhA==","IG7hurduZw==","6Yar","IFbFoQ==","INC/0LXRgNC10L0=","0LvQsNCy0LA=","2YrZhdmK","IHZhdGFuZGHFnw==","IM65z4PPhM6/","IOC4kw==","4KS44KSy","0LPQtdC9","INio2YjYsQ==","4oCM2K/Zh9iv","bMSxa2zEsQ==","IHN0cmF0ZQ==","2KjZiNix","44CB44Ki","IHNvbnVj","INC90LDQuNCx0L7Qu9C10LU=","LdCy","INCy0L7QtNC+0Lk=","b2plbsOt","INi62LHYqA==","IGJlcmk=","YWTEmw==","IGRvdm9s","4oCM2qnZhtmG2K/ar9in2YY=","44GV44KJ","44Oz44K6","44Kr44Or","b21ldHI=","5YeA","INmB2YjZhA==","INmF2YjYs9uM","INin2YTZhdi62LHYqA==","ZWNrbw==","2YDZgNmA2YDZgNmA2YDZgA==","6rCA6rKp","0YDRg9GC","IOu2gOu2hA==","IHDFmWVkcGlz","IG9wcmF2ZHU=","0LXRgtC40Yc=","4LmC4LiE4Lij4LiH4LiB4Liy4Lij","5oWn","5ouc","2LPZgw==","7J6h64u0","4Lib4Lij4Liw4Lih4Liy4LiT","6LSo6YeP","INCz0L7Qu9C+0LLRgw==","0LvQtdC90LjRjg==","IOCkqOCkjw==","IHByb2pla3R1","2KfZgdix","YXRpdm7DrQ==","zq3Ovc+E","44OJ44Op","IHRlZGF2","6rw=","4Lib4Lij4Liw4LiB4Liy4Lio","IHR1dG8=","IGNoaeG6v3U=","IHZ5eg==","0YDQvtGI","5Y+W5b6X","INC80LjRgdGC","INGB0LvRg9GH0LDRj9GF","INi62LA=","INGD0LrQu9Cw0LQ=","INGD0YHRgtCw0L3QvtCy0LvQtdC9","IHRlc2xpbQ==","IOOAjQ==","IOij","5q+r","6YqA6KGM","ZWN0cw==","a2VtaXo=","zr3Ot8+C","6L66","INC/0YDQtdC8","IHNvbnVjdQ==","UG9rdWQ=","INCe0YHQvtCx","6L6b","6Ly4","67O06rOg","4Lia4LiE","44CC44CN","4KS+4KWkCgo=","INGB0LDQvNC+0YHRgtC+0Y/RgtC10LvRjA==","2YTbjNiq","zrvOtc66","INGA0LDQudC+0L3QsA==","0YzQuA==","4LmI4Liy4LiX","IOC4m+C4o+C4sOC5gOC4l+C4qA==","4Lih4Lit","2KfZh9ix","INCy0LjQsdC+0YA=","0Y7Rh9C40YHRjA==","IHBvdm9s","YWJhc2U=","4oCzTg==","2qnZiA==","INCj0LrRgNCw0ZfQvdCw","c3Rhbm92","INGD0YfQsNGB0YLQuA==","IGhsYWQ=","INGA0LDRgdGB0LrQsNC3","44G/44Gf44GE","4b2w","IOWbng==","IMawxqFuZw==","zrHPgc6s","2K7YqA==","5o2V","w63FmQ==","INiz24zZhg==","wqBpbg==","IE3Em3N0","5pWZ5a2m","INC+0YHQvtCx0LjRgdGC","dWpp","55S75YOP","INiv2KfZhti02YbYp9mF2Yc=","7J207JW8","INC30LDQv9C40YI=",
"INGB0LLQvtC40LzQuA==","27LbsNuy","77yM5bCG","44O844Gu","IHRow60=","INmF2KrZiNiz2Lc=","4KWHCg==","5aSa5bCR","77yM54S25ZCO","7ZeI","IOCkreCklw==","IOWPtw==","IHRlb3I=","5YKo","INGA0ZbRhw==","INGB0YLQsNGC0YLRlg==","INix2KfYqNi32Yc=","IO+8nA==","2KjYp9it","4Li04LiZ4LiX4Liy4LiH","4KWH4KSCCg==","2KfYptmC","INin2YTYrNiv2YrYrw==","bGnEjQ==","2KfYrdmE","bcOpbsSb","IGLhuqd1","INCS0LDQuw==","INCx0LvQsNCz0L7QtA==","0LXRgtC10LvRjA==","5bmz5Z2H","0LzQuNC9","IHPDvHJlYw==","INC30LDQstC+0LQ=","6I2Q","0YLQuNC5","0LvQvtCx","INCy0L7Qug==","bGFkxLHEn8Sx","2KfZitmK","6rKg7Iq164uI64uk","IGFtYWPEsXlsYQ==","77yM5Zug5Li6","44Gn44GC44Gj44Gf","INi02LHZiNi5","5p+U","J251bg==","0L7QutC+0Ls=","IGNpZGRp","IGLhu6U=","IHlhcMSxbGFjYWs=","INGH0YPQstGB0YLQsg==","7IKs7J2Y","4Lit4LiZ4LiU","zpfOpA==","IOuLpOyWkQ==","64uk66m0","aW1pemk=","5LmC","44Gy44Go","IOmdng==","4oCM2b7Yr9uM2Kc=","5LmY","44OK44Or","INC/0ZbQtNC/0YDQuNGU0LzRgdGC0LLQsA==","4LmR","6L+d","INmF2YbZhw==","0YDQuNC6","0LDRgNGW0LI=","INC60L7Qs9C+","INmC2LU=","IOadpQ==","IFBow7JuZw==","INC+0LLQvg==","INC/0LXRgNC10LLQsA==","6aOy","4KSC4KSf4KSw","2YrYsdin","aWxkacSfaQ==","ZXRpbg==","z4fOtc6vzrE=","IHphaHJhbmk=","2YjYrNiv","IOev","4Liy4Lij4Lii","INC30LDQutC+","INiq2YLYsw==","44K544K/44O8","5p2w","IOOCsA==","IOm7hA==","INCa0L7Qs9C00LA=","4KWr","IOasoQ==","INCy0YvRgNCw0LY=","IGNoxINt","0LvRj9GU0YLRjNGB0Y8=","2K/Zh9mF","IHZyY2g=","57qM","0L/QvtGA","IG1hxJ8=","5b6S5q2p","cG9kb2I=","4Liw4LmB","6YG45omL","5biv","IHNlYm91","aW5pemU=","INCc0LDQug==","IOaZrg==","IM+Fz4DOrM+Bz4c=","IMSQw6A=","IEJybm8=","IMWhw60=","2KfZhNi1","IG5naGnDqm0=","IG9ubGFyxLE=","IHXFvsOt","6Ieq5YiG44Gu","INC90LDRhdC+0LTQuNGC0YHRjw==","IGpzaQ==","IOCkuOCkruCksA==","IM+Gz4k=","27Hbudu4","IOCknOCkl+CkuQ==","6a2a","7J246rCA","xJBp4buBdQ==","INij2LnZhNin2YU=","4KWH4KSC4KWkCg==","5b2i5oiQ","IGlrdA==","IHpkcm9q","IEFtZXJpaw==","zqHOkw==","4LiH4Liq","IO2SgA==","0YHQvtC70Y7Rgg==","2YjZitiq","IGfDtnLDvG50w7w=","0LDQvdC90YvRhQ==","INij2YI=","INC80LjRgA==","5auM","IG3hu5Fp","IGRlcmlu","6ZKI","
INC80LDRiNC4","7Lih","INis2YbZiNio","INGB0LvQvg==","44CC5LiA","0LXQvdC40Y/RhQ==","INGH0L7Qu9C+0LLRltC6","IHlhbmE=","INC+0LrRgg==","INC90LXRgA==","5oi2","0L3RjNC+0LzRgw==","INGW0LzQtdC9","44KP44Gf44GX","IM6TzrnOsQ==","44CB56eB","IGtvdQ==","INGG0LXRgNC6","bGF5YXJhaw==","44CH","2KfZhNiz","wqBU","INC00YDRg9C2","INC00LLQvtGA","zrvOrw==","IOuGgA==","IHRlcGxvdA==","2YHYp9iq","0LHRlg==","IGfDvHZlbmxpaw==","bsSbbg==","6Kmp","IGluc2FubGFyxLFu","IOyEpOy5mA==","6JOd","YXZhdGVs","amV2","INqG2LHYpw==","IGdlcmVraXlvcg==","44OD44Kw","IMOHb2s=","INmI2KzZhw==","INGD0LvQuA==","wpE=","5ZGA","INC+0YDQs9Cw0L3QuNC30LDRhtC40Lg=","INGW0YHQvdGD","IG5lYnVkZQ==","IOuwpA==","5LiK44GM","IOCkp+CkqA==","INix2YjYp9io2Lc=","zrPOs861zrs=","INC00L7RgdGP0LM=","INin2YTZgtiv2YU=","INC30L3QsNGF0L7QtA==","IMSNw61zbG8=","xZ9r","INin2YTYr9mK2YY=","IGfDvG5sw7xr","2YPZitmK2YE=","zq3Pgc6x","4LiV4Lij4Lin","INC90LDQu9C40YfQuA==","2KfZhduM2YY=","IM68zrnOug==","IGTDtm5lbWRl","4LmI4LiX","5oOR","4KWL4KSCLA==","0YfRjw==","44G+44KL","INin2YTYqtmG","0YDQsNCz","65Ok6rO8","rZQ=","INmF2YbZh9in","IFRo4bq/","6ZC1","IO++hA==","INin2YTYpdiz2YTYp9mF","44Km44K5","2YrYr9mK","IOW+lw==","INC30LDRgNCw0Lc=","44K444Ol","INiq2LnYrw==","acOt","IMOnb2N1","b3ppY2k=","IOuylA==","INii2YXYr9mH","0YTQuNC6","INC/0L7RgdGC0LDQvdC+0LI=","IGtyw6Fsb3Y=","wqjCqA==","IOykkeyalA==","IEdXZWk=","IHbDvXZvag==","IGJveXV0","IG5law==","2KfZh9in24w=","IHN0cmFuxJs=","0LjQtdC8","INC/0L7RgNCw0LY=","4KWN4KSw4KSm4KSw","6aGU44KS","IFnDvHo=","INC+0LfQvdCw0YfQsA==","4LmB4Lil4LiZ4LiU","INio2YfYsdmH","0LXQvdGC0YM=","INCd0LDQtA==","INCf0L7Qu9GM","44OX44Oq","4b+2","4oCM2b7Yr9uM2KfbjA==","INm+2KfZiNix2b7ZiNuM2YbYqg==","4Li04LiB4Liy","IM61zr3Pjg==","INiz2KfbjNix","6YG6","44CB5LuK","IEzDqg==","5LqL5oOF","IFllcg==","6IWw","INin2YTYsdiz2YU=","INin2YTZhdmI2YLYuQ==","IGjDoG0=","INC00YDQtdCy","w6F0ZWw=","INCy0YHRkQ==","7Jil","IE1lYw==","44Kb","INi12KfYrw==","INqv2LHYr9iv","IGtyw6Fz","6IyD5Zu0","YWxhcsSxbmE=","6Jma","INii2YjYsdiv","57yT","4Li04Lie","IOODiw==","IOaApw==","INmF2Yb
YsA==","57e0","IOq2gQ==","0LLQsNC10Lw=","IM62z4k=","IG5hdnI=","z4PPhM6xz4POtw==","INix2KM=","IGRvcGw=","77y/77y/77y/","55Sa6Iez","xI1lbA==","5oSP5ZGz","56Wt","w5g=","0YHRgtCy0LXQvdC90YvQtQ==","6KOh","IOOAiQ==","IOOAgCDjgIAg44CAIOOAgCDjgIAg44CAIOOAgA==","INCy0LDQuw==","IOG6qW0=","IGRpeW9y","4Lit4LiH4LiI4Liy4LiB","IFBow7M=","INCT0LU=","INCy0LXRgNC10YE=","IGtvbno=","2LHYsg==","INGB0L7QsdC+0Y4=","IM61zrrOtc6v","7JiB7Ja0","aWFn","INGB0LXQvdGC","IG7huqV1","IGpha8Op","IHJvemg=","INCx0L7Qsw==","2YbYp9iv","INin2YXZiNix","4LmM4LiB4Liy4Lij","IFlhxZ8=","6Yg=","5ZWq","IG9uYXk=","7JeH","b211","0YbRltC50L3QvtCz0L4=","INGB0LDQuw==","IM6jz4XOvQ==","IHNhdnVu","5aaZ","4LiI4Liw4Lih","44K544Kv","IGRvc3k=","nJg=","66i5","IG1pbnVs","44CLCg==","5YGP","INCa0LDRgg==","IGVkaWxtZXNp","0YbRltGU0L0=","7ISx7J20","5ZaU","INCy0ZbRgA==","6K+R","4KS+4KSH4KSh","INmI2YLYqtuM","xJDhu4M=","IHZ5xaHFocOt","xI1pbGE=","0LDQtNGD","54m55Yil","IOyduOq4sA==","dWrDrWPDrWNo","IFBvZGxl","IHlhdmHFnw==","meaxnw==","IGtheWI=","5Yqq","57S5","INC+0LHRgNCw0LHQvtGC","INC80LDRjw==","IOWPig==","5o6l5Y+X","2YbYqtuM","IM+Hz44=","0YLRgNC+","IHV5YXI=","INi52YXZhNqp2LHYrw==","INC+0YbQtdC9","INC80LXRgdGC0LA=","4LiV4Lil4Liy4LiU","2YXZgg==","aWxkcmVu","INC30LDQstC40YHQuNGC","wqAgwqA=","IG1vxb5uw6E=","5pit5ZKM","xLFya2Vu","0LrQuNC9","5Z2C","z4TPg865","INGH0YPQtA==","0JrQvtC9","aXNsYXY=","INCa0YDQsNGB","TmVq","wqBi","cm9m","IGlsZXJp","INCe0YA=","IENo4buJ","IG7DvGZ1cw==","INGW0L3Rgg==","IeKAnA==","IOCkqOCksA==","5Li75LmJ","INiq2YbYuA==","xa92b2R1","INCz0L7RgNC+0LTQsA==","IGt1cmFs","IGplZGlu","0YDQsNGC0LXQsw==","5YC6","IHpwxa9zb2JlbQ==","7J247J2Y","INmG2Kg=","IE5nYQ==","INCd0LDQuQ==","INin2YHYstin2LE=","0L3Rg9Cy0YHRjw==","INC00LLQvtGF","IHJvenA=","zrXOr86/z4U=","IM6/zrnOus6/","IEdlw6c=","wpc=","IGNoaeG6v20=","INGA0LDRgdC/0YDQvtGB0YLRgNCw0L0=","IGjGsMahbmc=","6Ieq5YuV","INmF2YjZgdmC","5oyl","77yB4oCdCgo=","z4HOv8+Gzr/PgQ==","6I+M","44O044Kh","5qyn576O","INGC0LXQv9C70L4=","44GC44GC","44Km44Oz","IMWfZXlp","IHPDvHQ=","44G544Gm","44Oz44OR",
"zrzOrc69z4nOvQ==","IGdlbmVsbGlrbGU=","INiv2LHZhdin2YY=","2ao=","IGFrxLFs","INCc0Ys=","IGV0bWnFnw==","xaFsYQ==","INCy0L7Qt9C80L7QttC90L7RgdGC0Yw=","IGfDvG5jZWw=","IG7DoXJv","5b2i5byP","IM6xz4DOv8+EzrU=","INC80ZbRgdGG0Y8=","INix2LY=","5LiN55+l6YGT","cmF2YQ==","IM6azqw=","4Li04LiZ4LiX4Lij","INC70LjRgdGC0Yw=","6Iac","44Gr44Gq44KK","IOadvg==","5a6P","INC80LjRgQ==","w6F0bsOt","IHnEsWxsxLFr","IE1lcmtlemk=","IGnDp2VyaQ==","xZnDrcW+","IHDFmWU=","z4fPgc65","IOWNgw==","IHNycA==","4LmC4LiX4Lij","IEtyw6Fs","Ls6j","w6F2YWw=","bMOpZA==","IM67zrE=","4Li14Lii4Lin4LiB","44GP44Gq","IHbFoWljaG5p","INC/0YDQtdC00L7RgdGC0LDQsg==","7L8=","IOq1rOq4gOyDgeychA==","IOCkieCkquCksuCkrA==","0LLQvtC3","IOuFhOuPhOuzhA==","772kXw==","4Lia4Lij4Lij","INGB0LLRltGC0YM=","INGA0YPQsdC70LXQuQ==","bGVubWU=","bMOtxI0=","z4TOtc65","IOWPpA==","IE9icsOhemt5","IOyYge2WpQ==","INCz0YDQsNC20LTQsNC9","7YK5","IHNhaGlwdGly","INC/0L7Rh9Cw0YLQutGD","INij2YrYtg==","INGC0L7RgNCz0L7Qsg==","IGdlbGVjZWs=","IOusuO2ZlA==","aWtsZXJp","INC90LXQvtCx0YXRltC00L3Qvg==","IOS6kQ==","b3ZvbA==","IOCkpuCksg==","IOyViuqzoA==","INC80LM=","IHpqaXN0","YW5sxLE=","4Lix4LiH4LiZ","0YDQsNGF0L7Qsg==","zrnOvc63","INC/0LvQvtGC","IG5pdGVs","7Iqk7YGs","IFNvbnJh","INGB0LHQvtGA","IM+Dzr/PhQ==","IG9sbWFt","IGFuYWxpeg==","4LmM4Lin","IG3hu7k=","Y2VhZQ==","INC00LXQvQ==","0LLQtdGA0LbQtA==","4bqi","44GT44Go44KC","7IKs7ZWt","6KiA44Gj44Gf","IOy5tOyngOuFuA==","0YDQuNGC0Lg=","IGNoY2U=","IMOnZXZpcg==","24zbjNmG","5Lya6K6u","4Lix4Lih4Lie","IOWE","INm+2K/YsQ==","5byP5Lya56S+","INGG0LXQvQ==","4Li04LiW","IGppbmFr","INCx0LvRjg==","0LjRhtC40L0=","2ZLZhw==","2qnZiNix","IOyVhQ==","ZWtzaXlvbg==","INGB0LLQtdGA","INC+0LHRgNCw0LfQvtCy0LDQvdC40Y8=","IOODmQ==","5pyJ5Lq6","IGJpbGdpbGVyaQ==","IGjhuqd1","0LXRgNGW0LM=","IHZhxaFl","IG5lZGly","5LiN5b6X","IGJhxZ9hcsSxbMSx","IGtheWJldA==","5am3","INCd0LDQsg==","IOq0gO2VnA==","0YHRgtGO","5a6e6ZmF","a2xhZHk=","0LTQsNGC0Yw=","cmHDpw==","IGt1dnZldA==","4LiB4Liy4Lij4LiX","5Zo=","INGA0LXQvw==","IOC4nQ==","IERpxJ9lcg==","7ZSE7Yq4","IG5lanbEm
3TFocOt","IOyggeyaqQ==","IG9uZW1vY27Em27DrQ==","0LDQutCw","0KDQsNC3","INmB2KXZhg==","44K144Kk44K6","IHZsw6Fk","IHJhZHk=","44CB44GT44KM","0YHRgtCy0LjQtQ==","bMSxxJ9h","5a2U","IMOhbw==","4Lit4Liy4LiB4Liy4Lio","IOCkj+Ckrg==","zrTOsc+C","INCw0L/RgA==","5o6b","IOeriw==","4paP4paP","INCh0Lw=","IG5lbcOh","IOii","zr3Ov868zrE=","INmB2LHZiNiv","IMO8bGtl","IOaYnw==","4Lix4LiZ4LiB","44GV44KT44Gu","ZcWfaWw=","xJ9peg==","INCR0L7RgA==","IHThuqdt","zrXOuc+Ezr/Phc+BzrM=","IM6zz4HOsQ==","4KWN4KS34KSV","IHbhurs=","IGtlbmRpc2luZQ==","IOyVjOqzoA==","IOq1reygnA==","IG7Em2tkbw==","INuM2Yc=","INqp2KfYsdio2LE=","44OZ44Or","77u0","IHR1ecOqbg==","IMOnYXQ=","4oCQ4oCQ","wo8=","IOyCrOyXhQ==","6YaS","5o+Q6auY","5reh","IMSf","6Jam","44CL77yI","5qGD","7JeE","IOaelw==","xII=","IMSMZWNo","zrHOuc6/","INi32LHZitmC","INC30LDQstC10YDRiA==","2KrZiNio2LE=","INit2Kw=","IM6tz4fOv8+Fzr0=","wr/Dgg==","IGTEm3TDrQ==","IGnDp2luZQ==","IENow7ph","0LDQvdC90YvQuQ==","INmI24zamA==","IG5hc3Rhdg==","xLFzxLFuYQ==","INGX0Lw=","0L/QvtC9","0LXQvdGP","INmI2Lg=","2q/ZhA==","4Lir4Lil4Lin4LiH","IHphc3Rhdg==","0LDQutC+0L0=","wqDCoMKgwqDCoMKgwqDCoMKgwqDCoMKgwqDCoMKgwqDCoMKgwqDCoMKgwqDCoMKgwqDCoMKgwqDCoMKgwqDCoA==","IEvEsXI=","57W2","INC+0YDQs9Cw0L3RltC30LDRhtGW0Zc=","44Gf44KK","2LDZig==","IOCksOCklQ==","YW1waXlvbg==","IOa4hQ==","55y8552b","IOyViuydgA==","6bm/","IOW/gw==","INC/0YDQtdC60YDQsNGB","INGB0LXQs9C+0LTQvdGP","IOCkuOCksg==","IM+Fz4DPjA==","INCV0LPQvg==","INCb0Lg=","44Ko44Or","INC70Y7Rgg==","6aWw","IHZ6ZMOhbA==","wq/Dgg==","INC90LDRj9Cy","INiq2LTaqduM2YQ=","INiz2YjbjA==","IHTDoWk=","IGthcMSx","IHN2xJt0xJs=","zrTPjM69","5ryi","7I2o","IGJhxZ92dXI=","0YDQuNC90LA=","IGtlbGlt","0LDRgtC+0Lo=","IM66zqzOuM61","IFnDvGtzZWs=","4LmH4LiZ4Lic","6aCC","5ZCM5pmC","xZ90xLFy","4Lin4LiH4Lio","b3R5","INin2LHYrw==","IOyekOyLoOydmA==","INGP0L3QstCw","w7x5b3JkdQ==","5p2o","IOKAkwo=","77yM5a6D","0LXQudC9","INC/0LXRgNC10YI=","IGRlxJ9pxZ9pa2xpaw==","INC+0LPRgNCw0L3QuNGH","7ISc7Jq4","IGdlbGl5b3I=","INm+2LDbjNix","5ZOy","ZXlpbg==","IOuPiA==","IHVuaXZ
lcno=","IGhuZWQ=","IHThuq1u","dm/FmcOt","IG5pw6pu","ZMSbcG9kb2I=","7IKs7ZqM","44GM44GC44KK","INGB0ZbRhw==","Jyci","IHRvcGxhbnTEsQ==","INGB0YfQtdGC","5YeG5aSH","0LDQvdGW0Y8=","IHplbA==","dmFsYQ==","INCw0L/Qvw==","INin2YTZhdmE2YM=","IGhvxZ8=","INCT0LXQvQ==","0YLQsNCx","IMSMZXNrbw==","INC80LDQudC20LU=","IG3Em3N0bw==","eW9uZWw=","6rGw66as","IOyYqOudvOyduA==","57Sv","IGRlcmVj","INC+0LrRgNGD0LY=","IHlhYmFuY8Sx","IO2EsA==","IOi1hA==","zpnOms6X","INC/0Ys=","IHbEm24=","0LjQvdC60Lg=","4bulcA==","5py65qKw","IOyVjOugpA==","64WV","IM67z4zOsw==","ZXlu","IOuQmOyXiOuLpA==","5rGh","IHZlZGxl","INmD2KrYqA==","66eo","INmF2YLYp9mI","5bm044Gr","4KS+4KSH4KSV","INGB0YLQvtGB","IM+Dz4TOv8+Fz4I=","0LzQtdGC0Yw=","IGVzYXM=","65CY6rOg","IGt2xJt0bmE=","IOmc","ZMO8aw==","5Z+3","6KqM","IG1sdXY=","INC/0YDQuNC90Y8=","IHBvdMOp","INqp2YbZhQ==","INC/0YDQtdC00LvQvtC2","INCc0L7RgdC60LLQsA==","77yM5aaC","IHN2w6lt","INin2YXZhg==","4Liq4Liy4Lii","INGD0LzQtdC90Yw=","IOOBk+OBrg==","5YmC","INGB0LXRgNGM","IG3hu4c=","IOS5nQ==","INC30LDQutGW0L0=","INCy0LXQu9C40Yc=","INC60L7QvdGC0YDQsA==","IFNvc3lhbA==","IHl1a2FyxLE=","INiv2YjYqA==","5L6n","INC30LDQvNC10L0=","77uu","IHNvYsSb","INCi0LDQutC20LU=","0I4=","zrXOtA==","2YXYp9ix24w=","zr7OuQ==","7Lmt","INC/0LvQsNGB0YLQuA==","z4POv8+Fzr0=","6JyY6Jub6K+N","2YjbjNiy24w=","IG5hcMWZ","INGC0LjQv9Cw","4KWC4KSb","IMWfYWg=","0LvRj9GC0Lg=","2KjbjNix","4Lij4Liw4Lii4Liw","INCx0L7Qu9GM0YjQuNC9","z4TOt8+EzrE=","IO2PieqwgA==","IHByb2pldg==","w7Jp","INC60L3Rjw==","z4bOtc+B","0LXRgNGD","0Y3QvQ==","INi52YXZhNuM","4KSg4KSo","44Oz44Kv","IOyVhOuemA==","zog=","INio2KfYs9iq","INiq2YM=","YcSNbsOt","INC70ZbQutGD0LLQsNC90L3Rjw==","4LiE4LmC4LiZ","IOiDvQ==","zrjOu863","bGVubWnFnw==","IGzhu5k=","IHNpbGFo","IEF1c3Ry","2K3Zgg==","LioqKi4qKio=","7Kk=","IGfDoA==","INio2KfYstio24zZhtuM","IMSRw6Bu","w61reQ==","IM6Vzr0=","2LbZhQ==","5aeT","INmG2YjbjNiz","IHNrdXBpbnk=","INiz24zYrw==","IGFsZMSxxJ/EsQ==","bWVsaQ==","0LLQuNC2","7LmY64qU","0L7QstCw0YU=","IOap","2LTZhtin2LPbjA==","IG5pbWk=","INCT0YDQuA==","7ZeM",
"INC60LI=","6Z+T","IO2bhOq4sA==","IHN0xZnDrQ==","INC60ZbQu9GM0LrRltGB0YLRjA==","IEJha2FubMSxxJ/EsQ==","INC80LXQvdGM0YjQtQ==","2KfZiNuM","INin2LHZiNm+","IOiJsg==","INqp2Yjahtqp","IEF5bsSx","IOS6hg==","INiz2YHYsQ==","INGC0LXQsNGC","IHbEm2Q=","0LDRgNC+0LI=","INC+0LHQvNC10LY=","IOyViuyVmA==","6L+95Yqg","6aCI","ZMSbbGVuw60=","IGtpbXM=","IOiPsg==","INCz0YDRg9C9","INii2YTZhdin2YY=","INCw0LLQsw==","INGJ0L7RgdGM","IOW+tw==","INCd0LDRhtGW0L7QvdCw0LvRjA==","5oiQ56uL","4Li54LiZ4Lii","44O844Or44OJ","6Zuy","IFThu5U=","Y8SxbMSxaw==","IEFsbWFueWE=","IG92xaFlbQ==","wos=","IM+Hz4HOt8+DzrnOvM6/z4DOv865","IMO2cmfDvHQ=","4KS/4KS44KS4","6Jed","IEdp4bqjaQ==","IHN2b2I=","IHLFr3puw71jaA==","IHNtbG91dnk=","0YDQtdGB0YE=","4Li14LmA4LiU","INin2YXYsdmI2LI=","44KF","5Z2m","4LmJ4LiE","INC60LDQtg==","5byX","0YfQvdC+0Zc=","5ZyI","INii2YfZhtqv","66qw","IOa6","IOiE","5LiA5q2l","0L7Rh9C60LA=","IHByb3N0b3I=","IG5n4bqvbg==","IOe3","0L3QsNGA","IOCknOCktQ==","INC90LDRh9Cw0LvRjA==","INC90LXQtNC10Ls=","INGB0LjRgdGC0LXQvNGD","2KzZig==","2KfYr9in2Ko=","IOai","INis2KfZhdi52Kk=","IOS7jg==","IOCkheCkqw==","6JaE","INio2KfZgg==","2KjZiti5","44GV44KM44Gm","IMOHYWzEscWf","2K7ZiNin2LPYqg==","44OD44K344Ol","INit2LPbjNmG","INC+0LHQvdCw0YDRg9C2","0LLRltC00L7QvA==","IGjDtG0=","0LvQsNC90LQ=","IOCkteCknOCkuQ==","2LPZitmG","5qCP","IG5hdsOtYw==","44K144Kk44OI","INGP0LrQvtC80YM=","IO2b","IFlhbmk=","44KT44Gn44GZ","INCz0YDRg9C/","xI1uw70=","0YbQuNC6","2YjZitix","IFjDow==","IGZ5eg==","IO+9iQ==","4oCM2KrYsduM2YY=","4KSf4KSV","0YTQvtGA0LzQuA==","IE95dW4=","5aC05omA","2K3Yqw==","IOyVjOyVhA==","0YDQsNCy0LjQu9GM","77yM4oCd","Ym9ydQ==","IEt1bGxhbg==","IEtheW5haw==","IOqwlg==","57SU","77yM5q+P","zpfOoQ==","IHDFr2w=","INCz0L7RgdGC","2LHZiNmF","77yM5Y2z","27Lbsw==","INmG2K7Ys9iq","INqp2LPYqA==","IOC5gOC4mg==","IHlhemFy","amVrdA==","4LmC4Lil4Lii","INC00L7QsdGA0LU=","INm+2LLYtNqp24w=","INiq2YfbjNmH","576O5ZyL","0L3QvtGB0Y/Rgg==","66CI7Iqk","5Zev","IHLDoG5n","IM6Vzr4=","0LDRgtCw0YA=","a292YQ==","IMWfZXlsZXI=","2K7Yp9i1","IOyViOyghA==","0YnQt
dC5","IOuwnQ==","4oCM2KrZiNin2YbYrw==","44GI44Gw","IHbhu68=","INGB0LDQvNCw","INC+0LHQvtGA0YPQtA==","4oCM2KjYp9i02K8=","4LmM4Lit","IGRldGF5","5oKy","wog=","44Km44Kj","INC/0YDQsNCy0LjQu9Cw","a3LDqXQ=","4LmM4Lij","5Yy5","IOWFjQ==","INGB0LjQu9GM0L3Qvg==","INC40YHRgtC+0Yc=","IHNhxJ9sYXI=","IOatpg==","7ZaI7Iq164uI64uk","S2jDtG5n","4LmI4Liy4LiH4LmG","27DbsNuw","INix2YI=","4oCZ0Y/Rgg==","5Zuy","4LmB4LiU4LiH","IMW+w6FkbsOp","Y291eg==","w4s=","INC/0ZbQtNCz0L7RgtC+0LI=","IOuMgO2VmQ==","IGTDvG55YW7EsW4=","6ICB5biI","6IGM5Lia","IHllcmk=","4KWL4KSV4KSw","INio2YfYqtix","64uI7JWE","7J2M7J2E","IOaMhw==","44CN77yI","INGB0L7QvtGC0LLQtdGC0YHRgtCy0LjQuA==","5oqT","4LmC4LiX","IHRy4buTbmc=","INC/0YDQsNGG0ZY=","IOuGkw==","4KSH4KSo","IOygleunkA==","44CV","IGPhuq1u","5Zad","IOqzhOyGjQ==","IOS4jg==","5aWP","INi52KfZhNmF","IHZ5c3bEm3Q=","INC00L7RgNC+0LM=","INC90LXRgNCy","INCx0LXRgg==","INC/0YDQuNGC","0L7QstGL0Lk=","5beh","2YHYp9i5","0JrQmA==","4LiV4Lij4Lin4LiI","INCc0LDQuQ==","64+E66Gc","IHpsYXQ=","IHNhxJ9sYW0=","z4HOsc69","4LiK4Lij","5bm044Gu","4LiE4Lij4Lit4LiH","woU=","IGhvw6E=","INC00L7QstC+0LvRjNC90L4=","IG9sbWF6","IHBvZG3DrW5reQ==","INGF0L7Qt9GP0Lk=","5pm0","0YDQvtCy0LA=","IGzGsOG7o2M=","4KS+4KSo4KSo","INC60LDQv9C40YI=","INqY2KfZhg==","5pyJ5Lqb","INC/0L7QstC10YDRhdC90L7RgdGC0Lg=","INGG0ZbQvQ==","w7x5bGU=","IGphenk=","IFBow7o=","IOCkuOCkqA==","5Ye65ZSu","wqDQtA==","IOOCrw==","55Sx5LqO","4KWN4KSq4KSk","INin2YTYrtin2YU=","INin2LXZhNin2K0=","INiq24w=","IHRhdG8=","5bm5","5rO9","4Lit4LiB4LiI4Liy4LiB","0YPQu9GO","INCy0YHQvw==","bWVrdGU=","4KWA4KSr","INqY2YjYpg==","IGzhu4duaA==","4oCM2qnYsdiv","7Y+s7Lig","YW5raQ==","IOuTseuhneuMgO2WiQ==","IOOCnQ==","INin2LHYsti0","IHRow7o=","IOG6pW4=","6KGM5Li6","INGB0L3QvtCy0LA=","6r64","IHNvdWhsYXM=","INCy0L7Qt9Cy","z4HOrc+AzrXOuQ==","INC90ZbRh9C+0LPQvg==","0L3QvtC2","0YLQuNC6","44Gp44GT","INC+0YHQvdC+0LLQtQ==","44Kl","4Lib4Lij4Liw4LiI4Liz","IOC4l+C4reC4hw==","IGVrc2lr","INmE2KU=","44GL44Gu","IOOBqg==","LeCkqg==","z4HOtc65","IOyghOusuA==","4Liy4LiB4Lil","zrLOtQ
==","7Yq567OE","7ZWY66m07ISc","4LiE4LmC4LiZ4LmC4Lil4Lii","IOWlvQ==","IHlhecSxbQ==","66eM64Ko","INC60LjRgdC70L7Rgg==","INGN0L3QtdGA0LM=","55a+","INiv2LQ=","IHNvcnVtbA==","INC30LDQutC70LDQtA==","4LiK4Lit4Lia","INmB2LHZh9mG2q/bjA==","IOCkj+Cksg==","IOu5hOq1kA==","bGVyY2U=","INi32YTYqA==","44Gr44GX44Gm","INGP0LrQvtGX","INin2YTYqNiq2Yc=","INCc0LDRgg==","5Y2T","IOWFrOWPuA==","IHPDtnlsZXk=","IOyDiOuhnOyatA==","INGE0LDRgA==","IGFsdMSxbmE=","IHN0YXZ1","4oCZxLE=","YWxpemFjZQ==","INCy0LjRgdGC0YPQvw==","5pWZ5biI","4KWA4KSP4KS4","b2TEmw==","INGG0ZbQuw==","IOuMgOyDgQ==","INC60L7RgtC+0YDQvtC8","INi42LHZgQ==","6Y6u","2KfZgdmK2Kk=","IOyXhuydtA==","IM68z4zOvc6/","IEPGoQ==","5a+7","z4TOuc+D","IOOChA==","IGplZG5vaG8=","2KfYpw==","ZXRsZXI=","IOCkteCkuA==","INGA0LDQt9C70LjRh9C90YvRhQ==","INis2LrYsdin2YE=","IHRo4burYQ==","INCz0YDQvtC80LDQtNGP0L0=","4KWw","INin2YTYo9iu","INC90LDQs9GA0YPQtw==","57i+","4KWC4KS5","INC/0YDRj9C80L4=","4oo=","INin2YTYo9mI2YTZiQ==","5paw6IGe","IOyDge2ZqQ==","aXRlc2k=","642w7J207Yq4","5q23","77yM6ICM5LiU","44Gv44Ga","5Lqn55Sf","5rCX44GM","eXNsdQ==","7Ja064KY","2KfaqdmF","4oCD","KeydgA==","INis2LPYqtin2LHZh9in24w=","2YjYqw==","44WO","IGthdnJhbQ==","dsOhbA==","5pyt","5oKg","7IWA","aHJhZA==","INiq2YPZiNmG","IEjDsmE=","5bm055qE","IMOnYXJw","IHlvbHU=","IGR1Ym5h","INCS0LXQu9C40Lo=","IHTDtG4=","5pWM","IGNvaQ==","IG5ha29uZWM=","INGN0YLRgw==","7Ya166C5","0YjQtdC7","IG5lYnls","aW7Dpw==","2KjYp9mE2KXZhtis2YTZitiy2YrYqQ==","77yh","0L7QvdGM","INC90LXQvNCw0ZQ=","IOqzoOqwnQ==","INmC2LfYuQ==","INGC0LXRgNC40YLQvtGA0ZbRlw==","5Lq644Gv","IM6jzrE=","6YKj5Lqb","44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA","aW9zcGVy","7YKo","cmFraQ==","2KfbjNis","wqBD","INCw0L3QsNC70ZbQtw==","44KP44KK","IOyVhOuLjA==","INin2YTYudmF2YTZitip","bGFtZW50","6buo","dWrDrWPDrW0=","IHLhurs=","5LiN5Yiw","IHJlemVydg==","INin2YTYsNmK2YY=","5oul","0JjQvQ==","IOCkpOCkueCkpA==","cmVzaQ==","IOODog==","0LvQtdCy","44CAcg==","IOS7ig==","IMO2ZGVt","IHBvdHJhdg==","IOq1kOyImA==","0YDQtdC00Lg=","IM6azpHOmQ==
","INC90LDRh9Cw0LvQsA==","INC40LfQsQ==","IGLFmWV6bmE=","IGxlZG5h","0YDRg9GO0YI=","INC80L7Rgg==","5Y+X5Yiw","INGA0YPQutGD","4bubbQ==","YWRlbGU=","INGA0L7Qt9Cz0LvRjw==","5YWQ","INix2YjYp9mG","0LDQutC+0LI=","0YPRgNGL","IGF6YWw=","INGD0LrRgNCw","0L/QuNC+0L0=","IMSNbG92xJs=","5LqM5LqM5LqM5LqM","2KfYqNmK","IGFzbMSxbmRh","67mI","INCy0YDQsNGH","66O5","INCz0LXQvdC10YDQsA==","4LiB4Liy4Lij4Liq","INGB0L7QstGB0LXQvA==","2YjZhNin","IOCktuCkrA==","4KS+4KSW4KSj","2LPYqtin2YbbjA==","5oq9","IHLFr3o=","IO2MkOunpA==","4LiB4Liy4Lij4LiV","2KfYptuM","YXNhbA==","INGA0LDQsdC+0YLRgw==","4KWL4KSy4KSo","IOmprA==","IGxhaQ==","w7Np","dmFw","64WE7JeQ64qU","INC/0LXRgNC10LTQsdCw0Yc=","INC/0LvQtdGH","aWRkZXQ=","INGH0L7RgA==","aXlhbg==","44CA44CA44CA44CA44CAIOOAgA==","INit2LHZgdmH","5aSn6Ziq","0YfQvtCz0L4=","INC60Lg=","2KfZiNmK","IGJhxZ9sYW4=","IG1lcmtlemk=","wqnCqQ==","INix2KfYs9iq","IOuKlA==","INGB0YDQsNCy","INCy0L3Rg9GC0YDQuA==","44CA44OO","5Z2b","INCy0YI=","Ojov","IHPDtnpsZcWf","IHZlcmRpxJ9p","4Li04Lii4Lih","INCf0YDQvtGC","2YPYp9ix","INio2YbYr9uM","2Y/ZiA==","55u05pKt","INmF2YTZig==","IG51dG7DqQ==","4Liw4LmB4LiZ4LiZ","IE3Dow==","IOy0","4LmI4Liy4Lih","0LzQvtGB","INC/0L7Rj9Cy0Lg=","IG5naGk=","IOuQmOuKlA==","0YHQutC70LDQtA==","4KSX4KSy","IEPhu5luZw==","55+l6K+G","IHRhag==","INi52KjYsQ==","6ZmE6L+R","w7zEnw==","IOqzteqzoA==","6KOV","4oCM2LTZhg==","IGdlcsOnZWt0ZW4=","bnVu","2YXYtA==","6rCA64ql","44Op44Oz44OJ","YXlhY2Fr","5Y2B5LiA","IELhuqNv","IHlldGVybGk=","xb5pdg==","INmK2YbYp9mK2LE=","IGLDvXZhbA==","7JuU6rmM7KeA","IG7hu6M=","IOq0gOqzhA==","IO2drA==","0LDRjtGC0Yw=","IGfDtnTDvHI=","INCy0LDQttC90L4=","5rWp","IOydvOu2gA==","0YbRltC50L3QuNC5","66Cl7J2E","INC70LXRh9C10L3QuNC1","6Zai5L+C","IFTDvG0=","7JmU","6YGX","IETDtm4=","INGB0L/RltC70Yw=","44OB44Kn","0L3Rj9C10YLRgdGP","aWx0ZXJl","IO2MgA==","6Kit5a6a","IHJvZGlu","INin2YLYqti12KfYrw==","0LDQu9GM0L3QtQ==","4KWN4KSV4KSw","IHbDvWLEmw==","IHRlaGxpaw==","4pSQ","IOeUsA==","z4HOr8+C","aXllbA==","IHRoaeG7h3U=","z4jOt8+C","INC00LLQtQ==","IEVsZWt0","4LiB4LiO","0L7
RgNGD0LY=","YcWfxLE=","6Kmz57Sw","INin2KrZgdin2YI=","IGfhuq9u","5rKS5pyJ","INmF2LfYp9mE2LnZhw==","z4TOuc69","IG9rcmVz","0Zw=","6rCU64uk","0KDQvtC3","5b6L5a6+","77yJ77yI","IOyatOyYgeyekA==","44Kr44OG","bGHEjQ==","4KWH4KSs4KS4","IG/EjWk=","LdCx","ZWxlcmRlbg==","a292w71jaA==","IMSwem1pcg==","4Liq4Lih4Liy4LiK","bGFkYXRlbA==","IOa7","6ZSA5ZSu","INC00L7RgdC70ZbQtNC20LXQvdC90Y8=","INC70ZbQutCw0YA=","INC+0LTQvdCw0LrQvg==","IFbDoWM=","IOir","6YCy6KGM","5Lul5aSW","6bOl","INmG2Kw=","IGJhxZ9rYW4=","IG9wYXTFmWVuw60=","2KfYsdi0","2LbYp9mB2Kk=","44K544Os","zq7OvQ==","xJt0w60=","4Lin4Lii","INix2LPZiNmE","xZlpY2g=","IHDFmWlo","0YzQvNC4","54S26ICM","IHRo4bqzbmc=","bGFtYXo=","2YDZgNmA","IOywuOyXrA==","INmG2YjYtNiq2Yc=","INGB0YLQtdC6","44Gu44G/","INmI2KfZhNi5","5pWi","4KWA4KSCLA==","0J7RgdC90L7Qsg==","0LjQvNC+0YHRgtC4","IMSMZXNrw6E=","0ZbRh9C90LjQuQ==","4Liy4Lih4Liy4Lij4LiW","ZWtrw7xy","wqBo","zrnOus63","INiq2LnbjNuM2YY=","0LrQvtGB0YLRlg==","IE11c3RhZmE=","IOymiQ==","44Gn44GC44KK","5bel5Lia","b3bDrWQ=","0J3Qvg==","INiz2b7Ysw==","2q/bjNix2K8=","INC/0LXQtNCw0LPQvtCz","INqp2KfYsduM","INGI0YLRgw==","5oyC","2KLZhdiv","55yf5piv","INin2KjYqg==","INix2KbbjNiz","INiv24zZhg==","z4jOtQ==","IHNlem9u","IOeG","4KS44KSo","44O744Ki","IOWFrQ==","IOix","IOygnOuqqQ==","INmF2LnYrw==","INmB2YLYrw==","6YKK","zqnOow==","IOWh","IG9idnlr","IOydtOugh+qyjA==","INCx0L7RgNC+0YLRjA==","27LbsQ==","IOG7kW5n","6K+X","IMSQ4buRaQ==","INCx0LXRgNC10LfQvdGP","IHNvxJ8=","IO++jQ==","44KS44Gk","44GX44KD","0LXRgNC10Yc=","44CA44CAIOOAgCDjgIAg44CA","5oiq","INin2YTYs9i52YjYr9mK2Kk=","IOuCqOyekA==","IEFuZ2lvc3Blcg==","Pz8/Pz8/Pz8/Pz8/Pz8/Pw==","IHByxa9t","INC/0LvQvtGJ0LDQtA==","IM+Ez4HOsQ==","0LTQsNGO0YI=","IHPEsW5hdg==","IG3hurdj","5rC05bmz","INCy0LjQs9C70Y8=","IG7DoXN0","INC+0LHRi9GH","IOydtOyVvOq4sA==","67mb","IEJhxJ8=","INin2YTYq9in2YTYqw==","IHNlcnZpcw==","IOufrA==","0L7QvNC40L3QsA==","zq/OuA==","IOG6pA==","6rK96riw","IOyhuA==","4Li14Lia","IOCkmOCkn+CkqA==","IOC4meC4suC4hw==","Ls6g","7JWV","csO8bg==","IG9ubGFyxLFu","INC30LHRl
tC70YzRiA==","4LmB4Lif","IOyXrOq4sA==","IOuMgO2RnA==","INGB0LjQu9GD","4LmC4Lib","INiq2YLYrw==","INCf0L7QvA==","INC80LDRgdC70LA=","IOyYgeyDgQ==","0L3QtdC90LjQtQ==","zrvOsc68zrI=","IEJ5bA==","5oq1","5o6q","IM66zrHOuM+Oz4I=","bcSxesSx","5paw55qE","6YeN6KSH","4Lix4Lib","562G","INGC0LrQsA==","INC30L3QsNGH0LXQvdC90Y8=","0LvQsNGC0Lg=","IHZsaXY=","0JDQvQ==","INqG2KfZvg==","INC/0LjRgtCw0L3RjA==","Ou+9iQ==","5pWZ5o6I","IOy5nOq1rA==","IHRyYW8=","4KWN4KSv4KSV4KSk","4Li44LiE4LiE4Lil","INix2YjYtNmG","INi52YTZitmH2Kc=","44CB44GE","64WE7JeQ","6YCG","INC80LDQs9Cw0Lc=","776e776e","IHNpY2U=","4oCZdGU=","INin2YTZhNi62Kk=","w6F1","6Ieq6Lqr","IG5nxak=","INGB0LrQu9Cw0LTRgw==","IHpydQ==","IHRydXk=","IGlsYW4=","INm+2KfbjNmH","Ojo6Ojo6Ojo6Ojo6Ojo=","ZmFr","0YLQtdGF","IHRha3k=","IOyWuOyWtA==","ZWRlbsOt","IOCkmuCksuCkpA==","IOuwsOyasA==","IGptw6lubw==","INmE2KPZhg==","zrHOvc6s","0LrRg9C70Yw=","INit2YHYuA==","INii2LLZhdmI2YY=","0LjRgtC10LvRjNC90YvQtQ==","INCe0LvQtdC60YHQsNC90LQ=","6I2j","IOCknOCkrOCklQ==","IHJvZGk=","INio2LHYrtmI2LHYrw==","IGhhZnRh","zrvOuc66zqw=","4LiV4LiZ","INCx0LXRgNC10LM=","zrHOvc60","LdCh","IHByYXZpZGVs","INCx0ZbQu9GP","7ZKN","INC/0YDQtdC00YPRgQ==","INC80YPQvdC40YbQuNC/","5YyW5a2m","INiq2YXYp9iz","IOCkieCksg==","0JPQng==","2LrYsQ==","cmFkYW4=","IOuCmOyYpA==","6KiC","4LmA4LiY4Lit","4oCM2LPbjA==","INC+0LHRj9C30LDRgtC10LvRjNC90L4=","0L7RgtC1","4LmM4LiK","55So55qE","IGFsdMSxbg==","INGB0L7RgtGA0YPQtA==","0ZbQvdC60Lg=","0L7Qt9C80L7QttC90L4=","zpA=","67mM","wpU=","INGC0L7Rh9C90L4=","IGptZW4=","2KfZhNuM2Kc=","6IiN","Y2hvZHU=","6rOk","aWNrw6lt","INmF2YjYsQ==","44Oq44Oz44Kv","IGHFn2Ft","INC40YI=","IOCkqOCkrw==","IM68zr8=","6ZWc","INio2YbYp9io2LE=","INiq2K7Ytdi1","IOC4quC4ng==","INC/0YDQvtGE0LXRgdGB0Lg=","IHB1YW4=","INmB2LHZhdin2YY=","64yA7ZqM","INC/0Y/Rgg==","INmF2YjYqA==","IHbEm2t1","IOuD","ZWNrw70=","IOyImOuPhA==","IHRoYW8=","IGthcGF0","INC30LDRhdCy0L7RgNGO","IOWFiQ==","2LHYp9mG24w=","6YCg5oiQ","INGB0LLRltC5","INC00L7RgdC40YLRjA==","IG1pbHlhcg==","IGVuZXJqaQ==","INC60LjQvw==","IOy
ii+yVhA==","INio2KU=","6rKM7Iuc","IEzGsHU=","INmF2YbYuNmI2LE=","z4nOvM6s","zrbOrw==","xLFtZGE=","IOydtOulvA==","4LmS","INCy0LLQsNC2","IGdhemV0","4KWN4KSk4KSo","4LmJ4Liz4Lir4LiZ","5Zyf5Zyw","IOCkuOCkpuCkuA==","2KrYqNip","IHBvxI3DrXRh","IOyLnOyKpO2FnA==","4Lij4LiE","IGVkZWNlaw==","INiq2K3ZhNuM2YQ=","5oyJ54Wn","5Z2q","IOq3uOqwgA==","2KrZh9mF","INCx0LDQtg==","2KfZgdi5","6YCa5bi4","INCi0Lg=","zrPOvc+J","7LmZ","IHpuYW1lbsOh","77y877y8","zrHPgM+M","5YaZ55yf","IO+8vAo=","5Yqg5bel","6IKh5Lu95pyJ6ZmQ5YWs5Y+4","0Y/RgtC40Lk=","IGjDomw=","IMOnYWI=","INit2KfYttix","UMWZ","INin2YTYqtmC","zr7Ot8+C","0LHQtQ==","IGtow6Ft","IOKMkg==","IOmVvw==","IOKApgo=","4KSm4KSu","IFN0dWRp","IGtvZHU=","IGtvbXVuaWs=","IGthdGvEsQ==","bmV0ZQ==","IHJhcG9y","6Ya0","44KJ44Gb","INC90LXRgdC60L7Qu9GM","IGjhu41w","77+j77+j77+j","urw=","6KOC","0LXQtNGM","INin2YTYp9it","bGFkxLFr","IGZvdG/En3JhZg==","5pel44Gu","INit2KfZhNiq","INir2YTYp9ir","0LDRgtC+0LI=","ZXlzZQ==","IOqwkOyCrA==","w6HFvmU=","INC90LDQtNCw","IOCkleCkueCkqA==","IOODnQ==","44Gr44GC44KL","44Gr44Gq44Gj44Gm","2YjYr9mH","IHBvxaFr","5aSq6Ziz5Z+O","57uP6aqM","5pKt5pS+","IG1hamV0","0YXQvg==","INGC0LXRgdGC","77yPCg==","z4POtc+EzrU=","INCi0L7QvNGD","2Y7YrQ==","IOyeiOycvOupsA==","INC30LDQt9C90LDRhw==","6ZqQ","INC00ZbRlw==","0LrRgtC40LI=","2YjZgdmK","IHThu50=","4Li54Lib4LmB4Lia4Lia","INGA0LXQtNCw0Lo=","IGF0ZcWf","IGtoaeG7g24=","w7xueQ==","4Li14Lii4LiB","INGH0LDRidC1","IHR1eQ==","zrPPic69","4Lij4Lit4Lia","IHRyw7luZw==","4LmB4LiX4LiZ","IM6xzrrPjA==","INCS0LXRgNGF0L7Qsg==","4LmD4LiZ4Liq","44CB5L2V","5Yem","IOe7jw==","5qiT","2KfZhtqv2YTbjNiz24w=","IGxlcMWhw60=","IOW8gOWniw==","6Zm6","INGH0LXRgtGL","INCh0LXRgA==","0L7RjtC3","IHh1bmc=","5ZOB54mM","IOyDge2DnA==","INmG2LXYqA==","INGH0L7QvNGD","INiq2LHaqduM","LdC70Lg=","b3bDrQ==","INin2YbYrA==","57Wh","INiq2YjYtQ==","IOy/oA==","IHZhcnNh","INGA0LDQt9GA0LDQsdC+0YI=","4LiC4Lit4LiH4LiE","6a2C","IOCkiuCkquCksA==","5p2l6K+0","INGG0LXQvdGC0YDQsNC70Yw=","IFRha8SxbQ==","IG9ubGFy","INiz2LHYudiq","5aW95YOP","IGJ14buVaQ==","INCR0
LXQuw==","wqBj","2KPYqg==","4LiC4LiT4Liw","44Gr5Ye6","ICsqKioqKioqKioqKioqKg==","z4TOt866zrU=","2KfYrNix","IOKAsg==","44O844Os","6aWt","INis2YTYsw==","INio2LPYqtmH","4Lin4Liy4LiH","IM6yzqw=","INCw0LzQtdGA0LjQutCw0L0=","IFByZW1p","bWFl","INGB0YDQtdC00Lg=","4bqg","INCy0YDQtdC0","44CC6ICM","5ZKy","IOqzteqwnA==","6IKl","0LfQstC40YfQsNC5","IHByb2NlbnQ=","0LjQu9C+0YHRjA==","4KS24KSo","6aCB","0LXQutGC0Lg=","2K/Yp9i02Ko=","7ZWZ7ZqM","44CA44CA44CAIOOAgCDjgIA=","INmF2K/ZitmG2Kk=","4KS/4KSy4KSo","IOiX","0LzQuNGA","INC90L7RgA==","IO2VmOyngA==","0LLQtdGJ","bsSbbQ==","0LXRgNCw0LzQuA==","IHByYWNvdg==","INio2YrYp9mG2KfYqg==","IM+Dz43OvQ==","INis2LA=","44GE44Gn","IELDrQ==","6LGG","IGhtb3Q=","aWxlY2XEn2k=","INiq2KfYqw==","6LS0","IOq4iQ==","IG15c2w=","IOydtO2VtA==","IOq4sOuKpQ==","INCi0LDQvA==","INC90LDRgdC10LvQtdC90LjRjw==","IE1leg==","IOuqqOultA==","7ZmU66W8","INmG2LPYrtmH","INiq2YTZiNuM2LLbjA==","IMSNZXJ2bmE=","xrDhu6FuZw==","2LXYrQ==","INGC0YDQtdC9","1aE=","IGNlbG91","xalp","7JeG7J20","bsOta3U=","IHByb2dyYW11","4KWN4KSq4KSo","INC/0YDQtdC2","2KfYsdio","5pyf6ZaT","IM68zqw=","642U64uI","4bulbg==","INC/0LXRgNC10YHRgg==","5a+55LqO","6L+Q6KGM","INGC0LDQvQ==","IOyCrOydtO2KuA==","IFF14bqjbmc=","IHN0b2rDrQ==","44Ot44O8","2q/Yp9ix","INC10L3QtdGA0LM=","IGt0ZXLDvW0=","INC/0YDQuNC80ZY=","INC60LDRgNGC0Lg=","IHplbmdpbg==","77yM5YaN","INiq2LHYqA==","INGG0LXQvdGC0YA=","IHNhxJ9sYW1haw==","64ud","44Gu5a2Q","IOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgA==","IHPGoW4=","esSx","0YLQsNC60Lg=","xJtzdMOt","IOCl","6a4=","5Z+56K6t","IOyUqA==","IGJlbGtp","IOydtOuypO2KuA==","65SU7Ja0","IHPDoG4=","0L3QuNC60LDQvA==","YWxpbQ==","5a++5b+c","IFPhu7E=","6YGT6Lev","6auY5riF","IGTDtWk=","INmE2YA=","IOiCoQ==","zr3OuQ==","4oCeSg==","J25kZQ==","zpHOkw==","44Go44Gq44KL","54i4","2LnZhNuM","z4HOuc+Dz4Q=","IGXEn2l0","INC30L7QstC90ZbRiA==","INC/0YDQuNC8","2LPZhdio2LE=","IG3Em3N0xJs=","IM+AzrXPgc65z4PPg8+Mz4TOtQ==","0JDQoA==","5oSf5Yiw","IOusuOyEnA==","44GL44KL","2YLZitmC2Kk=","INCy0YLRgNCw","IOC4reC4sw==","0YHQutGD0Y4=","2K/Zh9in24w
=","IMSwc3Q=","INCX0LDQsg==","IOmDvQ==","0YjQtdC8","INC10YnRkQ==","INCc0LjRhdCw0Lk=","INGD0L/RgNCw0LLQu9C10L3QuNGP","0LvQtdC90L3Ri9C1","IHphxI1hbA==","5qGM","INC/0ZbQtw==","0LvRj9GC0YzRgdGP","IOyekOujjA==","44CA44CAIA==","IEtyYWw=","6IiJ","IOCkreCktQ==","INiu2YU=","INCw0LrQsNC00LXQvA==","IGlzdGVu","INC40YHQutGD0YE=","INi52YbYr9mF2Kc=","INin2YTYp9mF","aXNtdXM=","IGF5csSxbnTEsQ==","INCp0L4=","INmH2YjYtA==","2K/ZiNin2Kw=","0LvQsNC2","INqp2YbYp9ix","wqBS","5oCn55qE","0YHRltC8","IE3DvHo=","0YDQvtCy0LjRhw==","IM6p","IOyWtOuUlA==","2LPZhdip","INGA0Y8=","IHTGsMahaQ==","INGA0LDRgdGF0L7QtA==","5Y+w54Gj","INin2YTZiNmC2Ko=","2KjYsdin24w=","INC30YDQvtCx0LjRgtC4","INCx0YPRgA==","IMSNaW5ub3N0aQ==","INi12KfYrQ==","INi12YbYudiq","INi32YQ=","zr7PjQ==","IHRpc8OtYw==","IEZyYW5zYQ==","7KaY","6Lu9","0Zg=","z4zPhM63z4TOsc+C","IE1pbGxldA==","44CA44O+","INC/0YDQuNC10Lw=","INiq2LHYrNmF2Yc=","INiz2YjYrw==","IHNvdcSNw6FzdMOt","0JTQvg==","IHRy4bul","6Laz55CD","4Lib4LiB","IHVzdGFub3Y=","zp/OmQ==","0J7QvQ==","INC90LXQtg==","0LrQvtCz","5LiA54K5","INiv2YjYsdin2YY=","5b2x6Z+/","ZWxpZGly","4oCeTg==","ZXNpeWxl","0YDQtdC80LXQvdC90L4=","IGlsZXRpxZ9pbQ==","4Lih4LmA4LiV","5Lul5YmN","44OL44O8","6Zu76Kmx","4LmC4Lie","b3ZreQ==","INC30LDQvNGW","IOCkteCklQ==","wpk=","INCy0ZbQudC90Lg=","IG9sbWFkxLHEn8Sx","5qKB","INCi0LXQvw==","bsSbdGU=","6IWV","7IKs64qU","bWFtYWs=","IGNpeg==","5qOS","IO+8jzo=","6YGL5YuV","INmH2YbYpw==","IOqwkQ==","INmH2Ybar9in2YU=","IHXEn3Jh","5b2m","IG9iamVrdA==","44Go44GZ44KL","5Zu95YaF","INC00LXRgNC20LDQstC4","IOiM","IHVsdXNsYXJhcmFzxLE=","2aM=","IG11dGxhaw==","INC30L7QsdC+0LI=","IM6zzrXOvQ==","4LmE4Lif4Lif","IMO2emfDvHI=","7YS4","INCy0LjQv9Cw0LTQutGD","IOCkleCkrA==","INin2YTYrti3","zrjOt866zrHOvQ==","77yM5oqK","0Y/RgtGC0Y8=","IG9sbWFkxLHEn8SxbsSx","wqBrVw==","IG7Em2t0ZXLDvWNo","44OH44Or","5qSN54mp","zrzOuc67zr/Pgg==","0JDRgNGF0ZbQsg==","INCi0L4=","6Jas","0YHRgtCy0LjRjw==","INiu2YjYp9iz2Ko=","0L7Qu9C+0LPRltGX","2YjYp9mH2K8=","INC90LDQug==","INC60L7RgtC+0YDRg9GO","IOCkpuCklQ==","4oCeTQ==","zr
vOuc6x","5q2y","56ys5Zub","4KS+4KSc4KS4","ICjCqw==","IHRo4bq7","772kCg==","56OB","INmE2YI=","IOyVlA==","INC90L7QstC+0LPQvg==","IOyVhOyjvA==","IOuQmOyWtA==","IG9sdW4=","w74=","IGthcml5","INit2LPYqA==","IOydmOuvuA==","LtCc","IG96bmHEjQ==","2YTYs9mE","INCS0LjQtA==","66Gc64KY","4KWN4KSf4KSu","7Zy0","IGJpbGdpc2F5YXI=","7J247KeA","INCy0L7Qsg==","bmljdHbDrW0=","4Liy4Lit4Lii","INi02K7YtduM","0L/RltC+0L0=","5pys5b2T","INio2YA=","INC80LDRgdC70L4=","IFBow6F0","INCx0LA=","0LDQu9GM0L3QvtC80YM=","56S+5Yy6","INI=","Ojo6fA==","6rQ=","IOS4gw==","INmI2KfZhNiv","0L3QuNC60LU=","4Lit4Lil4Lil","IHllcmxlxZ8=","IGtvbWJpbg==","dcWh","INC+0YLRgNC4","5LmM","acWfdGk=","IHPDs25n","zrvOt8+C","INC60YPRgNGB","4LmI4Liy4LiE","INmK2LM=","INiv2KfZhQ==","55Kw5aKD","0YfQtdC90LrQvg==","44CN44Gu","IG3DrXN0YQ==","INGE0L7Rgg==","IHDFmcOtem4=","INGA0LDQt9Cw","57Sr","bMOhZGE=","INGB0L/QtdGG0LjQsNC70LjRgdGC","INio24zZhdin2LHbjA==","IOuTow==","54uX","2YjZiA==","0LDQvdGW0YI=","INiv2YbYqNin2YQ=","INmF2KzZhdmI2LnYqQ==","w61uYQ==","IEhhbGs=","w6FqZW0=","ZW7DrXpl","IGlteg==","q25n","IM6VzqA=","INmF2YfYrw==","7JyE7JuQ7ZqM","IOycoO2YlQ==","4KS+4KSq4KS4","IGplxb4=","0LDQvdGW0Lc=","0LjRgtCw0Lk=","4b+W","aXJsZXI=","6riw6rCE","INCy0L7RgA==","IM+O","IHBvem4=","INiz2KfZhg==","5a+/","5pav54m5","IHR1cmlzdA==","IOyekOycoA==","4KWA4KSW","zrzOvM61","YW5zxLE=","7IaM64WE","IHRlZGF2aQ==","0YfQtdGB0YLQstCw","5aOT","0L7QstC1","77yM55yL","INC/0L7RgdC70YPQsw==","INGC0YDQsNC90YE=","IHrDoXo=","5oi0","INC80L7QvdCw","4Li04LmA4LiE4Lij4Liy4Liw4Lir","INmG24zZhQ==","IOyCrOuejOydtA==","YWhhdA==","z4XOug==","INC+0YLQutCw0Lc=","INCS0L7Qu9C+0LTQuNC80Lg=","INCh0Lo=","4KS/4KSV4KSk","5aaW","IOuLpOyatOuhnOuTnA==","7JiB7IOB","IOCkqOCkiA==","Y2V0ZQ==","INCz0YDQuNCx","ZWNlxJ9pbmk=","IMOnb8SfdQ==","INC80LDRgtC10YDQuNCw0LvQsA==","4bupdA==","IHphdGVu","IEZSQQ==","IEJpcmxpxJ9p","IHNpdGVzaQ==","IOWU","INCS0L7Quw==","wqBQUw==","4KS+4KSy4KSk","INCx0LDRhw==","0LDQu9GW0LfQsNGG0ZbRlw==","IFNsb3Y=","57OW","INCz0L7QstC+0YDQuNGC","INCy0LLQtdC0","4Li44LiV4Lia4Lit4Lil"
,"44GG44Gh","IHlhcHTEsWs=","IOygley5mA==","6rCc66W8","4KWI4KS44KSy","2KzZitmE","INC30LDRgdGC0L7RgdC+0LI=","6L+r","IEt1cnVs","IE5hc8SxbA==","INC90LDQv9GA0Y/QvA==","IOS9jQ==","4LmM4Lia","IOmBkw==","INC90LjQttC1","INC60L7RgdGC","2LjZh9ix","0KLQsA==","7Ked","IMO2bsO8bmRl","0LbRlg==","INin2KzYsdin24w=","INC+0YDQs9Cw0L3RltCy","dmlzZQ==","IOydhA==","4LiV4Lij4LiH","2qnZhtmI2YY=","IGRsb3Vobw==","0J7QnQ==","IOycoQ==","55uu5qCH","66+A66Gc","77yP77yP77yP77yP77yP77yP77yP77yP","INC/0L7Rh9C10LzRgw==","5pWF5LqL","0YLQtdGB0Yw=","INmC2YTYqA==","INiq2KzZhw==","aWxlbmRpcg==","INC40LPRgNCw","INCU0L7QvQ==","IHDFmcOtamVt","6KaG","0KHQvw==","LdC90Lg=","b25zZQ==","0LjQvdC+0Lk=","0L7Rh9C90L7Qs9C+","2KfYs9in2YY=","INC/0L7Qu9GD0YfQuNGC0Yw=","0YLQsNC/","IEzDvQ==","IMOCdQ==","IGjDvGM=","ZWJlaw==","IFlhecSxbg==","5peL","4Lix4LiZ4LiX4Lij","INCy0LjQutC+0L3QsNCy","IHPDtG5n","4KWB4KSc","INCX0LDQsw==","pOuLpA==","IGPFqQ==","INqv2LHZhQ==","5LyP","44Gr44GZ44KL","LdCk","INmC2YU=","IG9sYWNhxJ8=","5p2l5LqG","5oub6IGY","INCd0LDRgdC10LvQtdC90L3Rjw==","IOyYgeyWtA==","IOatpA==","INio2K/Zhg==","27LbuA==","0L7RgNCw0YLQuNCy","77yz","IG5lYnlsbw==","INGD0YfQuNGC","5p2c","INC00LDQvdGW","IHNwb3TFmWVi","44O844OG44Kj","0LXQvdC90YPRjg==","6rmM7JqU","dmVt","UMWZw60=","IHlhbmRhbg==","6byT","INiv2LPYqtmI2LE=","IGhhZmlm","aMWv","IHbDocW+","IOyVhOyngQ==","2Y/YsQ==","INC70LA=","66CJ","4Liy4Lib4Lij4Liw","bMSxa2xhcg==","INGB0YLQsNC90LTQsNGA0YI=","4Lit4LmD4Lir","5aW0","INC+0YLQvw==","4oig","44O844OA","Y2jDoXpl","IOq3uOugh+qyjA==","b3N0ZWw=","INCz0LDQu9GD0Lc=","w6Jr","0LXRhtGC","656R7Iqk","IMSNaXN0","0YDQsNC90LA=","IHbhu69uZw==","IHNlbmk=","IGfDs2M=","z4bPjA==","w6FudQ==","IMO2dA==","IHPDs2M=","44GE44Gu","INGB0LrQu9Cw0LTQsA==","0JDRgNGF0ZbQstC+0LLQsNC90L4=","IOydtOuyiA==","44K544Gu","aWxlYmlsaXI=","772A44O9","4Li14Lii4LiN","IM66zrHhvbY=","IOuvvw==","5pu05aSa","xLFzxLFuxLFu","IEdpw6Ft","5q2j5byP","z4POvM+M","IGFyY2hpdA==","IO+9sg==","0YfQsNGO0YLRgdGP","67KE7KeA","44Kk44Ok","6auY5qCh","6Kiz","INmF24zaqQ==","IOaDhQ==","IHBoYQ==","5a
Sq6YOO","4Lie4Lij4Liw4Lij4Liy4LiK","2YLZitip","INGD0LvRg9GH","0YHRgtCy0YPQtdGC","IGtlxZ8=","6auY562J","IHPhu5tt","z4HOus61","zrzOv8+B","IHrDoXN0dXA=","b3rDrQ==","IG1pbGk=","INC80L7Qs9C70Lg=","INC30YDQvtC30YPQvA==","INio2KfYtNuM2K8=","IGFrY2k=","INC00YDQsA==","IM6xz4HOuQ==","44GL44KJ44Gu","5a+S","IFphbWFu","INGW0LTQtQ==","IOOAgCA=","IGtsdQ==","YWtsxLE=","4KWH4KSa","INGB0LLQvtCx0L7QtA==","2LPYp9mF","INC+0LI=","IHVieXQ=","6YeH55So","IGRhdnJhbsSxxZ8=","IG5hYsOtesOt","INCR0YPQtA==","IM+J","INin2YTYsdit","4Lix4LiV4LiZ","0LjQvNC1","INiq2YTZgw==","2KrZhdi5","INCw0LTQvNC40L3QuNGB0YLRgNCw","IHpvcnVuZGE=","INmG2LPYqNip","INi12YbYudiq24w=","INGE0YPQvdC00LA=","6Y+h","IHBvdG9t","INC/0YDQtdGB0YI=","IHPEsXJhZGE=","IGF5YXI=","2KfZgtmE","5rqq","INii2YLYp9uM","INC/0LXRgNC10YXQvtC0","INC/0YDQsNC60YLQuNGH0LXRgdC60Lg=","6buD","INGD0YXQvtC0","INmF2KrZgQ==","IHNpeWFzaQ==","INC/0L7RgtC10L0=","2Y7ZgQ==","INCb0YM=","INC60L7QvdGC0YDQvtC70Yw=","INGB0LrQsNC30LDRgtGM","4KWA4KSV4KSw4KSj","5YWo55CD","27Lbtg==","IHRvdG8=","INmI2K8=","44K/44Kk44OX","5ZyN","5byV55So","77yj","6Iq4","5LuL57uN","INGC0LXRgNGA0LjRgtC+0YDQuNC4","5pel44Gr","bcOtdA==","YW3EsXo=","7J207Ja0","IHlhcsSxxZ8=","IGfDvGM=","IM+HzrE=","4Lix4LiZ4Lii4Liy4Lii4LiZ","44KS6KGM","IG1pbGxp","IOePvg==","S2R5xb4=","bWF6b24=","67O064K06riw","INGC0YDRg9C00L7Qsg==","6aO+","INCy0LjQvdC40Lo=","INmI2LLYp9ix2Ko=","6YeM55qE","0LzQsNC3","IFJVUw==","0LXQutGC0YM=","INi52KfYtA==","IGtvbmNl","44KI44GG44Gn44GZ","INC80LDQu9GL0Yg=","bWVuaQ==","0LXRgdCw","2KfYttuM","IGJyYXQ=","INCy0ZbQtNC90L7RgQ==","zrjOtc+B","INCn0LXQvA==","5pGH","INmF2KfYr9ix","55So5ZOB","INmF2K3Yp9mB2Lg=","IG15xaE=","2KzYuQ==","IGlzaW0=","5rOK","xLFsbWF6","IM6bzrE=","5a+p","IGF5xLFy","0LXQvdC40LzQuA==","4KWH4KS54KSk4KSw","5ZyG","44G+44Gj44Gf","55Si5ZOB","INGW0L3RhNC+0YDQvNCw0YbRltGX","IHThu6c=","4Liq4Lih4Lia","IHN0xZk=","IOuwnO2RnA==","0LDRgNGM","IENhbw==","zqHOmQ==","4LiB4Liy4Lij4LiI","INC/0L7QtNGD0Lw=","5LuV5LqL","INCa0YDQvtC80LU=","IOyXlA==","INGD0LTQsA==","INCw0LLRgtC+0LzQsNGC0Lg=","IOC4
hOC4k+C4sA==","IEtpxZ8=","INGB0L7RgdGC0L7Rj9C90LjQtQ==","bGlzaQ==","IOuWqOyWtA==","b290YmFsbA==","IO2NvA==","INC70LjQvA==","IMOnZXLDpw==","2YjZhNmK2Yg=","IHNsb8W+","IOuovA==","4Lij4Lit4LiH","0YjQtdC1","4oCm4oCm4oCm4oCm4oCm4oCm4oCm4oCm4oCm4oCm4oCm4oCm4oCm4oCm4oCm4oCm","44GT44Gh44KJ","0L7RgNGL","54Of","wqBG","0LDQvdC+0LPQvg==","2KvbjNix","54+N","5biC5aC0","dsSbZG9t","7LKo67aA","IOyCrOqxtA==","776M","4LmD4LiZ4Lin","IHp2bMOhxaF0","z4TOtc+F","INC60LDQutC40LU=","z4bOv8+Bzqw=","5YSE","IHpwxJt0","7ZWc7YWM","IHp2b2w=","IOeX","0YDQsNC90LXQvdC40Y8=","INiz24zYp9iz2Ko=","INCa0L7Qu9C4","INC+0YDQs9Cw0L3QuNC30LzQsA==","INGP0L3QstCw0YDRjw==","INiv2KfYr9mG","0L/RgNCw","77yM5LuW5Lus","5pGY6KaB","IHF14bqnbg==","2YrZiNmG","INCy0LjRhdC+0LI=","wqDguYDguJQ=","INC10LvQtdC8","ZWJpbGVjZWs=","INC00L7Rhw==","INCx0LvQsNCz","INGP0Lk=","YWRuw60=","IHrDoXJvdmXFiA==","ZW5zdHbDrQ==","4oCM2KfZhg==","44GV44KT44Gv","L3w=","INin2YTYudin2YXYqQ==","6aC8","INiu2K/Yp9mI2YbYrw==","0L3QsNC8","INGB0LvQuNC3","5raJ","4Lij4Lip","ZcWfdGly","INmG2K/Yp9ix","4Lij4Liy4LiE","6KiA44KP","IOit","INC60YDQuNGC","INCy0L7Qt9C00YPRhdCw","IOCkl+CkpA==","IHByw6F2bw==","4KWL4KS34KSj","IHPhuq9w","7Y+t","INi12LHZgQ==","INix2KfbjNqv2KfZhg==","INC+0YLQug==","64aT","INGB0LXQutGA0LXRgg==","xLBu","b25hdmly","IFZ5cw==","IGJhxZ9sYXQ=","IE11aGFt","INC70LjRgdGC0L7Qvw==","IFRpY2FyZXQ=","IGFkbGFuZMSxcg==","INCU0LzQuNGC","z4POvM6/z40=","5L61","7Iuc64qU","4LmB4Lib4Lil4LiH","xLFuxLF6YQ==","LdCz","0LjRh9C90L7Rlw==","0YPRgNC4","VVo=","7Jug","INiq2KjYr9uM2YQ=","5rqr","INGA0LDQvNC60LDRhQ==","IG7DqXQ=","5rK/","IHJvesWhw60=","IOCkuOCkqg==","INGC0LDQutC1","0YDQsNGH","INin2YTZgtiv","7ZWZ6rO8","0YPQstCw0L3QvdGP0Lw=","IG3DoW0=","66Gv","4b2Q","IGV0a2lsaQ==","INin2LHYqtmB","IHRlY2hub2xvZw==","IOy9mA==","INiq2YPZitmK2YE=","IHDFmWVjZQ==","5a625bqt","IOOBjw==","4pS0","7YG8","IM6czqw=","4LmA4LiV4Lij","INGB0YLQsNC90L7QstC40YLRgdGP","56uL44Gh","IOmWiw==","IMSweWk=","IG7Em2t0ZXLDqQ==","INGA0L7QsdC+0YI=","IMSQxrDhu51uZw==","INin2YTYp9is","IHNwZWNp","54m55Yir","5a2d"
,"4oCM2q/YsA==","YWzEscSfxLE=","INC80LjRgNCw","7Y6Y7J207KeA","2K7ZgQ==","44Kq44Oq","INiz24zZhQ==","IOyWtOuKkA==","0LDQu9GM0L3Rgw==","0YfQvdC40Lk=","w7xtw7x6ZGU=","5pm66IO9","w71u","INiq2YLZiNuM2Ko=","INC/0YDQuNCz","INCz0YDRg9C/0L/Riw==","YW3EsQ==","zrPOv8+N","0L7RgNGC0YM=","IEdpYW5n","xZllbg==","IG9rb2zDrQ==","5Lqn5Lia","INC30Lw=","IOm+","2YrYp9ix","INin2YTYtNmK2K4=","0LjRgtC10LvRjNC90YvQuQ==","INin2YfZhQ==","INio2KfZhNix2YrYp9i2","INm+24zYp9mF","IGtyZWRp","IEFyYXA=","INGA0LDQsQ==","INC90LXQutC+0YLQvtGA0YvRhQ==","INit2KfZgdi42Yc=","0LjRgtC10LvRjNC90L7Qs9C+","IGdlcmVrbWVrdGVkaXI=","IERlbml6","INiq2YTYp9i0","c3RhZ3JhbQ==","w6F2a3k=","5Yqg5YWl","b3pvcg==","IGR1cnVtdW5kYQ==","IO2PieuLuQ==","IOu0iQ==","IHBlbsSb","2q/Yp9mG24w=","IEt1cA==","INGG0LXRgA==","dWxtYXPEsQ==","4pGg","INGB0ZbRh9C90Y8=","xLFtxLF6YQ==","5a6a55qE","wqDRgg==","5Yqe5YWs","7Jy864uI","INin2YTYpdmG","IOeD","44CN77yM","0ZfQvdCw","INC/0YDQuNCz0L7RgtC+0LLQu9C10L3QuNGP","0IU=","INGB0L7Qu9C9","IOu2gOyCsA==","5oW2","44K+","dm9qZQ==","24zYr9mG","7IOd64uY","57mB","w6FkdQ==","Ojo6Ojo6Ojo6Ojo6Ojo6","2LPZhtqv","6ZSL","INC30LLQuNGH0LDQuQ==","5aeU5ZGY5Lya","IM68zq3Pg86x","INGA0L7QttC00LXQvdC40Y8=","5oiQ5Lq6","IGTDrWw=","INCU0L7QsQ==","IOC5g+C4ig==","z4DOrw==","Z2FtYmVy","INmI24zamNqv24w=","IOiKsQ==","IGLDoHk=","INC20L7QstGC0L3Rjw==","5YWs5byA","INGC0L7Rh9C60Lg=","44GC44Gu","0LDQu9GW0LI=","IGNoYXJha3Rlcg==","IM6SzrE=","IHprdcWhZW4=","IOCkreCkl+CktQ==","0YfQuNC60LA=","4KWA4KSC4KWk","6KOP","5ZGK6K+J","aXlhdMSx","INGG0LXQu9GM","IOyKiA==","0LDRgNC0","IMOcbGtl","IHByb3NpbmNl","INmG2q/Yp9mH","44CM44GK","zp/OpA==","7ISc64qU","2Yjar9ix","2LbYp9mG","IGTFr3NsZWQ=","55C0","4LiV4Liz4LmB4Lir4LiZ","0LrRgtGW0LI=","bMOhZMOh","4b+G","IERvxJ91","44GR44KM44Gw","55uu44KS","IOebtA==","5puw","INCy0YLQvtGA0L7QuQ==","INCz0LvRgw==","IOydvQ==","6riw7KSA","IG1hZGRl","IGplZG7DqQ==","INC+0YTRlg==","7Iud7J2E","IGNow7p0","5Ye644GX44Gf","0LjRh9C10YHQutCw0Y8=","INC70L7Qug==","IGFsdMSx","65Oc64qU","ZXlnYW1iZXI=","INGB0LLQvtC1","IHRhxZ/EsW0=","INGC0L7Ri
dC+","IGdlw6d0aQ==","IHByZW1p","IE1laG1ldA==","77yM5Zug5q2k","zq/Ous63","IMO2bmNla2k=","IOCkleCkqA==","INGC0LXQvNC/0LXRgNCw0YLRg9GA0LA=","6Zi0","IOyWvOuniA==","2LTYqA==","w6FreQ==","44CAVg==","0LLQvtGA0LXQvdC90Y8=","bGFzeW9u","INC00L7QutCw0Lc=","IOucuw==","INC+0LHQu9C40Yc=","zpnOkc6a","INGA0LDQt9C0","77yM5Li6","5a69","IGtvcnVt","5ZWK5ZWK","IMWZZWtsYQ==","44OX44Os","INCy0LDRgNGC","INC/0YDQvtCx0LvQtdC80Ys=","IOS9oA==","IHRoxqFt","IHRha292w6k=","0LvQtdC90Ys=","IOWItg==","IGppbsO9Y2g=","INmG2LU=","INCz0YDRg9C00L3Rjw==","IOOBlw==","0LjRgtC10LvRjNC90L7QuQ==","INin2K3YqtmF","0Y7RgA==","z4bPhQ==","INi02YXYp9mE24w=","IOy7tA==","YWNhxJ/EsXo=","7KeA66eJ","INGE0LjQvdCw0L3RgdC+0LI=","IOq3uQ==","INqG24zYstuM","4KWA4KSb","2LXYp9iq","4KS+4KSo4KSu","INCy0L7Qt9C80L7QttC90L4=","6KiO","54Sm","INin2YTYqNmE2K8=","IOeUs+WNmg==","56Wl","IOuwlOudvA==","2q/bjNix","27XbsA==","zrzOuc6/z4XPgc6z","IHDFmWVkc2Vk","57uP6JCl","5aeR","ZW1leQ==","INmG2YjZgQ==","5b69","IHByw6F2YQ==","INCy0L7QvtCx0YnQtQ==","IO2LsA==","INio2KPZhg==","IEZyYW50aQ==","IFBhxZ9h","INm+2LPYqg==","a8Oibg==","INGB0LjQs9C9","IGThuqdu","5pCc","IHJva3k=","2YPZiNix","IM6Uzq4=","0LDQu9C40LfQsNGG0LjQuA==","5Lyg5aWH","xLFkYQ==","bMOtYg==","INGA0ZbQstC90Y8=","INC90L7Rjw==","YsSbaHU=","4Li04LiH4Lir4Liy4LiE4Lih","77yM5Y20","INGH0LXRgQ==","bGFubcSxxZ90xLFy","IMaw4bubYw==","w6F2YWPDrQ==","4Li14Lis4Liy","zrTOrQ==","4oCM2LTZiNmG2K8=","INGA0L7QsdGW0YI=","IOW3tA==","IE1ldg==","INmF2LHYrdmE2Yc=","INCy0LfRgNC+0YE=","572a","INio2KfZhNmF","INC40LfQs9C+0YLQvtCy","IFNwb3I=","5YSA","INin2YTYo9mG","4LmI4Liy4LiH4LiB","0LvQsNGB0YLQuA==","zp/Omg==","INqp24w=","5ZG95Luk","2K3Yr9ir","2YrZg9mK","INC/0LXRgNCy0YvQuQ==","44K544Kz","IMWhcGF0","IG5pa2Rv","4Lix4LiH4Lih","6LWr","5pio","INCy0YPQu9C4","INCa0LA=","4LmI4Lil4Liw","IHNhbW90","INC+0LHQtdGB0L/QtQ==","INmF2LnYsdmB24w=","INmF2K3YtdmI2YTYp9iq","0LLQsNC90L7Qsg==","INmF2LPYqtmC24zZhQ==","5aKZ","wqDQmg==","INC00L7Rgg==","emlt","2ZDYsQ==","INi02Yg=","5Zyo5Zyw","IOeOsA==","IOWMlg==","2LLZiA==","IHlheWfEsW4=","INC+
0YDQuNCz","2Y/Zhg==","IGV2cm9w","IO+9nA==","IOuFuOy2nOuTseuhnQ==","5Yed","0LvQtdC90L3Ri9GF","IGplbm9t","INCn0YLQvtCx0Ys=","IOyXhuuLpA==","IOyXrOyEsQ==","IHJlc21p","aW3DoWxuw60=","55uu44Gu","c2lhbg==","LdC90LjQsdGD0LTRjA==","zr/Oug==","54us56uL","xZ9laGly","5ZCQ","5Yi26YCg","IM6UzrXOvQ==","44OL44Ol","0LjRgtC10LvRjNC90YvRhQ==","INmD2KfZhQ==","z4HOug==","IHJhdQ==","INGB0LzQtdGA0YLQuA==","IM+Mz4TOsc69","IFThuqFp","INix2Kg=","zrXOvc6/","2LHYr9iv","IOC4geC4o+C4sA==","z4POvM6/","IOa8lA==","4Li04LiI4LiB4Lij4Lij4Lih","INGA0LDQt9Cy0Lg=","44K544Oa","0ZbRh9C90L7Rlw==","bMOhxaFlbsOt","2KfYqNi52Kk=","b3bDvW1p","0LDQvdCz","INC60LDQv9GW0YI=","44CB4oCL","7ZaI642Y","INGD0YHRlg==","4Lii4Liy4Lin","2KPZhQ==","44Op44OD44Kv","IOuV","INiz2YbZiNin2Ko=","INGB0YLQsNGC0YzQuA==","0ZfRhQ==","z4HOv8+Hzq4=","INij2YPYqtmI2KjYsQ==","bGFubWE=","IG1hbHplbQ==","56Oo","INCx0L7QutGD","5a2X5bmV","INC+0YDQs9Cw0L3RltC30LA=","44Op44Kk44Oz","INmF2LnYr9mG","55S35a2Q","IOaC","4bq+","IG1lemluw6E=","0LjQstCw0Y7Rgg==","INi32KjbjNi524w=","6JmR","4KSf4KSw","INC/0L7QtNGB","IMWfYcWf","4LiZ4LmG","IMWhcA==","dsSbxZk=","0LfRjA==","652866eI","4Li44LiY","4oCm2Lc=","66as7KeA","4oSW4oSW4oSW4oSW","IGLhu6lj","IFNwb2o=","INC40YHQv9C+0LvRjNC30L7QstCw0L3QuA==","5bem5Y+z","ZW5sZXI=","INC+0YnRg9GJ","INC+0LHQu9GW","2LjLhg==","2YTbjNiz","5o+Q5Y2H","INCz0L7QstC+0YDQuNGC0Yw=","IGvDvHI=","IM67zrXOuc+Ezr/Phc+BzrM=","0LvQsNCz0LA=","INGB0YPQtNGD","IOy4oQ==","zrjOtc+Dzrc=","INC90LXQvQ==","IGJpw6dpbWRl","0YbRltC50L3QvtGX","4LmA4LiE4Lii","IERhbMWhw60=","INC40LzQtdGC0Yw=","6IuX","INmF2LnYsdmI2YE=","IHThuqFw","IG1lxZ8=","wqBO","0L7RgNC+0L3QuA==","2LnZgQ==","4LmC4Lij4LiH4LmA4Lij","4pSs","IOC5gOC4nuC4o+C4suC4sA==","IOiPsuW+i+Wuvg==","0YHRgtCy0LXQvdC90L7QtQ==","INin2LLYr9mI2KfYrA==","INGE0LXQsg==","6aC7","IOC4quC4pQ==","4LiV4Lit4LiZ","IOq4sOqwhA==","5L2p","z4TOt869","64Ks64uk","IFF1eQ==","IOu2mQ==","INCh0YPQtA==","0LjQtg==","IOC5gOC4geC4oQ==","INGB0LLRj9GC","ZXRvb3Ro","zrXPgc6/","2YTZhdip","2LTZiNix","IGRvbXU=","6I2S","bcOu","64+E66W8","INGA0LXQut
C+0LzQtdC90LTRg9C10YLRgdGP","IHNvbnJhc8SxbmRh","INC00L3RltCy","IMOnYWw=","44Kr44OG44K044Oq","INC10LY=","IOyViQ==","6Imy55qE","4oCZbmRl","IM+Az4nPgg==","INGH0LXRgtCy0LXRgA==","a2lsaQ==","5oCn6IO9","2KfYr9mK2Kk=","57qv","INin2YTYqti0","INGC0LXQu9Cw","INC+0LHRitC10Lw=","5bKX5L2N","IGtvbmtyw6l0","IGFyYWRh","7Iuc7JeQ","IG9yYW7EsQ==","2LHZgw==","0JvQkA==","IG3DqW7Emw==","2KzZiNuM","IHbhu6M=","IEFuZ2lvc3Blcm1hZQ==","6IOO","IGjDtG4=","5LqL5qWt","INC+0YLQstC10YA=","IHNyZA==","xaFsaQ==","4Liq4LiB","5ryP","INi02LHYrQ==","0YbRj9C80Lg=","IHNsYXY=","IGNlbnk=","4Lit4LmA4Lij","INmI2YTYrw==","INC60L7RgNCw","INCx0YDQvtC9","Oi46LjouOi46","IG5lbXVz","6L+Z5qC355qE","INio2LHZhtin2YXYrA==","IMO6cGxuxJs=","4Li14LiZ4Liy4LiE4Lih","IOuwm+yVhA==","zrzOtc+BzrE=","57yp","IG7huq9t","INC+0LHRitGP0YE=","INC60L7QvdGC0YDQvtC70Y4=","w6F2YWrDrWPDrQ==","IGt1bQ==","55S35Lq6","IHZuaXTFmQ==","INio2K/Zhw==","INij2KjYsdmK2YQ=","5Lq65rCR5YWx5ZKM5Zu9","IHlhcMSxbMSxcg==","IG5hxaHDrQ==","44O844Ot","IHThuqFt","IGhlbsO8eg==","IHplbWk=","IGtow6FuZw==","5YWs5YWx","IOiAgQ==","INi52YjYp9mF2YQ=","wqBW","4LmJ4LmB4LiB","zqzOvc+EzrE=","INGC0YDQsNCy0L3Rjw==","IM63zrzOrQ==","6LS4","4Liq4LiU","INiz2YXYqg==","INiu2KfaqQ==","INGC0LDQutC40Lk=","IGV0dGlr","IM+Mzrs=","INC/0L7Qu9C4","INC90L7Qtg==","2LrYp9mG","2YbYr9mK","IMSNdHnFmWk=","IFBoxrDGoW5n","INmI2LHYsti0","44GE44GL","cnbDqQ==","IOCkpOCksOCkqw==","IOCkqOCkl+CksA==","bWFzxLFuZGE=","0LXQstC40Yc=","dmXFmWVq","5L+d5oyB","5oqA6IO9","5o6o6I2Q","bMOibQ==","IM+N","5aKe6ZW/","INin2LXZgdmH","INCX0LDQutC+0L3Rgw==","INCf0YDQtdC3","IHBvZHBvcg==","6riw7YOA","IO2PkA==","IOuLiA==","bGFyxLFuxLF6","44OW44Ot","INGE0YDQsNC90YbRg9C3","44OK44O8","IGJlbGVk","4Lix4LiZ4Lin4Liy4LiE4Lih","INmB2LHZiA==","0YTRgNC+0LI=","IOydtOufrA==","xrDhu6N1","IOqzteyLnQ==","IGJpcmRlbg==","INC30LXQu9C10L0=","55KD","IGjhu5NuZw==","IMWha29sYQ==","INGB0LDQvNC+0Lw=","YW5sxLFr","56m66Ze0","5Y2X55yB","0LvQtdGA0LM=","0ZbQt9C90LXRgQ==","wqBB","44CN44KS","IGtlbmRpbmU=","INin2YjZhg==","44CU","IM6jz40=","4LmA4LiE4Lil","5aW2","4
4CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA44CA","IMO6xI1ldA==","0YPQu9Cw","6YCa5L+h","IOymkA==","Lg0KCg==","INCc0LXQtA==","2KfYudmK","IGplaG/Fvg==","IEfDvG5leQ==","IM6Rz4DOvw==","INC/0L7Qu9GW","w7xtZQ==","aG9kb2I=","IM6dzrE=","INii2YTYp9iq","IHDFmWl6","IHRhdnM=","INiq2KjZhNuM","44Oz44OU","2K7Ysdis","INCw0LrQutGD","IMO6cHJhdg==","INin2K3Ys9in2LM=","7Lm06528","xLFtxLF6xLE=","INC00L7QutGD0LzQtdC90YI=","INin2LXZhA==","2LjZhw==","IOyduOqwhA==","INis2LHbjNin2YY=","zqXOnQ==","0YfQtdGB0LrQsNGP","2YrZhtmK2Kk=","5ZKo","5peF5ri4","IOC4iOC4s+C4meC4p+C4mQ==","INCw0L3Qsw==","z4XPhw==","6Jmr","INmF2YLYsQ==","INmF2YjYs9uM2YLbjA==","dXR1dA==","INCb0LU=","INCf0ZbRgdC70Y8=","44Kt44O8","4Li44Lil4Liy4LiE4Lih","5Yeh","z4DOv8+N","IMOWZMO8bA==","z4POug==","INGG0Y4=","0YvQstCw0Y8=","772e772e","INi02YXYp9mE","6L+F","INio2YTaqdmH","546b","IOyngOuCmA==","INmB2qnYsQ==","INGB0YLQtdC/0LXQvdC4","INC90LDRg9C60Lg=","54mp55CG","xJtsZQ==","INC+0YHQutGW0LvRjNC60Lg=","INC60YPQu9GM0YLRg9GA0Lg=","6ICD6K+V","IG1hdGVyacOhbA==","INGB0YLQstC+0YDQtdC90L3Rjw==","IOCkheCkpg==","5o+Q5Ye6","IOipleS+oQ==","2ZLYrw==","IOunjOuTpOyWtA==","ZHXEn3VudQ==","2YrZhtmH","5ZOm","0L7Rh9C90YvRhQ==","INmF2LY=","aXNtdQ==","INGH0LDQuQ==","2YjYsdmI2Kg=","INCw0L3Qs9C7","b8SfdW5sdWs=","INC/0YDQtdC00L/QvtC7","IM6tz4nPgg==","4Liq4LiW","IM6VzrvOu86szrTOsQ==","IEJpbGc=","INio2KfYqg==","INCb0YzQstGW0LI=","IHlhcMSxbG1hc8Sx","5qOA5p+l","5pWw5a2m","IDouOg==","IOeOqQ==","zprOsQ==","4LmA4LiX4LiE4LmC4LiZ4LmC4Lil4Lii","INiz2KfYrtiq2YXYp9mG","IOyGjOumrA==","6by7","IHNtcg==","IOuLpOyWke2VnA==","IGplZG7DoW7DrQ==","IHNlcnZpc2k=","IGV5bGVt","INC80LDQu9C4","IHbDvWhvZA==","6Zmk5LqG","INC/0L7RgNGP0LTQutGD","IG5vdsO9","5aSV","INC90LXQutC+0YLQvtGA0YvQtQ==","IF57fQ==","zrPPic6z","0YPRiNC60Lg=","IHBzaWs=","IO2UhOumrA==","2LTYp9ih","INCy0LDQvQ==","INiz2YPYp9mG","56K8","IM6czrc=","INGD0YDQvtCy0LXQvdGM","44K144O8","INin2YTYqNit2LE=","IGRuw60=","4LiB4Liy4Lij4Lio","ZWRpxJ9p","IGJlbG
lybGk=","2YvYjA==","IHphbcSbc3RuYW4=","5p+x","2KfZgdmK","IGjhuqNp","5oCd5oOz","IG5lbGVy","INix2LPZhduM","0YHQtdGA","44GT44Go44Gn","IFrDoWtsYWRuw60=","0LvQvtCy0LA=","0LrRgtGD","2YjYs9mB","0ZbQsdC70ZY=","zII=","0YDQtA==","6Zmz","5o23","IHlhxZ9heWFu","4KWB4KSa","0ZbRgtGC0Y8=","IGLhu4E=","64KY6528","INC80Y/RgQ==","IHtbJQ==","zrjOsQ==","INC00L7Qt9Cy0L7Qu9GP","IOWQhA==","INCf0LXRgNCy","IFNhxJ9sxLFr","0YHRgtC+0YDQuNGP","IGJ1bmxhcg==","IHPhu5U=","4KS84KWN","IOWIqQ==","INGB0L/QvtGB","IHlhcHTEsXI=","IHTGsOG7nW5n","2YjZhtip","INC10L8=","44Gn44GN44Gq44GE","2YHYqtmF","INCT0L7Quw==","7ZWY7KeA66eM","IOynhOynnA==","IG9iamVkbg==","INC40LfQvNC10L3QtdC90LjRjw==","5aWz5Lq6","INC/0LvQsNC90Lg=","IEZha8O8bHQ=","IHR6dg==","INC+0LHRj9C30LDRgtC10LvRjA==","INCx0LvQuNC30YzQutC+","cmFzxLE=","IM61z4DOr8+DzrfPgg==","INGE0LDQutGC0Lg=","IMSQ4bq3Yw==","IEFsdMSxbg==","0LvQuNGC","INC70ZbRgQ==","54mn","INC/0YPRgdGC","INC60L7QvNGW0YE=","5L+d6Zqc","5YW35L2T","LdGC","IHRyaHU=","IOKJiA==","INC00LXQutCw0LHRgNGP","INGE0L7RgNC80Ys=","Tmdvw6Bp","IGRvaG9k","2LHZitmD2YrYqQ==","INii2YXZiNiy2LTbjA==","IHphasOtbWF2","IGthdMSxbMSxbQ==","5LiY","IGtvbnVt","INC80L7Rhw==","44Oz44OV","0LTQuNCy0LjQtNGD","IOS6mg==","IOaS","zrPPgc6sz4Y=","44OQ44K5","INC/0YPQvdC6","IEJpcmxlxZ9paw==","IHF1ZW4=","INCy0LrQsNC3","4KWH4KS24KSV","IFl1bmFu","44Gg44Go","27Hbudu3","w6F0eQ==","INmI2LU=","INC90LXQs9Cw0YLQuNCy","44Gk44Gu","IOWKqA==","44ON44OD44OI","INC00ZbQuQ==","IGJhxZ/EsW5kYQ==","IHRyxrBuZw==","IG1ha2lu","IOaEmw==","0LzQtdGH","IOi/kQ==","2YLYr9ix","INin2LPYqtin2YbYrw==","IGluZm9ybWFjw60=","4KS+4KSw4KSV","6Kyd","0YDQsNCx0LDRgg==","IOetlA==","IOiHsw==","INC/0L7Qu9GM","INmH2YbYsQ==","64yA67mE","INiu2KfYsdis24w=","cmFjdA==","44CC44GT44KM","INi02YjYsdin24w=","0LvQtdC90L3Qvg==","IGhpc3NldA==","IGPDoGk=","INGE0L7RgtC+","5oGS","INC80LXQtNC40YbQuNC9","0YHRgtCy0ZY=","INin2YTYudmE","INC/0LjRgdGM0LzQtdC9","44CC44G+44Gf","IHZsYXN0bsSb","INC/0L7QtNCw","z4HOv865","IOyEnQ==","IOydvOydtA==","IOybjA==","0L7QutGB0Lg=","IG9zb2J5","0J/QvtGB0LvQtQ==","INGW
0YHRgtC+0YDRltGX","2LnZhNmJ","0L3QutCw","2KrZhdio2LE=","4KWH4KS54KSw","IEphbmE=","2YTZitin2Ko=","INC80LDRgNGC0LA=","INCa0LjRlA==","INGA0L7QsdC+0YLRgw==","IG5o4bqlbg==","0LjRgdC70L7Qsg==","65+t","IG9kdg==","IFThu4luaA==","4oCc6re4","44G744GG","6bKc","0LzQtdGG0Yw=","4Liy4Lio4Liy4Liq4LiV4Lij","4KWB4KST","4Li04LiZ4LiX","bWFkYQ==","2LLYp9mF","INmD2KjZitix","5a6e5pa9","emXFiA==","IGzDoWk=","z4POvM6x","2KfYs9in2Ko=","0YTRgg==","6LCx","54yc","IHByb2LDrQ==","5pyA6L+R","0YDQsNC0","44K944Oz","INC60LvQsNC0","4KWc4KSV","w6l2","4Lil4Liy4Lii","6I6O","IM68zq3Ph8+Bzrk=","INC60YPRgQ==","IO2ZmOqyvQ==","0YfQvtGX","5Y+Y5YyW","INio2KrZiNin2YY=","IHThuq90","IGfDtnN0ZXJlbg==","0LDQu9GO","INC60L7QvNCw0L3QtNC4","IOy7qA==","0YPQvdC0","INis2YTZiA==","5a2Q55qE","INGB0LE=","INCg0LDRgQ==","UENQ","IEN1bWh1cmJhxZ8=","0L7QtNCw0YLQtdC70Yw=","w61zdG8=","IG96bsOhbQ==","44O844OL","IG9rdXk=","b3BoeQ==","4Liy4LiZ4LiE4Lij","IM6VzrjOvQ==","YXnEsW0=","2Y7Yow==","5o6h","IGZ1bmtjZQ==","5pqW","2LfYp9ix","INCd0LDQsw==","IOS4h+WGhg==","IO2SjQ==","IOS9jw==","IO+8jg==","0YvQstCw0Y7RgtGB0Y8=","IFBsYQ==","2KfZitmE","IOustOyXhw==","INC60L7QvdC10YfQvdC+","0LrQvA==","4KSC4KSq4KSw","IOygleu2gA==","IOuCtOugpA==","44Kw44Or","54Gw","IGN5aw==","INC20LXQu9GD0LQ=","IOuGkuydgA==","55Sf5ZG9","5rW0","IGFydMSxxZ8=","INCH","77yy","ZWtpbQ==","INGE0LXQtNC10YDQsA==","INCy0LXRgNC10YHQvdGP","0L3QuNGC0LU=","IMSwxZ90ZQ==","INmI2LbYuduM2Ko=","44GV44G+","IHTFmWV0w60=","dWx1xJ8=","IEN1bWh1cml5ZXQ=","5Lyf","IOunnQ==","IHZlcm1law==","IG5hbGV6","55O2","IGRpxZ8=","IEjhu5NuZw==","2LrZitix2Kk=","5amG","0L3QuNCy","IHLDunQ=","J25kYQ==","IGhyb3o=","4KWJ4KSq","INC30LDQutC+0L3QvtC8","IGplZG51","IEthZMSxbg==","aW5kaXI=","2LPYp9iy24w=","5Yy65Z+f","IGtvbnXFn3R1","INiy2YbYrw==","4KS+Cgo=","INCQ0Lc=","4LiH4LiC4Lit4LiH","INGB0LLQvtC50YHRgtCy0LA=","IOyeke2SiA==","0L/QtdC60Lg=","IOWwsQ==","0LXQstC+0LPQvg==","IHRhxZ/EsXk=","INmF2YbYt9mC2Kk=","IMOHb2N1aw==","27Lbtw==","IM+Dz4XOvM+A","6aOf5ZOB","aMOh","77yv","2YTZhdmH","44Go44Gq44Gj44Gf","0L7RgNGW","wrB9","IHRhxZ
/EsW4=","55+/","INGH0LDRgdGC0LjQvdC4","INiv2YrYs9mF2KjYsQ==","IOiJrw==","c3TFmcOt","INGG0LjQug==","4oCV4oCV4oCV4oCV","IMSwbmdpbHRlcmU=","INGB0YLRgNCw0YLQtdCz","w4TFuA==","0LjRh9C90L7Qs9C+","w61yaw==","IM6Rz4E=","IeKAnAoK","IOq5qA==","4KWB4KSG4KSk","INiv2YbbjNin","bMOtbg==","IOCkleCkoQ==","INmF2KjYqg==","0LXQvNGL0YU=","0L7QsdC4","4Lii4LiZ4LiV","4KSC4KSn4KSo","2obbjA==","IOefpQ==","IFh1w6Ju","YWRha2k=","IG9ydGE=","5qC55pys","5YWx5ZCM","0L3QtdC90LjQuQ==","2KjZitix2Kk=","562L","77qU","4oCM2YfYp9mK","IMO2ZGVtZQ==","INii2YbahtmH","INC30LDRj9Cy0Lg=","INmG2YLYtNmH","IOezuw==","4KWL4KWk","IOyngOyglQ==","IGluc3A=","INGC0LXQvQ==","INiq2Lc=","IHF14bqjbmc=","5Ymj","44GP44Gu","INGG0LjQvA==","a292aQ==","aXlhaA==","IOuQnOuLpA==","2LXZhw==","IMSRdQ==","IHN14buRdA==","xLFtYQ==","7KeA6rOg","zIM=","4Lia4Liy4Lii","IENlcnRpZg==","INGD0YHRltGF","4LiV4Liw4Lin","zrXOr8+EzrU=","IA0=","INC80L7QttC70LjQstGW0YHRgtGM","IC3igJA=","IO2YuQ==","7IKs7KeE","INC00LDQvdC40YU=","IHphaMOhag==","7KO864qU","INCz0LjQtA==","bmnFvg==","IF57wrB9","IGtybw==","xI1lbg==","z4bOuQ==","xLFtxLF6ZGE=","IOa5lg==","INC/0L7QstGA0LXQttC0","IOyhtOyerA==","4LiZ4Liy4LiZ","zrzOrc69zr/Pgg==","5r2c","77yM5L2/","IGRvc3A=","IGxp4buBbg==","4Lix4Lia4LiE4Lin4Liy4Lih","INGA0LDQsdC+0YLQtQ==","INC80LDQudCx0YPRgg==","4LmA4LiB4Lip","QmHFnw==","IOadseS6rA==","0L3QsNGH0LDQu9Cw","zrTOtc65","4KWI4KSq","0ZbQvNGW","IGZpemlr","4Lin4Lil","5LyN","IOC4iuC4meC4sA==","J9GP0YI=","0L3QuNC7","0LjQvdC+0LI=","IMSRb8Ohbg==","4Lij4Lin4LiI","ZmV0","4LmM4LmC","INC80LDRgtC4","6aiO","0JrQog==","4LmA4Liq4LiZ4Lit","INC80LDQsg==","bMSxxJ/EsW5h","INC/0L7Rh9C40L3QsA==","4Li54LiV4Lij","0YbQtdGA","dWpldGU=","IHRhaG1pbg==","INCy0LjQvNC+0LM=","4Liy4Lif","0LXQtNC2","z4TOtc+N","YWRsYQ==","IMSRxrDGoW5n","INiv2KfYs9iq2KfZhg==","IGJhc8Sxbg==","INGF0LI=","IHJlYWs=","INC+0YLQvNC10YI=","5rOl","IG3DoXRl","IHpvcnVu","44Go5oCd44GG","INiv2LHYrNip","INCy0ZbQtNGB0YPRgg==","INi52KfZhdmE","6JS1","IHNvbnJha2k=","IG1vaGxp","0LjQstCw0LXRgg==","INC/0ZbQtNGB0YLQsNCy","IG9zdHJvdg==","4KS+4KS
o4KS1","4oCeUA==","INCy0LjQt9C90LDRh9Cw","IHByYXZkxJtwb2RvYg==","IHpheg==","7J2066W8","INC00LbQtdGA","INCg0LDQtA==","INGB0LXRgNGM0LXQtw==","INC00LXQvA==","z4DOrg==","INCE0LLRgNC+0L8=","IMSNZXNrw6k=","776P","INit2Yo=","7LyA7J20","INiu2YjZhg==","wqBM","44GE44Gr","0LjQt9C90LXRgQ==","INmF2YLYp9mF","INin2YTYrdmE","64aN","INii24zYpw==","57+8","77y9","5riQ","0LvQuNCy0ZY=","44GE44Gm44GE44KL","IM6RzqA=","INC40YHQv9C+0LvRjNC30YPQtdGC0YHRjw==","IG3DoXQ=","IM68zrXOs86s","64W8","5rW36YGT","INmF2LTaqdmE2KfYqg==","0YfQvdCw","JzsnOw==","IM68zq/OsQ==","z4HPjs69","IGJ5c3Rl","INGN0LvQtdC60YLRgNC4","IFlhcmTEsW0=","IGjDoXQ=","INCU0LXRgNC20LDQsg==","LtCh","IG9yYWRh","IGFsYW7EsQ==","5Zyw5Z+f","INiv2YfZhtiv","0LzQtdC90Yg=","INC+0YDQs9Cw0L3QvtCy","INi52LU=","4Li54LiH4Liq","INi02LnYsQ==","IOyWuw==","IM6szrvOuw==","IGfDs2k=","INmG2KfYrQ==","5byY","4KWN4KSl4KSy","aWxpbQ==","65CY7KeA","INC60L7QvdGG0LU=","IMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKgIMKg","IOykgOu5hA==","IG9zdGF0bsOt","IHZsw6FkeQ==","INGB0L7QsdC40YDQsA==","IOyXreyCrA==","4LmB4LiB4Lij4Lih","Lu+8jw==","2Y/ZiNmG","2b7Ysw==","IFdpa2lw","IOa+","INC20LDRgA==","5a655piT","IHByb3N0xZllZG5pY3R2w61t","IMW+ZW55","IOiPsuW+i+WuvueUs+WNmg==","0LDRjtGC0YHRjw==","IG1pw6p1","IHBlbsOtemU=","zrTOuc6s","b2xkdXI=","INC/0YDQuNC80LXRgNC90L4=","IOyeiOqzoA==","4LiH4Lit4Lii","0LrQvtCy0LjQuQ==","Ls6f","4LmD4LiE4Lij","54ug","INCf0ZbQsg==","5pS56Z2p","INCd0LDRgdC10LvQtdC90LjQtQ==","xaFldMWZ","2ZLYqA==","IOKUgA==","2LrZitmE","INC00ZbRj9C70YzQvdGW0YHRgtGM","INmE2YrYsw==","IOyLnOyepQ==","44Of44Ol","INqp2YjYqg==","IM6Tzrk=","4Li04LmA4Lin","ZWt0b3I=","INCx0YPQtNGD","0L3QvtCy0LDQtg==","0YnQsNC10YLRgdGP","IG5nw7Ru","IHbEm2M=","5b6Q","4Lit4LmA4Lih4Lij","4Lix4LiN4LiK","INC40YHQv9C+0LvRjNC30YPRjtGC","cnVidQ==","IG5o4buxYQ==","44GM44GK","INCT0LDRgA==","0L7RgNC1","INC30L7Qu9C+0YI=","5p+z","INmK2LQ=","INC/0L7QstC40L3QvdGW","2KfZgtiq2LU=","2YTYrQ==","INC+0LrRgtGP0LHRgNGP","IG7Em2tkeQ==","INC+0
LHRgNCw","2LPYqtqv24w=","56ym5ZCI","IHRoaeG7g3U=","5piv5LuA5LmI","IHJvenM=","7L2c6rG4","INC60LDRhA==","5ZCM5q2l","7LyT","z4DPhM+F","4Lig4Liy4Lii4LmD4LiZ","zrnPg8+Ezq4=","INiv2YjZhNin2LE=","INmF2KfZitmI","IHBlxI0=","4Lix4Lia4Lih","zpnOlA==","xLF5ZMSx","4Lix4LiB4LiX","4Lib4Lij4Liw4LiW4Lih","zrrOsc65","IHByb2Rlag==","INC40Y7Qu9GP","IHbFqQ==","6amx","IGh2xJs=","5oOz6KaB","56+E","w6dhaw==","INC80Y/Qsw==","xLFtxLFu","IGRpc3BvemljaQ==","IHVrYXo=","cmFjYWs=","INCx0L7Qu9C10LfQvdC4","4Lin4LmC4Lih","INC30LXQuw==","INCS0LjQutC4","INCg0L7QtA==","4Li54LiB4LiE","7ZG4","IHRo4bqjaQ==","IGJhxJ/EsW1zxLF6","INGA0L7RgdGB0LjQuQ==","INCa0LDQvA==","INC40YHQv9C+0LvRjNC30L7QstCw0L3QuNGP","INit2LA=","wqDCoMKgwqDCoMKgwqDCoMKg","INin2YbYqtmC2KfZhA==","INCw0LHRgdC+0LvRjtGC","IMSxxZ/EsWs=","z4TOv86zz4HOsc+G","INCx0L7Qu9GM0YjQvtC5","INi52KjYp9ix2Ko=","w63Fvg==","INiv2LHYs9iq","INGB0LvQvtCy0L4=","4KWICg==","2KjZiNio","INCS0L7Rgg==","4Lin4LmE4Lib","IGJpbGluZW4=","INmC2Yo=","IGJ1bmxhcsSxbg==","2ZHYqg==","IGJhc2l0","66a/","2KfYptix2Kk=","IHDFrw==","IGVkaWxtacWf","IOS9kA==","IFnDtm5ldGlt","2YXbjNix","IHNwb3U=","5rex5Zyz","INCy0LfQsNGU0Lw=","zpnOmw==","0IM=","INC00LXRgNC20LDQstC90L7Rlw==","IG1ydA==","IERlbWly","6buO","INGA0LXQs9GD0LvRj9GA","INC90LjQutC+0LPQtNCw","5by+","4KWJ4KSh","INCz0LvQsNC3","INmF24zaqdmG","6ZmQ5a6a","INC90LDQstC6","INC/0L7QtNGC","INiq2LXZiNuM2LE=","INin2YTYrdiv2YrYqw==","IGRvxaFsbw==","0L3RjtGO","INGB0YXQvtC0","2LfZgtip","INGB0LXQvdGC0Y/QsdGA0Y8=","54m55q6K","4LiB4Liy4Lij4LmB4LiC","w6F6ZA==","0ZTRgtC1","IM6jzrU=","INmE2YPZhA==","5ZCN5a2X","2KfZhtuM2Kc=","IGNpbnM=","6riw7JeF","IOmfsw==","6aCD","4Lii4Liy4Lii","7JqV","IHbDrXTEm3o=","4KWN4KSw4KSs","INi02LHZgtuM","IGJlenBlxI1ub3N0","IMOnZXLDp2V2","IOunmw==","Y2t5","k6g=","INGD0LzQvtCy0LDRhQ==","0LvQuNGF","bWVuaXo=","INio2q/bjNix","2YbZiQ==","IOC4geC4suC4o+C5geC4gg==","zrnPg861","4oCzRQ==","IGTDtm5lbWluZGU=","66as7Lm0","IOWIsA==","IGh1a3Vr","0LDRgtC+0YDQsA==","INin2YTYudmG","77qY","w7xuw7x6","0YHQvtGC","4Li44Lip","IGTGsMahbmc=","b3ZueQ=="
,"INGE0L7RgNC80LA=","44GX44Gu","2LLZitiy","INin2YTZhtin2LM=","INGH0LjQvA==","5aSn5Lq6","2q/Zig==","INCT0L7RgdC/","6aKG5a+8","IG5pbmg=","IOC4o+C4suC4hOC4sg==","2YLYp9ih","7Ims","IOydtOyghA==","IMO2xJ9yZXRtZW4=","INGG0LLQtdGC0LA=","0LXQvdC90L7RgdGC0Yw=","5aSn44GN","INC80LjRgdGC0LXRhtGC","2LHZiNiq","cG/FiA==","IMWeaXJrZXQ=","INC60YDQsNGB0LjQsg==","INGA0LXRgdGD0YDRgQ==","5Lm+","INmB2Yc=","IFnDlg==","6Iqz","zrzPic+C","xJtqaQ==","INCy0LvQsNC2","INGD0LLQtdC70Lg=","2KfYsNin","44CC5aaC5p6c","INC/0YDQuNGB0YPRgtGB0YLQsg==","IOG6pG4=","5oCW","INCc0LXRgg==","IGplZG5h","IGPhu6Vj","INin2YbYqti02KfYsQ==","INC30L7QutGA0LXQvNCw","0LjRh9C10YHQutC4","INC60YDQsNGX0L3QuA==","0LjRgNGD","INGW0L3RgtC10YA=","INCw0L3QsNC70L7Qsw==","0Zs=","4Li14LiL","0L3Rg9C70Lg=","IE5pbmg=","0LXRgNCw0YLQvtGA","IHJ1Y2U=","INGI0LrRlg==","2KrYsdmG2Ko=","IHNvbnJhc8Sx","IOaN","0YbQtdC90YLRgNCw","IOC4reC4s+C5gOC4oA==","2LfZig==","77yM5b2T","INGC0YDQtdGF","wqBI","5rSq","44Oz44OE","INCy0ZbQtNC/0L7QstGW0LTQsNC70Yw=","4oCZZGFraQ==","w6HFmWk=","IHDFmWVt","dHVr","INmB2LHZhdmI2K8=","IOyduOymnQ==","4Liq4Liz4LiZ","7IOB7J2Y","xZnDrW0=","5r6k","INGA0LXQuQ==","INC70Y7QsdC+0Lk=","dWp0ZQ==","67O17KeA","INiv2LHYsw==","INCS0LvQsNC00Lg=","INGB0LLQvtC40Lw=","IOyduO2EsOuEtw==","6LGK","INC90LDQu9C+0LM=","44KI44Gz","INiu2KfYt9ix","IOyeheuLiOuLpA==","44CC44GX44GL44GX","0LvQsNCz","5bCW","64ul","7Iqk64qU","7Iug7LKt","44OH44O844K/","INGD0YDQvtCy0L3Rjw==","IOustOyKqA==","INin2YTYo9ix2LY=","4LmJ4LiV","4bubdA==","INmG24zYsdmI","5aKo","44K244O8","cnViYQ==","INmG2LTYr9mH","0LjQu9GP","YWPDrW0=","44Op44Kv","WEg=","INiz2LHYrw==","IOCkpuCkuA==","dGVtYmVy","IERvxJ91bQ==","INC/0YDQvtGA","zrjOv8+C","IGnFn2U=","4Lit4Lif","0LvQsNGI","2KfYtdmE2Yc=","bGl2xJs=","67aA67aE","0L3QsNC6","5Y2B5LiJ","4Liq4Liy4Lir","4Lib4Lij4Liw4LmA4LiX4Lio4LmE4LiX4Lii","44Kt44Oz44Kw","INC80LXRgtC+0Y4=","IGt1bGxhbmFyYWs=","4pGh","24zYstin2Ko=","INmF2YjYqNin24zZhA==","INC30L3QsNGH0LjRgg==","IG9yZ2FuaXphY2U=","0YDQuNC4","b3ZuYQ==","IOqyveygnA==","44CB5b28","IOCkruCkuA==","IOC5guC4m
+C4ow==","TEFSSQ==","5oeC","INCy0LA=","INmD2YbYqg==","INGA0LDQsdC+0YLQsA==","wqAgwqAgwqA=","5aW95LqG","IHphbcSbc3Ru","0LbQtdC90Yw=","IHVrb24=","bsSbbsOp","IOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgA==","INin2YTYrtin2LXYqQ==","IMSNYXN1","5bCP6K+0","INit2LHaqdiq","5pGE","z4fPiQ==","INGB0LLQtdC2","5paw6Ze7","IOyLsQ==","IGXEn2Vy","IHNpdHVhY2U=","IOe3qA==","Zmlr","66eI64uk","zpXOmg==","IOqwnOy1nA==","IGPDoA==","2KfYr9ir","IHNhecSxZGE=","INij2YHYttmE","5rOV6Zmi","IC4s","IFRoxrDGoW5n","z4HPjM+C","44GX44KI44GG","x44=","5pG4","IOmZsw==","pZA=","4Lik4LiU","IGdp4bqjbmc=","INC70Y7QsdC+0LI=","IGVrcmFu","0L7Qv9C40YE=","0LXQttC00YM=","INC90LDQt9Cy0LA=","5ouT","xLFm","4LmI4LiB","0LjRh9C90ZY=","IOqzhO2ajQ==","4Lig4Liy4LiE4Lih","INin2b4=","66as7J2Y","44Gn44GZ44GM","IGtvbmNp","INqp2KfYsdiu2KfZhtmH","IOS9lQ==","INGC0LLQsA==","INCf0L7RgdGC","INCw0L/RgNC10LvRjw==","INin2YTYudix2KfZgg==","5Lit5Y2O","4LmH4Lit4LiB","4KWN4KSk4KSV","IHrDoWplbQ==","INiv2LHYrNmH","IOCkrOClnA==","INGB0YLRgNCw0L0=","6K2m5a+f","IHllcmxlxZ90aXI=","IFbFqQ==","576O5YWD","IOyhsOq4iA==","IOC4o+C4reC4hw==","IGFrYWRlbQ==","4LiE4LiT4Liw","IHBveml0","IGtvbmXEjQ==","6LCD5p+l","IOOBiw==","IMSNZXJ2ZW5jZQ==","IE9ka2F6eQ==","IOuPhOyLnA==","4Lix4Liq4LiU","IGfDoWk=","INCg0L7QsQ==","INCx0L7Rjw==","5omp","5byA5bGV","YW5paw==","IHZ5xb4=","IGJhxZ9sYXk=","IGJha8SxxZ8=","ZWtjZQ==","0YHRgtC40LrQsA==","0LXRgNCw0YLRg9GA0LA=","IOu2hOulmA==","IFBvxI1ldA==","b2TDocWZ","64uY7J2Y","IGtsaWQ=","27LbuQ==","INqG24zYsg==","bcO8cg==","IHPhu6k=","2YrYp9mG2Kk=","5Yqx","IG9rdQ==","INCy0L7QtNC4","INiy24zYsdin","5aSn5Yip","INmE24zZhtqp","INmK2KzYqA==","2YXbjNmE","IM+Dz4TPgc6x","5pmT","4Li04Liq4LiV","IMWfaWRkZXQ=","INGA0LXQutC+0LzQtdC90LTQsA==","IHBvxb5hZGF2","INC/0ZbRgQ==","5YWs5byP","INqv24zYsduM","0LrRgtCw","INmF2YbYp9i32YI=","IGZpcm15","IOC5hOC4mw==","IM6tz4HOsw==","5b+r6YCf","44GM44Gq44GE","0L3QtdGB0YLQuA==","IOeyvg==","0YDQsNC00Lg=","44KS44GL","77qq","a3luxJs=","IOCkueCkpA==","dGFr","INmK2YjZhtmK2Yg=","w7bEnw==","INGA0YPQug==","5ZyL6Zqb","0Y7RgdGM","
INC00LDQstC90L4=","IHBvcGlz","IELEsEw=","INmG2YLYrw==","INGB0L/QvtC2","0YbQuNC+0L3QvdGL0YU=","INGI0L8=","0YPRjtGJ0LjRhQ==","INCy0L7Qt9C00YPRhQ==","0YLQuNC1","IFXFvg==","z4zOtA==","4LiB4Lij4Liy4LiE4Lih","IGFsYW7EsW5kYQ==","IHPhuq90","44OQ44Kk","TmfDoHk=","IOu5jA==","77yJ44Gv","IOS/oQ==","0JXQoQ==","IFRhdG8=","IMO6bm9yYQ==","ZXJhcA==","xKA=","IFThuq1w","INC60L7QvNC/0LDQvdC40Lg=","44Op44Kk44OI","6ZyA5rGC","INiq2YjZgg==","4oCZ4oCZ","656N64uI64uk","INC60LLRltGC0L3Rjw==","IG95dW5jdQ==","woDCgMKAwoDCgMKAwoDCgA==","5YaK","IHlhcG3EscWf","4Lix4LiH4LmE4Lih","INC30LDQv9Cw0YU=","w6FsYQ==","INGC0LXRhdC90LjRh9C10YE=","INit2LU=","4Lij4LiU","5byE","INqv24zYp9mH","2KfZh9ix2Kk=","IOCkj+CkoQ==","0L3QuNC80LDQtdGC","2KfYr9mG","zpzOkQ==","IOekvg==","0LDRgNGH","2KrYsg==","5ram","aW5pemlu","IGJleWF6","INio2YjZhA==","5Z2h","44Gu44KI44GG44Gr","IHlhcHTEscSf","IGRhxJ/EsQ==","IGJhxZ9hcsSx","IM+Azqw=","INC/0YDQvtC00LDQtg==","QuG7mQ==","IOCkpOCkpA==","IHBvZHN0YXQ=","IOa1gQ==","IHpkcmF2w60=","IOeh","IG9wYWs=","IGjhu41h","5ouU","0YPQttC0","IHRy4bupbmc=","2YjYsdmK2Kk=","0YvQuw==","dW1zdXo=","INiz2KjYqA==","6K645aSa","5a6e6aqM","INCx0L7Qu9C4","IGR1eeG7h3Q=","4bqtYw==","INCR0LXQtw==","INio2YTZhtiv","0LzQvA==","0YDQtdC7","TsSw","IOODrw==","6Yu8","INGB0LLRjw==","IOWQjg==","IG11aHQ=","INC/0YDQvtCx0LvQtdC80Lg=","INGC0Y/QttC10Ls=","INCh0LXQvA==","4Lik4Lip4Lig4Liy4LiE4Lih","4LmI4Liy4LiV","w7Zyw7w=","w7x5b3J1bQ==","INin2YTYo9it","INGB0YLRgNCw0Yg=","aG9v","4KSn4KSw","IHRsYWs=","IHNycG5h","aWZpa2FjZQ==","IHJlaA==","INC80LjQvdGD","44CAag==","INCz0YDRg9C/0Lg=","IM6szrs=","IG9sdXJzYQ==","zrvOv86zzq/OsQ==","INCS0LjQug==","IG3DvGNhZGVs","IHrDoXbEmw==","INGE0LXQstGA0LA=","xI1uw6E=","4LmM4LmA4LiL","INmE2YTYrQ==","0YDQuNC/","INCx0YPQug==","44GI44Gq44GE","IHBvcmFk","IHNhbW9zdGF0","IHRlc2lz","2KfYqNmC2Yc=","INis2K/Zitiv2Kk=","6YCS","4pSB4pQ=","2LPbjNmG","IGdlcmVrdGnEn2luaQ==","4Li14Lii4LiZ4Lia","6KiA44Gj44Gm","INGW0L3RgtC10YDQtdGB","INGP0LrQuNC8","IOaAuw==","a292b3U=","IGRlbWVr","2KfZhtmK2Kc=","IGRvbcWv","xaFuw60=","YXRl
xZk=","5YCr","zrTOv8+Dzrc=","IOq4sOyXhQ==","5ZSH","7Lmg","0ZbQtNGD","66aw7J20","5pyA5Yid","6LiP","5oOz5Yiw","4KWN4KSs4KSw","IOyelA==","INGA0LDQt9C90YvRhQ==","a3JvbQ==","zrnOsc69","INC00YDRg9C3","5Lu/","IOq3uOughw==","INC00LDQu9GW","5pWI5p6c","IOCkueCktQ==","6Lyd","IOywuOqzoA==","IOyGlA==","IHpuYWw=","INC/0LXRgNGB","2ZDZkQ==","INGC0LXQtg==","5Yuf","zrnOuA==","xI3Frw==","IGVraXA=","IGtodW5n","6ZeY","INiq2LXZhduM2YU=","0L7QuNGC","INGF0L7Quw==","5oqe","YW1hbQ==","IOKWsw==","44GH","INi52YbZhw==","IOywuOqwgA==","IM6az4w=","5Y+k5bGL","0LrQvtCy0L7Rlw==","4Lio4LiI","0L7Qu9C+0LPQuNGP","INmF2KvYqNiq","INCa0YDQsNGX0L3QsA==","INC80LXRgdGP0YbQtdCy","IGFsxLFuYW4=","IM+Az4HOsc6zzrzOsQ==","IOyeoeuLtA==","INC/0LvQvtC0","INGC0LrQsNC90Lg=","0YHRi9C70LrQuA==","2LPYt9iz","cmFuxJs=","0LrQsNC2","0LXQvNCw0YLQuA==","INiy24zYs9iq","5r+f","IHBvcGxhdA==","zrPOrc69","7Yag7Yag","IHTDonk=","IOy1nOq3vA==","44Op44Oz44K5","IGfDvG5lxZ8=","INmB2YI=","IHNhxJ9sYXlhbg==","INit2LLYqA==","4KWB4KSy4KSo","IEJpbGlt","IEJhdMSx","5qC355qE","zrTOuc66","zrHPgc6vzr/PhQ==","IOybgA==","IGzhu61h","2YbZiNi5","546y","0LDQvdC+0Lw=","IHN0w6F0bsOt","IOS6jg==","IG3DuWk=","IMSR4buZdA==","5rKD","5YWs5ZyS","INGB0YzQvtCz0L7QtNC90ZY=","0L3QvtGB0Lg=","WmE=","INC00LvQuA==","IM+Dz4XOvc6t","IFbhu4s=","bWF2","IE3DvHNsw7xtYW4=","L++8jw==","INC30LDRidC40YI=","6ZaJ","IOeBqw==","IOW3nQ==","INCw0LY=","6L+H5p2l","4LiX4Liy4LiZ","IEFyYcWfdMSxcg==","1aHV","IHBvbcSbcg==","IGTFr20=","5aau","IGhsYXZuxJs=","IGZpbmFucw==","IM6zzr3PiQ==","z4PPhM63zrzOsQ==","77yM55So","7Iut7Iuc7Jik","INmF2KvYp9mE","LdCR","0YbRltC50L3Rlg==","INiv2LPYqtmH","4KWJ4KS4","0YDRltC/","IHDFmWlwb20=","INmI2YTZig==","INmI2LLZhg==","IGVsZWt0cmlr","IFF1w6Ju","aXbDqQ==","IGzhur0=","566A5Y2V","IG9ubGFyYQ==","0L7RgdC70LDQsg==","7Iuc7YKk","66qs","INmF2YLYr9in2LE=","IE9ydGE=","IFNlw6c=","INmG2YjZgdmF2KjYsQ==","4Li44LiZ4Liy4Lii4LiZ","INGD0LzQvtCy0Lg=","IOCkquCksOCkrg==","IHN0cm9t","INC60YDQsNGJ0LU=","56em","55u45omL","6Zu76KaW","IHV5Z3VsYW1h","INGA0LjQtw==","5oiy","4KSv4KSw","IEhsYXY=
","IOyLuA==","INC70LjQv9C90Y8=","xYh1asOt","0YDQuNC3","6auY6YCf","55u45b2T","a2VuaW4=","INC+0YHRgtCw0L3QvtCy","IGJpdGs=","b3ZhbsOpaG8=","INCc0LDRgNC4","6LW2","7L2p","IMO2bMOnw7w=","INCh0LXRgNC10LQ=","IFRo4budaQ==","z4nOvc6x","2YjYqNip","IGNo4bulcA==","4oCM2K8=","IGNow6F5","INCS0LXQu9C4","INC+0LHRgdGC","IOyLnOymjA==","2K/Zhdip","0L/QvtC0","bHVl","INC00ZbQu9GP0L0=","INm+2YjYs9iq","INin2YTZhtiz","6IKM","7IiY66W8","IMO6cm92","INmF2LTaqdmE","6YeN6KSH6YeN6KSH","0L3QtdC3","IGRvcG9ydcSN","IHRhc2FyxLFt","7YGs6riw","7J207IWY","IGRlc2V0","INmF2LHYqtio2Lc=","4Lix4LiS4LiZ4Liy","J8Sx","0YfQutC4","IOyeiOuNmA==","0YjQutCw","bsOhbQ==","0YHRgtGA0L7Qsg==","4KWN4KS44KSw","0L3Rg9C70LDRgdGM","44Gh44KH44Gj44Go","IOWm","zrPPjA==","IOm7kQ==","WGVt","IHThu4c=","IOuMgO2GteuguQ==","6riw6rSA","5omN6IO9","6K+t6KiA","ZWRleXNl","INCi0Ys=","INGB0L7QtdC00LjQvQ==","IOyXhuyKteuLiOuLpA==","0Y/RjtGC","4LmB4Lir4Lil","IOyngOuwqQ==","IG9zb2Juw60=","24zZhNuM","INCw0LLQs9GD0YHRgtCw","0YnQuNC6","IHbDvcWhZQ==","Z3Ro","IM+AzrHOvQ==","2KzYp9ix","INCy0LjQtNC+0LI=","7J207IqI","INCR0LDRgA==","IM+Mz4DOv8+F","5qSF","INi52KfZhNuM","IFF1eeG6v3Q=","w5xN","44Od44Kk44Oz44OI","IOq5jA==","INC60LDQvdC00Lg=","a292w6lobw==","IE1lcmtleg==","IHlpeQ==","IHDFmcOtc3DEmw==","INGC0LXQvNC/0LXRgNCw0YLRg9GA0Ys=","INm+2Yo=","4Lik4Lio4LiI","6LCD55So","INGB0YLQvtGA0L7QvdGD","4LmJ4LiK","5aW944GN","LsWe","INC/0YDQvtC3","2YbYqtin2Kw=","6Zu75a2Q","LjouOi4=","6KiT","0LjRh9C10YHQutC+0LU=","INC90L7Qs9C4","IM67zq0=","IHPEsWvEsW50xLE=","IOqwgOyhsQ==","INiq2YbYuNmK2YE=","IMO2ZMO8bA==","IGHFn2HEn8SxZGFraQ==","IMW+ZWxleg==","INin2YTYudiv2YrYrw==","2LrZhg==","INC+0LrQvtC90Yc=","0YDQtdC80Y8=","TMSw","IG5lamQ=","IM+AzrvOsQ==","0YHQutC+","IOyImQ==","INm+2YjZhA==","zrjOtc69zq7Pgg==","IOyjvOyalA==","IOaKpQ==","INmF2YXYpw==","0KDQodCg","INGA0LDQtNGW","5LiA56eN","6b6E","IHPDtnls","z4HOus61zrnOsQ==","INC30LXQvNC70ZY=","IHZlxI1lcg==","Z2XDpw==","2LPYqtmF","IHNlZmVy","INGB0LLRltC0","77uf77s=","0LDQu9C+0LI=","7Iqk66W8","4oml","INiq2YTZgdmG","5Y675LqG","4KS84KWL4K
SC","INGE0L7RgNC80LU=","ZMO8bQ==","5YWB","0YDQsNC/","IFbGsMahbmc=","4Lit4Liw4LmE4Lij","4Lix4LiB4Lip4LiT","IOWNsw==","INin2YTYsdmF","INC30LDRhdC40YHRgtGD","wrBF","b2TDrQ==","IOCkteCkqA==","IMSRw6hu","IOWPlw==","6JC944Gh","IHppbQ==","66as7KaI","6IiS","INC30LHRltGA","IOS7t+agvA==","INC70Y7QtNC40L3QsA==","INCf0L7RgdC40LvQsNC90L3Rjw==","0LjRiQ==","IM6o","4Li04LiB4Liy4Lii4LiZ","IGJ1ZGV0ZQ==","INC30YDQvtGB0YI=","IHZ5aw==","INCX0LXQvA==","INC40Y7QvdGP","IG3Em2xv","2YTYp9mB","INmI2LQ=","INGB0L/RgNCw0LLQuA==","44GZ44GO","INCz0YDQsNC00YM=","Um96","zrnOvc6u","IGNo4buTbmc=","5LiA5Y23","IFhlbQ==","INGB0LjQvNCy0L7Quw==","IG9kbcOtdA==","INGA0Y/QtNC+0Lw=","INGH0LXRgNCy0L3Rjw==","4LiB4Lij4Liw4LiX","5Lq65Lq6","5rCX5oyB44Gh","dW5kYWtp","5ZyL5a62","zrXPgc68zrHOvQ==","INC70Yw=","IE7DvGZ1cw==","INC80LXRgNC1","2KjYsdin2YrYsQ==","0L3QsNC90L3Rjw==","INC90LDRgA==","IHThuqVt","5pa95bel","6aGv","IGjDqA==","5piO55m9","INC00L7Qs9C+0LI=","INmB2LHZhQ==","6ICX","7Iqk7J2Y","7IS464yA","6K+a","INC90LXQsdC+0LvRjA==","IOC4m+C4o+C4sOC4gQ==","IOy5vA==","IG92bGl2","IE5HQw==","44CC5LiN","2KfZhNmJ","5omj","LtCQ","0YDQsNGB0YLQsA==","IMOHZXY=","44Gj44Gh","77yM6YO9","IHJvdm7Em8W+","IM+Hz4HPjM69zrnOsQ==","IOyhsOyEoA==","INii2KjYp9iv","INCc0LDRgQ==","55m85bGV","5LuU","IGtlbmRpc2luaQ==","4LmI4Lit4LiH4LmA4LiX","IFbEmw==","IHLGsOG7o3U=","IG3DoW1l","INC+0YfQtdGA0LXQtNGM","INiz2KjYqtmF2KjYsQ==","INCx0L7Qug==","7KeA7Jet","INiq2KfYq9uM2LE=","IGxpc2Fucw==","IGdlcmVrdGly","IHNpemk=","0ZbQvdC+","IE3DvMWf","44GP44KJ44GE","INC30LDQutC70Y7Rhw==","44GT44Go44Gr","6KiA44GE","44CB5bCP","IGV0bWVrdGVkaXI=","5aCx5ZGK","IGthcsSxxZ8=","INC+0LHQu9Cw0LQ=","5aWR","cmFjYXQ=","INin2LHYqtmB2KfYuQ==","zrzOsc65","7ZSI","INmI2YTZhQ==","64qU7KeA","bG9tb3U=","INC70LjRhtCw","IOydjOyVhQ==","IGhvZG7Emw==","6Iux5paH","woQ=","4LmJ4Liy4LiC4Lit4LiH","IOqzhOyVvQ==","5ZCE56eN","INmF2LHarw==","6ZSB","IOCkqOCkpg==","44OL44Oh","INC10Lw=","IGVsZcWfdGly","IO2KueuzhA==","IM6lz4DOvw==","xaFrZXI=","TEVSxLA=","5rKI","bGlrbGVyaQ==","INmF2YfZhtiv2LPbjA==","IGJhxJ/Es
XI=","ZMSxxJ/EsW7EsQ==","INin2YTYqtiv","4Li44Lib4LiB4Lij4LiT","INGB0LvQtdC00YPRjtGJ0LjQtQ==","IOyngeygkQ==","5bCk","INC+0YHQvdC+0LLRlg==","IHTEm2xh","INC/0LDQug==","aXphY2U=","IG7DoXJvZA==","YW7DvQ==","INGN0L8=","IMO8w6fDvG5jw7w=","zqXOoQ==","6Ya06Ya0","4LmA4LiB4Lit4Lij","4oCM2KfZhtio2KfYsQ==","57aZ","zpHOoA==","xLFsxLHEn8Sx","IMOccsO8bg==","INC00L7Qt9Cy0L7Quw==","IO2DiA==","IOCkk+CkteCksA==","6Ku4","6JiH","INC/0YDQvtGB0YLRgNCw0L0=","6Z2S5bm0","44Gu5pa5","INqG2q/ZiNmG2Yc=","2YTYtw==","4oCc5oiR","IOuLpOyatOuwmw==","4KS+Lgo=","IG3DvGNhZGVsZQ==","IGPDrXQ=","4LmI4Lin4Lih4LiB","xJ/EsW5h","6rCc67Cc","IM+AzrHOuc60","2LbYp9uM","IGJvcsOn","7Yqc","INiu2K/Zhdiq","IHVkw6Fs","INCy0LjQsw==","IOuwsOyGoQ==","5bm+","2Y7YrA==","IOyXmA==","54Cs","74A=","IM6RzrjOrg==","0L/RgNC40LrQu9Cw0LQ=","INC/0YDQuNGH0LjQvdCw","INmB2LTYp9ix","5ru/","IGRvc3RhdA==","IOyhuOyXhQ==","INin2LHYsg==","2YjZhNmI2Kw=","2LPZiA==","5pig55S7","IHRow7Rp","IMKgwqDCoA==","4LmB4LiZ4Liw","6Kit5YKZ","INC80L3QvtCz0LjQtQ==","0YLQvtGE","acWhdMSb","4KSX4KSi","INC40L3QtNC40LLQuNC00YM=","IOyDne2ZnA==","INC30L7QstGB0ZbQvA==","7YOV","55yg","IOq1reuCtA==","ZXB0YWw=","cmFjaQ==","6KGh","44S3","IFN0xZllZA==","2KfZhNmK2Kc=","zqPOpA==","m7A=","44OJ44Or","w6F6w60=","INCw0YHQvw==","IGTEscWfYXLEsQ==","INCy0LjRgNC+0LHQvdC40YbRgtCy0LA=","ZXph","77yM5LiN6L+H","54Om","44Oz44Kw44Or","IHJvenZvag==","INmF2YbYqti02LE=","INGD0YLQtdC/","INiv2YrZhg==","INC30LDRgdC+0LHRltCy","TmfGsOG7nWk=","44K344O8","IEZyYW5zxLF6","zpnOpA==","2KfYptmB","zrnPhw==","4Li14LmA4Lih","4KWN4KSu4KSo","4KWN4KSu4KSa","INiz2LnYsQ==","776d","67Cp67KV","INCh0L4=","IOCkluCkrOCksA==","7IaM6rCc","IHNsb3Zh","UVBDUA==","IEvEsXo=","2LfZgdin2YQ=","INC60L7RgNC8","IOyXheuNsOydtO2KuA==","ZXNwb8WI","4LiU4Liy4Lin","0L7RgNC+0Lw=","INCz0YDQsNGE","INC/0ZbRiA==","IOu/kA==","w712","0KHQsNC8","IGtyZXY=","IEJ1bnU=","IHpvYnJheg==","INiz2K7Zhg==","IOaUrw==","0LvRjtCx","2Y7Yp9mG","0LzQsNGC0YDQuNCy0LA=","zrvOtc+N","INC/0L7RhdC+0LQ=","INCz0YDQtQ==","55yL552A","4LiI4Liz4LiB","4Lix4LiH4LiE4Lih","IHNlw
6dlbmVr","xLBzdGFuYnVs","INCy0ZbQtNC80L7Qsg==","bWl5b3I=","IG3hu6Vu","7J207JeQ","IE5oxrA=","wqB0b20=","bMSxa2xhcsSx","wqDEkQ==","44O744Oe","INmB2Ko=","IEZha8O8bHRlc2k=","7KCE7Z6I","6aqR","IOyekeydgA==","57yY","7LqQ","IG3DvHppaw==","0LDQu9GL","IHBvemVt","54On","IOW4uA==","xaFpbA==","4KSG4KSq","4LiB4Liz4Lir4LiZ4LiU","INqv2LHYr9i0","zrvOuc6s","IMO2ZGVu","5Y+q6KaB","IMSQbw==","IHN0cmF0ZWo=","INmH2KrZhA==","2YLZgQ==","IGt1bGxhbsSxbMSxcg==","INGB0L/QvtGB0YI=","IG7Em2hv","INCf0LXRgNC10LQ=","INC40LfQvNC10YA=","XV0+","INC90ZbQutC+0LvQuA==","IGhheWFs","INC00L7QtNCw0YLQutC+0LI=","IOCkqOCklQ==","IGluc2FuxLFu","4Li44Lih4Lig4Liy4Lie","b2dyYWZpZQ==","0LLQvtCx","INin2YbYs9in2YbbjA==","IG3DvGs=","INGD0LzQtdGA","0L7Rh9C90YvQtQ==","64+E7J2Y","IGFyYQ==","IOu5qA==","IM66z40=","0LvQvtC5","0YHQuNC+0L0=","IHJvemTDrWw=","YXnEsWY=","INmI2KfYrdiv2Kk=","0L7RgNCw0LvRjA==","IHBvY2hvcA==","6Zqo","4LmJ4Lit4LiH4LiZ","INmI2KfZhg==","zpzOtQ==","IM68zr/OvQ==","0YPRiNC60LA=","b3JkdW0=","5riF5qWa","IERlxJ8=","z4DPgc6/","INmI2KfZhNiq2Yo=","IHBva3Vz","7ZuE6riw","6aWu","5peF6KGM","INC20LXQvdGJ0LjQvQ==","IGRvxJ9ydWRhbg==","INGP0LE=","IHphxI3DrQ==","IOuztOyXrA==","LUNQ","5ZCo","4KWL4KSW","0YDQvtCz0YDQsA==","bGVyZGk=","7Iq0","2Y/ZiNin","IHVzdGFub3ZlbsOt","INC00L7RgdGC0LDQsg==","IGbEsXJzYXQ=","INin2YTZhdmH2YbYqQ==","INCy0LXRidC10YHRgtCy0LA=","INC90LXRgdC/","INin2YTaqdiq2LHZiNmG","dGHFnw==","5oiS","IHl1cnQ=","IGdpcmRp","INCa0YPQsQ==","IOulvA==","4Li44LmM","44Gd44GG44Gq","4LmJCg==","INCy0YvQsdGA0LA=","a292xJs=","IFNpeg==","INqv2KfZhw==","INCn0LDRgQ==","INC30LPRltC00L3Qvg==","LtCf","5aeK","INCa0YPRgA==","IOydmO2VtA==","IGV0cmFm","INC60LDRiA==","INi324w=","zr7Otc65","57KS","INii2LA=","IGLDtmxnZQ==","IOCkruCknOCkrA==","2ZDZgw==","IHbDoWxreQ==","44Gg44KI","IG1lc2Fq","IHDFmWlzdA==","IHR5cHU=","INC60LjRiNC10Yc=","44KJ44Gu","IGtlbmRpc2k=","INCy0ZbQtNCx0YPQstCw","5L6v","INC00LjQt9Cw","44CACg==","INC/0YDQvtGG0LXRgdGD","INGN0LvQtdC60YLRgA==","X1BVUw==","INC80L3QvtCz0LjRhQ==","IGvDqW0=","5p6q","546w5Luj","IOmFjQ==","66Gt",
"0YLQuNGB0Y8=","IGzhu6Vj","INmI2KfZhNit","cHRhbA==","4bq1bmc=","z4DOuw==","IGRvbHU=","IHTDsmE=","INC40L3QvtCz0LTQsA==","INC/0L7RgNGP0LTQvtC6","0K/QutGJ0L4=","4pSY","INi62LHYqNuM","56e75Yqo","4Lii4LiZ4LiV4Lij","SERBVEE=","X1BVU0hEQVRB","INir2KfYqNiq","5YyF5ZCr","IM+Az4HOrc+AzrXOuQ==","4KS84KWL","5ZCN5YmN","0YLQtdGA0Lg=","772v","IOWFiA==","0L3QtdC0","z4HOv8+Nzr0=","0LLQtdC5","6IKW","IMWZZWRpdGVs","IHRow6lw","INmH2YHYqtmH","INC00YDRg9Cz0LA=","RVLEsA==","IOG6og==","INC/0LXRgNC10YA=","INC20LXRgdGC","IMSR4bqzbmc=","56au","0LDQu9GM0L3QvtC8","4KS/4KS34KSv","0LjQtNC10L3RgtCw","INii2K7YsduM2YY=","IOaT","IOC4oeC4q+C4suC4pw==","INC70Y7RgtC+0LPQvg==","INCx0ZbQt9C90LXRgQ==","Z8Sxw6c=","IG5n4buTaQ==","0L7Rh9C90YvQuQ==","IG/EjWVr","INmF2LHYqQ==","IHR2YXI=","IHNhbW96xZllam3Emw==","IEJlbGVkaXll","INCy0L7QtNCw","INqv24zYsdiv","INCz0L7QtNGL","44Gr6KGM","5piv5oiR","0YjQuNC70Lg=","IOWbveS6pw==","4bunaQ==","INCx0YPQtNGD0YLRjA==","INGA0LDQudC+0L3Rgw==","IOyT","INmI2KfYsw==","INin24zYtNin2YY=","zrXOvc6/zrTOvw==","INC90LXQt9Cw0LvQtdC2","INm+2LTYqg==","IGdpcmnFn2lt","INC00LXQu9C1","INin2LXZgdmH2KfZhg==","4LiU4Lin4LiB","INin2YTZgtmK","4LmM4LiI","66q7","IGRydQ==","6L+5","0LDQtNC20LXQvdC90Y8=","2YHZhg==","z4fOv8+C","4LmC4LiI","ZXlsZQ==","5aGR","IHVwcmF2","INC30LTQsNGC","IHZpZMSbdA==","IOC4m+C4ow==","INGE0LXRgA==","0IbQvQ==","IOy1nOyLoA==","bG9oYQ==","INC40YHQv9GL0YI=","IGF2YW4=","zrPOv8+F","IEdp4bqleQ==","44K744Oz44K/44O8","6YGN","0LXRgNCw0YU=","IOqwgOyngOqzoA==","INC40LQ=","IG1ub2hlbQ==","5qOA5rWL","IGV0bWU=","INiq2YXYsQ==","IGJhxZ9sYXlhbg==","44GP44KM","4LmH4LiZ4LiB4Liy4Lij","INGF0LDRgNCw0LrRgtC10YDQuNC3","IGFubGFtxLFuYQ==","2Y/Zhw==","INGB0LXRgNC/0L3Rjw==","55Wq57WE","IG1zZ2lk","IHp2w63FmQ==","IO2ajOybkA==","IHlhcGFy","5LyY5Yq/","0LXQvdC90YvQvNC4","INij2Ks=","7LKZ","IGppbsOpaG8=","INiv2YHYp9i5","INit2qnZiNmF","IHJpemlr","zqzOu865","4LiH4LiC","6LWi","IM6Vzps=","IG9rdW0=","5pS25YWl","INqG24zZhg==","5pyJ55qE","0YbQsNC80Lg=","ZMSbbsOt","INC60L7RgNCw0LE=","IGFsYW5kYQ==","4Liq4LiZ4Liy4Lih","77yJ44G
u","xLFzxLF6","2YrZitix","2YPZitip","IG5lYm/FpQ==","IGJpdGly","IOODnA==","2ZHYpw==","77yG","INin2YTYqtin2LHZitiu","4Lih4Lir4Liy4LiZ4LiE4Lij","YXTDvHJr","44K544OG44Og","zrjOrs66zrc=","IM66zrHOvQ==","IFPDvHI=","IGTEscWfxLE=","IGthbmNlbA==","INm+2K7YtA==","aFBh","IMSNdA==","INC/0YDQvtGF","4LmJ4LiI","IOqxsOyVvA==","INC00LXRgNC20LDQstC90L7Qs9C+","6IKh5Lic","7J207YGs","2YPYqtmI2LE=","IOOAgCDjgIAg44CAIOOAgCDjgIAg44CAIOOAgCDjgIA=","6Ki6","INio2YXYpw==","INC90L7RgNC80LDRgtC40LI=","w6dpbGVy","4LiH4Lio","6ZuG5Lit","0YDQuNGB","0YfQsNGU","bGnEn2lu","44O844K/44O8","0LDRgNCw0YI=","5Yqb6YeP","INGB0YXQtdC8","5YWl5Y+j","56a75byA","z4HOv8+Gzr/Pgc6vzrXPgg==","INCX0LDRgtC10Lw=","IGthcsWfxLFzxLFuZGE=","INin2YbYqti4","772K","IGXFn2l0","IHlhesSxbMSx","0JrQvtC8","2KfYstmK","IGtpbXNl","0YDQsNGJ0Lg=","4Lix4LiB4Liq","IGthbnVu","IOuQmOyXiA==","IM65z4PPhw==","INC80LXQtNC4","5rCn","77yM5YW25Lit","IHlva3R1","IOOCvQ==","INC/0YDQuNC+0LHRgNC10YI=","2YjbjNi0","44Wg44Wg","INqp2LHYr9mF","IGR1dmFy","IOe4","xLFzxLFy","IO+6jQ==","INCg0L7RgdGB0LjRjw==","4LmJ4LmD4LiZ","IGnFn2k=","ZG9s","INmF2K3ZhdmI2K8=","INGB0LDQvNGL0YU=","INio2YbYp9io2LHYp9uM2YY=","44KM44Gp","4Li44LiV4Liq4Liy4Lir","LsK7","4Li54LiK","IFRlcA==","44GP44KT","IOW4gw==","IOCkpOCksg==","IHNlcm0=","zrvPjM6z","IMWeaW1kaQ==","IOCknOCkqOCkpA==","LdCS","6Kiq","INCy0ZbQtNC/0L7Qsg==","4Li04LiZ4LiU","zrnPg868z4zPgg==","zqnOpA==","4oaS4oaS","zrnOus6/zq8=","INGB0L/RgNCw0LLQsA==","5py65YWz","IMOd","INC80L7QstCw","INC80L7Qs9C70LA=","INC00LvQuNGC0LXQu9GM","44GX44Gm44KC","IM6yz4HOrw==","INC20L7QtA==","6Zeq","INC80ZbRgdGM0LrQvtGX","zrfPgc61","56CC","IGt0ZXLDvWNo","INCT0L7Qu9C+0LI=","IGjhu5lw","IHBhbsOt","2KrZhdin2K8=","wpw=","5Y2B5YWt","zrrOv8+C","0LXQstGL0YU=","5ouS","INGB0YLQvtGA0L7QvQ==","IHBow7NuZw==","INGD0LvRg9GH0Yg=","bXJ0","bXBhcg==","IFNsYXY=","IGtvdg==","7J247J2A","IOW6lA==","4Lix4Lia4LiE","IGvDrA==","IGHFpQ==","xZnDrXQ=","7LCM","2YXZhtiq","xLF5b3JsYXI=","5q2j5bi4","0L3Rj9GC0YLRjw==","cmFjw60=","INC/0LjRgtCw0L3QuNGP","4LiI4Liw4LmA4Lib","INin2YTZh9mG2K
8=","IERvc3Q=","INCS0LDRgdC40LvRjA==","IO2DhA==","IG7huqFu","4LmI4Lit4LmE4Lib","2LHZiNi2","wrHYuA==","IGJ5Y2hvbQ==","4LiZ4Lin4Lii","44Gg44Gj44Gm","INCY0YHQvw==","4LiE4Lij4Lia","IOC4quC4luC4suC4mQ==","IOuCrg==","amnFoXTEmw==","INmB2YjYqg==","IENoxrDGoW5n","IOydtOujqA==","IHDFmcOtdG9t","dHVhbA==","YmV0dGU=","IHNhYmFo","zrzOrw==","IG3hu4duaA==","44Gu44Gg44KN44GG","IHphbcSbxZk=","5Y2B5LqU","IOyViuydhA==","2KfZhtmI","0LXQvdGD","INGD0LPQvtC0","IFbGsOG7nW4=","IOuTseydhA==","IGJlbGlydGlsZW4=","5p+E","IHRla2xpZg==","rII=","INC/0L7QtNCw0YLQutC+0LI=","INin2YTZhtmH","77y0","7JuD","IOCkueCksg==","INC40LzRgw==","INC60L7RgtC+0YDRi9C8","77yM5Lul5Y+K","INGC0LDQsdC70Lg=","4KS+Og==","INio2LHYrA==","IM6tzr3Osc69","INmK2YjZhNmK2Yg=","w73FoQ==","INmK2Kw=","INGC0YDQvtGF0Lg=","5p6d","IGTDoHk=","IEJ1cmFkYQ==","IM+Dz4XOvM6y","IM6Rz4HPhw==","IHNvY2nDoWxuw60=","INqv2Yg=","IHlhbsSxdA==","44Gv44Gq44GE","44Gu5LiK","IG7Dumk=","INix2YHYqtin2LE=","INmF2LHYp9iq","2LLZhdin2YY=","4Liy4LiI4Liy4Lij4Lii","INGH0LjRgdC70ZY=","INiz2YbYqg==","IMOWemVsbGlrbGU=","7Ie8","IMSNw61t","QURERQ==","44Gu44KI44GG44Gq","2YjZhNmI2pjbjA==","IO2ZnOyaqQ==","44CB44Gp44GG","IM6gz4HPiQ==","55m75aC0","INC90LDQtNCw0L3QvdGP","INC80LXRgNC10LY=","IOydtQ==","asOtY8OtY2g=","aXRvdQ==","2YLZiNmE","2YXYrA==","INio2YbYrw==","IMO2bsO8bmU=","IO+9sA==","0LfQsg==","INC10YHRgtC1","0KDQmA==","0YDQvtC7","YXlsYQ==","INC60LvRgw==","5o6o6Jam","INGA0L7Qt9GA0LDRhQ==","IOyDgeuLtA==","INmG2LPZhdip","INCy0LjRhdC+0LQ=","4KWA4KSG4KSI","INC/0YDQuNGB0YLRg9C/","2ZLYuQ==","IHRlxZ9la2vDvHI=","0LTRj9C60Lg=","IGZpa2ly","4Lix4Lio4LiZ","INii2LLZhdin24zYtA==","IGJpemk=","z4bOsc+B","5pyq5p2l","5pC6","IM60z4XOvc6x","INix2YjZhQ==","IGJ1bmRhbg==","INmC2KfZhNio","IGhhZnQ=","5b+9","INCc0L7RgA==","IHrDoXBhcw==","IOu5mw==","5bu3","5LqI57SE","IGtodXnhur9u","IM6RzpM=","IOyekeyXhQ==","4KSh4KSw","IGplZG5vZHVjaA==","4KWJ4KSu","IGRlxJ9pbGRp","IGtvbG8=","INiv2YLbjA==","0LvQsNC80Lg=","IEjhu41j","IOCkquCkuA==","IM6gz4HPjA==","IOKXkQ==","INC90LDRgdC70ZbQtA==","INC00LjQstC4","IHDFmWV
zbsSb","INCi0LDQutC40Lw=","IHJ1a291","5LiA5YiH","INGB0L/RgNC4","ZW5za8Op","5pem","INmC2YY=","IMO6c3Rhdg==","4KS/4KS24KSk","4LmMKQ==","IFRyYW5n","IG1vaGxh","IM6VzrvOu863zr0=","INC/0L7QutC4","INii2YXYp9ix","5ZC+","INGA0LXRgdC/","IHRha2Rpcg==","IHJhaGF0c8Sxeg==","6Z+z5LmQ","IOKUgw==","aWxpcw==","INmI2KfZhNil","5a6Z","0YPQvNC+0LI=","INCb0LjRgg==","Ojo6Onw=","5YW9","INmG2LLYr9uM2qk=","0LXQu9GW0LI=","zrjOv8+Nzr0=","7JeQ7ISc64+E","6LWE5qC8","55CG6K66","IEtlbWFs","INC60LXRgA==","4Lip4Liy4Lii4LiZ","IOWNjg==","KeyXkA==","IOuKmA==","44Od44O844OI","INCX0LQ=","2KfYtdmK2YQ=","IGthdMSx","44KC44GX44KM44Gq44GE","INC60LDQttC00L7Qs9C+","INC00YA=","IGZ1dGJvbA==","2YTZitmB","IOyngOuCnA==","INm+24zYtNmG2Yc=","w7xsw7xr","IOC4leC4s+C4muC4pQ==","IGLhuq1j","IOWboA==","aWtsZXI=","z4HOuc6s","INCy0LLQsNC20LA=","IHZ5cGw=","INCy0L3QuNC3","7YA=","55y+","INGB0LjQu9Cw","INC90LDQu9C40YfQuNC4","INi52LHYp9mC","INin2YTZhdmD","5bCx5Lya","INC80ZbQsw==","IM6MzrzOuc67zr/Pgg==","0YnQtdCz0L4=","IO2WieyglQ==","wqBtcGg=","IG1hbMOp","INuM2KfZgdiq2Yc=","IG1ub2hh","zrPOrA==","INC/0L7RgdGC0YDQvg==","INin2YTZhdmI2LM=","IG9sbWE=","64m07Iqk","IHR1dGFy","44O844OT44K5","4KWN4KSl4KSo","LdC70LjQsdC+","5qWt5YuZ","INC+0YHQvtCx0LvQuNCy0L4=","6K6A","2YHZh9mI2YU=","IGvhurs=","IMWhdMSb","IGPhuqdt","IMSNbMOhbmt5","IMSQaeG7h24=","KD0=","T1bDgQ==","dWxkdQ==","YWZ0","IGzDo2k=","IGRvbGR1cg==","wqDCoMKgwqDCoMKgwqDCoMKgwqDCoA==","zrLOuQ==","44Gj44Gm44GN44Gf","7Lac7J6l7JWI66eI","5a+d","IOu2gO2DgQ==","INin2YTYp9iu","IM6zz4XOvc6x","4KSP4KSu","4KWM4KSy","2LnYp9iv2Kk=","IM66zr/PhQ==","INmF2LfYsdit","INGH0LXQu9C+0LLQtdGH","IG51bWFy","INC00LjQvdCw","z4TPgc6t","zrvOuc66","INC00L7Qu9Cz0L4=","IG5oacOqdQ==","INCy0L7RgdGB0YLQsNC90L7Qsg==","YXDEsQ==","IGthbsSx","IEvhur8=","44KJ44Ga","IGhhcmVr","44Gg44GR44Gn","5ruF","IG9obGVk","0LXRgNC40Lw=","INit2YrZhg==","INmC2YfYsQ==","IOCkrOClnQ==","2KfZvtuM2YU=","6LaF6L+H","IOaF","INiq2YHYsw==","YXPEsXlsYQ==","0LHQuNGC","INit2KfYrA==","INGC0YDQtdCx0L7QstCw0L3QuNGP","IOaOqA==","IOexsw==","44Kz44O844OJ","INGD0YH
QuA==","INin2K7ZhNin2YI=","IGRvc3R1cA==","INi52YTYp9mC","4KS/4KS14KS4","INC+0LTQuA==","dGVq","IHRo4buPYQ==","4Lix4LiB4Lip4LiT4Liw","INGA0LDRgdC6","INCd0LDRgNC+0LQ=","INC30LDQutGD0L8=","b8W+ZQ==","INin2KzYsdin","6rSR6rOg","0LDRgNGC0LDQvA==","INC/0LXRgNC10LY=","6JGj5LqL","INGP0LrQvtGB0YLRlg==","INCy0YPQuw==","0LzQvtC9","IGNobGFw","INGN0YLQvtC80YM=","0LDRgtGW","IO2SiA==","6KGX6YGT","2LPYrw==","2YjYsdmH","INiy24zYp9iv","5Zyo57q/6KeG6aKR","2KfZiNmK2Kk=","77yM5bCx5piv","ZWxlcmluZGVu","0YDQsNC20LQ=","INC/0L7Qt9C0","INC30L3QsNGC0Yw=","4Lix4Lia4Liq4LiZ","4KWH4KSW4KSk","IOabsA==","6rO87KCV","6a6u","IFZp4buHbg==","IGR2b2o=","zq/Ovc61z4TOsc65","IG9zb2Juw61jaA==","IOKAqg==","6Zm1","INiu2YjYr9i0","INin2YbYsQ==","INC/0YDQvtGE0LXRgdGB0LjQvtC90LDQu9GM","a8OhbQ==","INmF2YPYp9mG","INin2YTYo9iv","IOqzteu2gA==","IMSR4bupYw==","IEN1bWh1cml5ZXRp","5Ye644GX","0LTQsNC80Lg=","IOyImOyDgQ==","INmB2KjYsdin2YrYsQ==","IHPDvHJlc2k=","INio2Kw=","IOaUvg==","2K3bjA==","56CU56m25omA","5Ye654mI56S+","INmF2YjYqtmI2LE=","JiYmJg==","INC/0LXRgNC10Lk=","IOyEoOqxsA==","IMO6c3DEm8Wh","2KfYsdqp","IGV0dGly","IOy2nOyepQ==","IEthbnVu","INGD0LzQtdC90YzRiA==","INC30LDRgtCy0LXRgNC00LY=","INin2YTYr9mI2YTZig==","IOODkw==","IEJhesSx","5a2Q44Gu","5Yev","IHNlYmVi","IOWFsQ==","IGRuxa8=","5L2N5LqO","IFpk","5omx","INiq2KzYsdio2Yc=","w5RORw==","IOyYrOudvA==","z4nPhM61z4E=","INGB0LLQuNC0","5q+U6LWb","44Gr5ZCR","7JyE66W8","44GX44G+44GX44Gf","IGThu4s=","INCg0YPRgQ==","IHbhu48=","4KSC4KSh4KSy","INC/0LjRiQ==","IHNtcnRp","4LiI4Liy4LiB4LiB4Liy4Lij","INGB0LDRhdCw0YA=","IHRob8OhdA==","2KzZhdip","INC/0L7Qt9Cy0L7Quw==","INin2YTYq9in2YbZitip","2LLYp9iv2Yc=","44CB5Lit","zq7OvM61z4HOsQ==","5qac","bGFjYcSfxLE=","INC90LDRiNC40YU=","7JSA","INCY0YHRgtC+0YDQuNGP","w7xuZGVraQ==","INC/0LXRgNC10Ls=","IOuqqeyGjA==","INGB0YLQsNGC0YPRgQ==","0L7QstCw0LvQuA==","xZlheg==","INC00YDRg9Cz0L7Qs9C+","2YPZiNmF2Kk=","0YfQuNGB0YI=","zrzOvA==","5Y+N5bqU","aWNhcmk=","INm+2KfaqQ==","0LDQu9GM0L3QuNC8","IEJ1bmE=","0LjRgtC40LI=","0YTRgNCw","44O844OW44Or","INGC0L7Qsd
GC0L4=","65+s7Iqk","INin2YTYp9i5","5YWs6ZaL","5aWJ","2YjZhNiv","5ZCN54Sh44GX","5rCR5Li7","4KWB4KSc4KSw","7IKs66y0","IMO2bmNlbGlr","IOWo","0Y/QsQ==","55yJ","4KWN4KS14KSv","IEjDrG5o","55qE5Zyw5pa5","INin2YTYqtiz","5LiI5aSr","INC/0YPQsdC70ZY=","IG7Em2pha8Op","xJDhu5Fp","INGB0L7RgdGC0L7Rj9C90LjRjw==","4KWAKQ==","IMSR4bqtdQ==","amVk","6raB","IHNlbmlu","IEjDs2E=","4pmg","0LvRj9GO0YLRjA==","6Zey","7J247Yq4","2KrYqNmH","IOCksOCkluCkpA==","INGB0LvQvtCy0LDQvNC4","INi32KjZgg==","IHV5ZHU=","4Li44LiH4LmA4LiX4Lie4Lih4Lir4Liy4LiZ4LiE4Lij","IFNhbmF0","4LmJ4Liy4LiK","INC60L3QuNC2","zIFj","2KfZhdis","zrTPjg==","xa4=","IGJpbmg=","6L6G","bmXEn2k=","2LfZhg==","5biV","IOyHvA==","0L7RgdGA0LXQtA==","IM6/z4DOv86vzr8=","a8Sxcg==","4KWI4KS2","IOC4h+C4suC4mQ==","IGRydcW+","ZW1hdGlr","YWTEscSf","6L6e","IHBvdcW+w612w6E=","IGt1cnRhcg==","IHNhxJ9sYW4=","44CP77yI","IG3Fr8W+ZW1l","INio2KfYrw==","5pyf6Ze0","2KfYqtmB","IHlhesSxbMSxbQ==","IOyXsOqysA==","2YrZgdip","IGVtaW4=","INC90LXRgdC60L7Qu9GM0LrQuNGF","27TbsA==","5a+n","zq/Ots61zrk=","IGTDqWw=","dmVyacWf","5L6h5qC8","INin2LPYqtin2K8=","INCw0LvQutC+0LM=","LkhDTQ==","zq/Ov8+C","zrHOug==","2LfYuQ==","44Gj44GN","0Y/QtdGC0YHRjw==","0LvQuNC60LA=","INGG0Y8=","IOuniOyngOuniQ==","INCw0YDQvNC4","IM6zzrvPjg==","RU7DjQ==","666k","rZDvuI8=","IOavjw==","IOaWvA==","IM66zrHOu8+N","INCi0L7QvA==","dWx1cg==","IGFrY2U=","INmF2YjYrNio","ZXNpeg==","0L3Rj9Cy","0LDQu9GM0L3Rg9GO","0LDQu9GW0YHRgg==","INCy0LDRgNGW","INmF2KTYsw==","INmF2KfbjNmE","IM68zrXPhM6xzr7PjQ==","5Ye644GZ","IHbhu51p","65+0","77yL","5q+O","IHRhYmk=","4oKD","5qOL54mM","IMOQ","INC/0YDQvtGE0LXRgdGW0Lk=","0YPQstCw0L3QvdGW","zpzOoA==","INC20LjQuw==","2pjZhg==","0LvRg9GI","4b20","0L7QstC10YA=","6L6844G/","INCc0LDQutGB0LjQvA==","INCy0LfQs9C70Y/QtA==","INC90LDRgtGD","4KSu4KSV","INGF0LjQvNC4","INGA0L7Qt9GC0LDRiA==","2YjYsdin2YY=","INi02YfYsdmH2KfbjA==","5qmf6IO9","2K7YsA==","INGB0LLQvtGU0Zc=","0L3Rj9C10YI=","IGdo4bq/","IHDFmWVkY2g=","0ZTRiA==","0L7Qs9GA0LDRhNGW0Y8=","IOC4l+C4s+C5g+C4qw==","5Z2K","z4HPic69","4Liy4L
ij4Liw","IEvhur90","IGNo4bq3dA==","IOmZiA==","IGTEm2xhdA==","INCx0YPQtNGD0Yk=","IEHDp8Sxaw==","5qCq5byP5Lya56S+","INCf0LDRgA==","IEtodQ==","44CB5paw","INCx0L7QuQ==","66eI7Yq4","INGB0L7Qv9GA0L7Qsg==","2LPYp9io","0L3QuNGB0YI=","5byD","INi02YbYp9iz","0LXQvdC90L7QvA==","IOmhuQ==","6Im65pyv","0L7Qt9C10Lw=","INGA0LXRiNC10L3QuNGP","bGFkeQ==","INCy0YHQtdC5","5pS75Ye7","IOqysOyglQ==","44CA776e","IOqwkOuPhQ==","LdCQ","IG3DrXI=","4KWB4KSq4KSP","0L3RltGG0LjQvw==","0LHQvtC8","IMWhdA==","6ZyN","INGA0LXRiNC10L3QuNC1","INC00LjQsNCz0L3QvtGB0YLQuA==","aXBhcg==","2KfbjNiy","w6NuZw==","4Lix4Lin4Lij","INGG0LDRgA==","IHNseQ==","zr3Pjg==","IEt1emV5","2LHbjNio","IGNlbnU=","IGNlcnRpZg==","INGC0YDQtdGC0Yw=","4Li04LiU4LiC","INC/0LDRhtGW0ZTQvQ==","xZlpdg==","6ISC","orA=","IFBo4bqnbg==","INC80LXRgtC+0LTQuA==","4bqk","7IaU","5ZCM5a2m","IOWAiw==","0LzQvtGC0YDRjw==","IHV2w6Fk","27Hbudu2","6YG45oqe","IcK7","65iQ","INuM2YjYqtuM","INin2YTYrdix2Kg=","0L7Qu9C+0LPRltGP","bmlsYQ==","IMSR4bqjbmc=","w6F6aQ==","0YDQvtGJ","IG9ydGFkYW4=","INin2K7YqNin2LE=","IOCkheCknA==","IOunpOyasA==","INC/0L7QuQ==","INis2Yo=","0LrRg9Cy0LDRgtC4","IOG7ng==","INio2LTYsQ==","INmD2YrZhA==","0YnQtdGB0YLQstC+","IOyXrO2WiQ==","2KfZhdmK","0LLRltC70Yw=","IFBydm7DrQ==","INmI2LPbjA==","IMSQ4bs=","5oi/6Ze0","5Zyo57q/6ZiF6K+7","5pW3","IHRyYWk=","5L+X","INGB0LDQvNC+0YHRgtC+0Y/RgtC10LvRjNC90L4=","INGC0YDQtdCx0YPQtdGC0YHRjw==","zrTPgc6x","INGA0LXRh9C+0LI=","INCy0ZbQug==","INGA0YPRhw==","5aWn","IG9sZHXEn3VuYQ==","0LXQstGL0LU=","IOC4hOC4pQ==","2KfZhNmC","INGW0LzQtdC90ZY=","5pS75pKD","INGD0L3QuNCy0LXRgNGB0LjRgg==","IHRoxINt","INC70LjRgdGC0L7Qv9Cw0LTQsA==","4KWo4KWm","2K7Zig==","zpXOoA==","IGFydHTEsXI=","INiz2K7Yqg==","77yI5pit5ZKM","IM6fz4U=","0LjQstCw0L3QuNGP","IHN0YXZlYg==","4oWl","zrPPic6zzq4=","2ak=","INC40YHRgdC70LXQtNC+0LLQsNC90LjRjw==","5YCL5Lq6","IOuLpOyatOuwm+q4sA==","IM+EzrXOuw==","wrBO","INio2KfZhNmG","4LmM4Lie","IG5lbcWvxb5l","INCz0L7Qu9C+0LLQsA==","4LmM4LmB","5qKv","wpg=","zrTOt8+C","7J247Kad","bGF5xLFu","4b23","INmG2KrYp9uM2Kw=",
"INGB0L7QsdC70Y7QtA==","INC00LLQuNC20LXQvdC40Y8=","7Iw=","IHBvdsSb","IOyghOyXkA==","5aaC5LiL","INin2YTZhdiv2LE=","77yM5oiW","2KfYsdin","5rCR5peP","INio2LHZgg==","INC30LDQv9Cw0YE=","4LiZ4LmD4LiI","w6lm","IOC4n+C4ow==","IOuztOuCtA==","IOasp+e+jg==","LdGC0LDQutC4","6ama","0YDRltGP","5p+P","INC/0L7QstGW0YLRgNGP","57WE57mU","ZGHFnw==","IOCkueCkruCksg==","INGA0LXRlNGB0YLRgNCw","zqzOsg==","IM6gzr8=","IOq3uOumvA==","0YfQsNGO0YI=","4LiH4LiV","7YOA7J20","5oms","IHBvamlzdA==","IOeglA==","IOWPlg==","IMO8emVyaW5kZWtp","asWhw61jaA==","4KWA4KSm4KS1","5qqi","INC80LDRgtC10YDQuNCw0LvQvtCy","0LjQstCw0L3QvdGP","IOWwhg==","0LvQuw==","INC90LDQsdC70Y7QtA==","IEfDtno=","INCy0LfRjw==","55S16KeG","INCy0LDQug==","57+U","INCy0LfQsNC40Lw=","IGdpdHRp","aXRlbGVyaQ==","5Lu35YC8","INin2YTYqti1","4KS/4KSo4KSV","6YCa44KK","INGB0YTQtdGA","55m65aOy","4p2k","INqv2YjYtNuM","0LDQs9Cw0YLQvg==","IM+Dz4XOs866","0LDQstC40YE=","5oKj6ICF","INiu2KfZhQ==","zpnOms6XzqM=","xLFuxLF6ZGE=","cGFuxJts","IMSQ4buLYQ==","4LmB4Lil4Liw4Liq","IOOCgg==","IHNvbnVjdW5kYQ==","7J2N","ZWxlc3M=","IE5oYQ==","IHpha8Oheg==","INCy0L7RgdGC","IHZ6ZMSbbMOhdsOhbsOt","LeC4oQ==","IG1ldHLFrw==","INm+2KfbjNuM2YY=","INGA0LDRgdGC0LXQvdC40LU=","IG114buRaQ==","6LWE6YeR","IMWfw7xwaA==","2YrZhNmF","IGTDvMWfw7xuYw==","INC60ZbQvA==","IM+Hz4nPgc6vz4I=","w6F6ZXY=","IERlxJ9lcg==","5bel5qWt","INix2YXYsg==","IGFsZXNwb8WI","INC/0YDQtdGB0YLRg9C/","INi52YTYp9mI2Yc=","IG1lcmFr","4LmMOg==","546w5Zy6","0YbQstC10YI=","IOCkquClnA==","IOuLpOydjOqzvA==","dWRpYw==","IExlcA==","INC+0LTQvdGW","IGFsYXJhaw==","5a6J5o6S","IOC4guC4meC4suC4lA==","cmV6ZW50","aXNpbmRlbg==","2LHZiNuM","IHBsdQ==","56uL44Gm","0YvQstCw0L3QuNGP","IHJhc3Q=","IGTDvHplbmxlbQ==","amV6ZA==","INCy0LXRidC10YHRgtCy","INC00LjRgNC10LrRgtC+0YA=","0YTRhA==","dGFpbm1lbnQ=","INin2YTZiNiy","bGFuZGE=","INmG2q/Zh9iv","INC/0YDQvtGC0LjQstC+0L8=","44Gj44GP","44Go44Gq44KK","IOuwnOqyrA==","aWN0b3I=","44K444Kq","zp/Opg==","INGB0LrQu9Cw0LTRlg==","IG9ic2FodWpl","IFVrcmE=","5pWm","IM+HzrHPgc6x","INGA0LXQs9GD0LvQuA==","5L+64
4Gv","4Lix4LiV4Lin","6YSJ","INio2KfbjA==","6Yq3","IE7hurVuZw==","0LvQvtC0","2KfYsdmB","5rSB","IOuPmeydvA==","0YLQuNCy0L3QvtCz0L4=","4pSB4pSB4pSB4pSB4pSB4pSB4pSB4pSB4pSB4pSB4pSB4pSB4pSB4pSB4pSB4pSB","IC06LQ==","7Lus","INGI0LDQsw==","7KCE7J6Q","55qE5LqL5oOF","INGA0LXQs9GW","4KS/4KSv4KSy","INCd0LDQtw==","INCZ0L7Qs9C+","INCg0L7QvA==","IMOWcm5lxJ9pbg==","INC/0YDQtdGB","dWx1xJ91","INC30LDQtNC+0LI=","xZllaA==","5q+V5Lia","IHRo4bqtcA==","64K4","IGRsb3Vob2RvYg==","0LTRltC70YM=","YWxhdA==","5Luw","0L7QutC+0Lw=","INGE0ZbQu9GM","IE5nw6Ju","INiq2LHZgw==","INGC0Yk=","2LHZiNiv","w6d1aw==","cmFuw60=","IGRvbGHFnw==","IFF1YW5n","IHDFmWVkcG9r","IG7DoW3Em3N0w60=","0L7QudGH0LjQsg==","54uA","INCx0LjQt9C90LXRgQ==","44Gf44GP","IOyduOyynA==","0L7RgNC+","IEvDvHJ0","6re465+s","0YbQsNGC0Yw=","IELDqm4=","IGFjxLE=","2qnYtA==","77yI5bmz5oiQ","IOiBlA==","KeOAgQ==","ZGlsZXI=","0YfQuNGC0Yw=","xq/huw==","6Zm2","aWxlY2XEn2luaQ==","IHbFoWVt","5byA5aWW","6KeE5qih","dWxtdcWf","IOWImA==","0LXQvg==","INC/0LXRgNC10LLRltGA","5YiG5Yir","IGplZG7DoQ==","bGnEn2U=","INix2YXYttin2YY=","xLFrbMSx","2YfZgA==","6YeN54K5","0YfQuNCy0LDQtdGC0YHRjw==","66Gc7ISc","z4TOtc+Bzr8=","5Zyw5LiL","0LTQvdCw0L3QvdGP","IG5nxrDhu6Nj","4KWq","IM6Rzrs=","IGFsYWNhaw==","IOC5gOC4hw==","2KfbjNmG2K8=","IGjDoGk=","0YDQvtC40Lc=","INCn0Lg=","INGP0YE=","2K7YsduM2K8=","IGh1ZGVi","5Zyn","IOyEvA==","5ZSv5LiA","INCy0ZbQu9GM","INio2KfZhNin2KrYsQ==","4Lit4LiB4Liy4Liq","IFTDtGk=","4Lih4LiC","b21vcg==","IE9sb21vdQ==","IHhvbmc=","IGRvbcOhY8Ot","INin2K7YqtuM","INGC0LXRhdC90ZbRhw==","IGnFn3Rl","4KWM4KSm","INC90LDQtNC10LY=","2K7bjNi1","5Yqq5Yqb","INiq2KzZh9uM2LLYp9iq","IHZvbGU=","a2luY2k=","IGhlc2Fi","INGB0LXRgdGC","2qnYpw==","0YLQtdGA0L0=","4Lij4Lij4LiE","5Y+C6ICD","INCa0LDQsQ==","IMSwbXBhcg==","IG7DoXZyaA==","5ZKo6K+i","4LiW4Liy4Lih","IHllcmVs","IMOWbA==","54yb","INin2YTZiNi32YbZig==","IOydtOyWtA==","4Li04LiX4Lii4Liy4Lio4Liy4Liq4LiV4Lij","IEHFnw==","INC30LXQvNC70Y4=","INC00L7QvNCw0YjQvdC40YU=","INGD0LLQtdGA","QUxJ","0LPQsNC9","IOOAgOOAgOOAgOOAgOOAgOOAgOO
AgOOAgOOAgOOAgOOAgA==","IGRvc3Rhbg==","ZXpwZQ==","44GL44GE","2LHZgdiq2Yc=","INC80YPRgQ==","4LmM4Lif","6Ka6","0LDQu9C40LfQsA==","INGD0YfRgNC10LbQtA==","INqp2KfZhA==","IGV0a2lzaQ==","5L2G5piv","IHNvdXZpcw==","IFNhdmHFn8Sx","INio2LPYqNio","zp/OuQ==","6Jo=","IOau","IOyYgeq1rQ==","2KfYs9uM2YjZhg==","INin2YTYp9iq2K3Yp9iv","INCz0LvRjw==","4LmH4LiB4LiL","INis2YjZhg==","INin2YTYsdiz2YXZig==","wqBH","INGC0L7QsdGW","woc=","IOuMgO2WiQ==","54q25oWL","IOq3uOuDpQ==","INC40LzQvw==","INiq2YbYuNuM2YU=","2YTYp9uM2YY=","0YHRgtCy0LXQvdC90YvQvA==","0L7Qv9C+0Ls=","2LHZiNis","IOC4hw==","IOeCug==","IFVsdXNsYXJhcmFzxLE=","4KWM4KSC","44CB44Gd44GG","INiz2KfYr9mH","zq3Osc+C","IOCkhuCksg==","LdGE","IM6gzr/Ou865","INC90L7Rj9Cx0YDRjw==","2YjZhNmK","5puc5pel","5oyB57ut","IOq8rQ==","ZWNlxJ9peg==","INuM2KfZgdiq","IOWPuA==","4KS+4KSX4KSk","IOaVhQ==","INCw0LvQu9C10YDQsw==","IHR1eg==","0LXRgNGC0Lg=","IHRo4bqndQ==","44CA44CA44CA44CA44CA44CAIOOAgA==","LeCkhQ==","INC40LzQvNGD","0YDQsNC5","5Li7576p","IGJhxZ9sYXI=","IOS4iua2qA==","2LnYpw==","IM6Zz4k=","4Li14Lii4LiH4LmD4Lir4Lih","INin2YTZhdiv2YrZhtip","0YHRjNC60L4=","INiq2KfYsduM2K7bjA==","YXTDrW0=","4oCa2Lc=","2KLYrtix","IOuEow==","INmG2YXYp9uM2K8=","44GV44KT44GM","IGLDsg==","IOC4leC4suC4oQ==","67O07JWY64uk","0LDRgtGW0LI=","INGE0LjQuw==","IGvEsXNtxLE=","aeG7h25n","IGF5ZMSxbg==","6YGV44GE","0LXQstC4","IOW+rg==","KO2BrOq4sA==","INqv24zYsQ==","7JWE7ISc","IM60zrfOvM65zr/Phc+BzrM=","44Gr44GK44GE44Gm","IMOcTsSw","0LjRgtC+0Lw=","2LnZhNin2YU=","5ZCO55qE","IHBsw6E=","4Lib4Lij4Liw4LmC4Lii","56KO","IOmYsg==","64qU64uk","IOaXpeacnw==","IGdlw6dlcmxp","0LvQsNGC0Ys=","IG11dGxha2E=","2YjYug==","4LmA4Liu","IO+7ow==","ZWRla2k=","4LmM4LmA4LiZ","INC90LDQudCx0ZbQu9GM0Yg=","77yK","IOC5guC4o+C4hw==","IGZvdGJhbA==","IOmAgQ==","4oCM2KfZhNmF2YQ=","z4nOvM6sz4TOuc6/","IMO6a29s","5Y2a5aOr","ZHVi","xLFsxLHEnw==","65Oc66W8","54uQ","zrHOu867","5q275Lqh","INC/0YDQtdC00L/QvtGH","57W1","INC80YPQt9GL","0LXRgNGC0LI=","INmD2YbYrw==","IHVsb8W+","zp/Opc6b","Z2lsaQ==","w7xzdMO8","0L3QutC4","INmC2Yj
Yp9mG","zrnOsc66","IMWfZXI=","INC60LjRgdC7","2YHYttmE","INCQ0YQ=","zrPOtc69","IGRvc3RhbA==","IHNhxJ9sxLFrbMSx","5a625peP","xJBU","0LXRgNC40L0=","IOydtOufrO2VnA==","IGTDvG55YWRh","IG5o4bqvYw==","wqDCoAo=","zr3Ot8+Dzrc=","zrPPgc6xzrzOvM6x","IHRha3Nvbg==","IFTDvHJrw6dl","INmB2LHYp9mG2LPZhw==","5aSp5aCC","5rq2","IG90bw==","6LW1","Y2h5Yg==","IOW+kg==","z4TPjQ==","w6Fobm91dA==","4KWN4KSq4KSw","IHZsYXM=","IO2aqOqzvA==","IHRoYW5n","IG9sbWFzxLFuYQ==","INC/0L7RgNGD0YjQtdC90L3Rjw==","IHF14bu5","IO2dkA==","IOyIqA==","IOuyiOynuA==","4bq5bg==","INC30LPQvtC0","INiq2LI=","INin2K7Yqti1","INC30YPRgdGC0YDRlg==","IHThurduZw==","4b+2zr0=","IOy9nA==","0L7QstCw0L3QuNGF","4oCM2LTYrw==","IGFyYXlh","cm92w6k=","INin2K7YqtmE","0LvQuNCy0LjQuQ==","INin2KrYrdin2K8=","IGFrxZ9hbQ==","INqp2YTYp9iz","44Ki44OD44OX","IHppaA==","5YeM","5bGx5biC","IMOnZXZyZXM=","INC+0LPRgNC+0Lw=","INii2K/ZhQ==","IHTEm2xv","77yM5pys","INqY2KfZhtmI24zZhw==","IGtyYWpl","zrzOr86x","6IW/","4oCeVG8=","5rG65a6a","7Ic=","IOmS","IM6jz4TOsQ==","INis2YXZh9mI2LE=","IEdlbsOn","csOhbQ==","INCg0LXQtw==","IHZ5dHbDoQ==","INC/0YDQvtC40LfQstC+0LTRgdGC0LLQsA==","INmF2LDZhw==","IGlodGl5YWM=","44Kv44K7","IG7DqnU=","5b6z","IOuTnQ==","0L3QsNGH0LU=","IM+Dz4XOvM68zrU=","z4bPic69","0LLQsNCy0YHRjw==","INCy0LjRgtCw0LzQuA==","zIF0","IGZpbmFuxI1uw60=","5Y+K5YW2","4oCF","54u8","4Lix4Lia4LiV","44Gb44KL","zpnOms6f","zrvOu865","0YLQvtGO","2KfYudmK2Kk=","dsOtY2U=","0L7QvdGW0LI=","7KOE","5bug","INi02YrYoQ==","INCi0LXQvA==","INin2KjYstin2LE=","IFRIUFQ=","zrPOs8+BzrHPhg==","IOuMgO2VtOyEnA==","IFBo4bqhbQ==","0YbQuNC+0L3QvdC+0Lk=","fC8=","IOOCuOODow==","0YzRjtGC","0YPQt9GL","INmF2KfYrw==","IG3Em2x5","IOeIsQ==","IHLDoWQ=","4LiE4Lin4Lia4LiE","4KWIPw==","IGxpZGk=","bWFtxLF6","IOC5geC4gQ==","44Kv44K344On44Oz","4Lit4Liz4LiZ4Lin4Lii","ZXPDoXQ=","IHZpw6pt","6KGM5Yqo","4Lih4Liy4LiB4LiB4Lin","INiu2YjYp9io","IHNlcmJlc3Q=","xZnDrXo=","IO2YhOuMgA==","44CM44Gd44GG","54K4","b21paw==","IMSwcmFu","IGVyacWf","INGB0LXQu9Cw","INin2LHYstuM","44OI44Oq","IELEmw==","0LXQutGD","0KfRgt
C+0LHRiw==","IGFubGFtZGE=","zpHOmA==","IExJTkVBUg==","5pyJ54K5","0YLQsNGA","aXRsZXI=","IG7DrcW+","INCh0YHRi9C70LrQuA==","5bY=","INCy0L/QvtC7","INiv2YLbjNmC2Yc=","IOS9kw==","2LHZiQ==","65Sw","IOCkleCktQ==","INC20LjRgA==","5pGG","IOykkeyLrA==","INC60YPQsQ==","IHpsZXA=","INGA0YvQsQ==","6bO0","4LmB4Lic4LiZ","IO2A","INCd0LXRgg==","xb5pdMSb","IGLEg25n","IEhhdmE=","IOuqqOuNuA==","IEjDo3k=","IOydtOqygw==","IOyDgeyEuA==","bWVtacWf","IM64zq3Pg863","4KSj4KSo","IHNrdXRlxI1uxJs=","IFRhcmloaQ==","IHRleHR1","77yM6YCZ","INin24zZhtiq2LHZhtiq24w=","INm+2KfYrw==","4Li04LiZ4LiB4Liy4Lij","IE5n4buNYw==","INGA0L7QsdC40YLQuA==","7ZaI6rOg","IOCkruCkow==","0JvQmA==","INC/0L7RgtC10YA=","0YHQvtC8","INin2YjZhNuM2Yc=","6ZuR","IEdpw6E=","IGthbmFs","IGF2YW50YWo=","IHJ5Yg==","2K7YqtmH","INmI2LHZiNiv","0JLRgg==","z4nPg861","6riw66Gc","INCb0ZY=","IHThuqNuZw==","INi12YTZiQ==","INGD0LvRiw==","IGN14buRbg==","INCQ0L3Qsw==","INiv2KfZiA==","INGI0LvRj9GF0L7QvA==","IMSNbG92xJtrYQ==","ZGV0ZQ==","0YrQtdC8","4LmM4LmD4LiZ","4KSV4KSo","5Yik5pat","luyXkA==","z4TPic+Dzrc=","INmB2YbYp9mI2LHbjA==","IHlhxZ/EsW5kYQ==","IM+Dz4fOrQ==","IHnEsQ==","IHDFmWVu","INGE0L7RgNC80YPQstCw0L3QvdGP","w7xtw7zFnw==","IM60zr8=","xLFtxLF6xLFu","IOmihA==","0L7RgdGC0YzRjg==","INC+0YLQutGA0YvRgg==","INij2LrYs9i32LM=","IEFzcA==","INGD0LfQvQ==","INmI2KfYs9iq","ZWxlcmxl","6JWJ","INiq2qnZhg==","0YPQvNGD","4LmM4LiL","4KS+4KSm4KSo","IOKAi+KAi+KAiw==","IGFsxLF5b3I=","IO6h","2YXYr9ip","IM+DzrXOuQ==","IOi/mQ==","IMWeZWhpcg==","0LXQvdGC0LDQvNC4","44K/44Or","4Lir4Liy4Lii","0LDQudGC","IGhhcmM=","44CC44GK","INiq2KPYq9uM2LE=","4Liy4LiK4LiZ","IHRo4bqtbQ==","IOa/","IG3FqWk=","IHBydm7DrW0=","INCx0LDQs9Cw0YLRjA==","44GV44KJ44Gr","YmnDqm4=","5bqU5b2T","7J2067KE","IHBvdcW+w610","IG9rYW3Fvg==","ZXNpbg==","dsSbbA==","INi22Yg=","6Luf","LdC3","4KWI4KSk","6KiI566X","cmFiaWxpcg==","INCg0L7RgdGW0Zc=","IHBsYXTDrQ==","IGRvc3DEm2w=","INix2LbYpw==","IG5vdsOpaG8=","INC90LDRhtC40L7QvdCw0LvRjA==","INCQ0LE=","44GM44GC44Gj44Gf","IOu5iA==","4oCM2YU=","5bGe5LqO","IHRhbmU=","2Yr
Yp9mH","IM6yzr8=","IOuKpQ==","44OH44Kj44O844K5","INiw2YPYsQ==","IG9idnlrbGU=","IGJpcmluY2k=","INin2YTYstix","7J2067mE","INil2K8=","IEVrb24=","0J/QvtC7","INCy0LXRgNC+0Y/Rgg==","IHlhcmFybGFu","INCw0YDQvtC8","IOmE","IGlkZGk=","acSNa2E=","c3RydWtjZQ==","bcO8xZ90w7xy","z4XPhM+M","66Gx","IGFsbWFrdGFkxLFy","0LXQvdC40Y/QvNC4","4Li14Lii4LiZ4Lij","4LmH4LiZ4Lin","0LjQutGD","0LXQvdC60LA=","4oCZeWk=","IHBvaG9k","INiy2LE=","IHjhuqV1","IOC4oOC4suC4qQ==","wqDQng==","IM60zrnOug==","INC90LDQt9C40LLQsA==","5Y+q6IO9","5aSn6YeP","IMSR4bq/","IOesrOS6jA==","IGtpxZ9pbGVyaW4=","IGRvYnLDqQ==","6am+","IGTFr2xlxb5pdMOp","66Gk","zrzOrc69zr/PhQ==","IHRyw7o=","IGJpw6dpbQ==","INCd0JA=","IOW+jA==","IGR1eWc=","5Z6C","0IbQhg==","IGV0bWV5ZQ==","INmE2KjYp9iz","INC00LLRlg==","IOq4tA==","0YbRltC50L3Qvg==","zrrPhM6u","772d","INGE0LXQstGA0LDQu9GP","5a+r","IOqyqA==","IHnEsWxsYXJkYQ==","INC30YPQvw==","IG9iY2hvZG7DrQ==","INin2LbYp9mB2Yc=","0LLQtdGA0LY=","IOaghw==","2KzYp9is","INix2YjYs9uM","IHN0YW5kYXJ0","w6lydQ==","KeydhA==","0LTQtdC60YE=","IOKImg==","IMSwbmdpbGl6Y2U=","6Iqd","6Lqr5LiK","2J/Ynw==","IG3hur0=","zpHOlA==","0LXQvdGB0LjQsg==","4oCZdGE=","4LmJ4Liy4LiB","zp/Om86fzpM=","5LuY44GR","IHPDoG5n","IOCkueCknw==","0YvRiNC70LXQvQ==","INiu2LfYsQ==","INC90LDQudGC0Lg=","55u45L+h","z4nOtA==","4KSU","IGRvcGFk","4LmE4Lif4Lil","5oG1","7YKs","xLHFn21h","44GP44KM44Gf","IG5hcHJvc3Q=","INGB0L7RgdGC0LDQstC1","INmI2LPYtw==","4LmV","6ZaL55m6","INC00LXRgNC10LLQsA==","LdCU","4LiH4LiK","4Li04LiV4Lii","INin2YTZgtin2YbZiNmG","44K544Kr","bMOtxb4=","INCw0L3QsNC70LjQtw==","IHByb2Jsw6lteQ==","5paH5a2m","55eF6Zmi","0YHQtdC0","77yM5bCP","INi52LTZgg==","44Gw44GL44KK","INi52YLYrw==","2K3Zitip","IOuwlOuejeuLiOuLpA==","aW5jbHU=","IOuTnOumveuLiOuLpA==","5Y2r55Sf","INCy0LjQtNGD","4Li44Lia4Liy4Lil","0YDRg9C60YI=","INC+0YHQstGW0YI=","IHZlbGvDvQ==","IGNodMSbbA==","5omT5byA","INC30LDQutC+0L3QvtC00LDRgtC10LvRjA==","0LDQvdGB0Lg=","7LaY","INmF2LHYp9is","5YGc5q2i","INCy0L7QvdC+","7KCV7J20","IHJvenNhaA==","IOaZtA==","IHphamlzdA==","wqDQvA==","dMSx
xJ/EsW7EsQ==","IGhpem1ldGk=","Ls6R","INmF2LnZhdmI2YTYpw==","IMW+aQ==","IGfhu41u","6IyC","IGh1eg==","zrbOtc65","4KWJ4KSf","INC40LfQtNC10Ls=","7J6W","IOuUsOuluA==","IGtpYQ==","IHpuxJtuw60=","INC+0YDQs9Cw0L3QuNC30LA=","2KfYstin2Ko=","IHJlxb5pbQ==","INCy0LXQvdGC0Lg=","YsOhY2g=","INC+0LTQvdC+0LzRgw==","IGtpdGFi","IGZyYW5jb3V6","INij2YQ=","INiz2LHZiA==","2ZHZhA==","INC80LDQvQ==","67CN","INC60YPQtNCw","2Y/Ysw==","44CC5q2k","2KfYtNip","4LiC4Lit4LiH4Lic","5Li75Lu7","0LjQstGI0Lg=","IOC4geC4o+C4gQ==","0LXQutGB0Lg=","0LjRgtC10YI=","INij2YTZgQ==","0LDQvdC40LzQuA==","44Oa44O844K4","INC/0YDQsNCy0LjQuw==","5aqS5L2T","0Y7RidC10LU=","5LiA5Lq6","zrLOvw==","7Iu4","0L7Qt9C90LA=","5aSJ5pu0","INmF2LTZh9iv","5rOV5Lq6","IEJha2FuxLE=","INGF0L7Rh9Cw","IM6xzr4=","IHZlcmlsbQ==","IGtvbnVz","zrzOtc69zrc=","IOmmrA==","IOyLpOygnA==","IGplZG5v","INCx0LDQsQ==","5YON","5piv5LiA5Liq","LdC1","IHDFmWVrdmFw","4Lit4Lie","IFlvbA==","INGD0YHRgtCw0L3QsNCy0LvQuNCy0LA=","6rK8","IOS7tg==","2KfZhNi0","INC+0LHRg9GH","5Zib","INGF0L7Rh9GD","INCV0LI=","0YTQvtGA0YI=","IOCksOCkqA==","4oCeVg==","6Jyc","IGRvbWE=","5pSv5o+0","INin2K7Yqg==","5b6q","4KWC4KSa4KSo","4KS+4KS54KSo","IOWkjw==","INin2YTYo9mF2LE=","INCx0LXRgNC10LzQtdC90L3QvtGB0YLQuA==","IFRo4buxYw==","6aOO6Zmp","IMO8bGtlbWl6","55Wq5Y+3","0YHRgtGA0LU=","0YjQu9C+","INi12KfYrdio","zrnOvc61","IEvEsXM=","IFByYWh5","5rm/","IHbDvW0=","55uS","zp/OlA==","44Gg44Gq","IHDFmcOtbGXFvml0","IOyWuOygnA==","INGI0LLQuNC00LrQvg==","IHNpdHVhY2k=","5YWD57Sg","xLBURVPEsA==","IFZhaw==","IG5lcmVkZXlzZQ==","aWlpaQ==","0YDQsNC30LQ=","INC/0L7Qu9C40YI=","INC/0L7Qs9C+0LQ=","INC/0YDQvtGG0LXRgdGB0LU=","INC80LXQvdGI0LU=","5LqM5Lq6","INmF2YjYp9i3","IHDFmWlr","6Leh","IHNlcmc=","INGA0LDRgdGB0YLQvtGP","0LjRh9C90L4=","IM6UzpfOnA==","wqjYtw==","2LXYqNit","4Liq4Liw4LiU4Lin4LiB","2K/YsduM","a8WvbQ==","56eB44Gv","IHR2b3I=","4KWN4KS14KS1","IHDFmWl2","IO2PtA==","IHN0w6F0dQ==","IGVkaWxtacWfdGly","2K3ZhQ==","INCx0YPRhQ==","4Liq4Liz4LmA4Lij","INiq2YjYttuM","44Gd44KM44Gv","IOCkheCkteCkpw==","6Z6L","4oKsCg==","I
Om6","IMSMZXM=","IHBvcHJ2w6k=","77yM5Zug","IGFsbcSxxZ8=","bGFs","INiu2YjYqNuM","IM66zr/PgQ==","7Jq064+Z","bWF5xLFu","IGFrdGlm","INin2YbYrNmF2YY=","INGB0YLQsNC6","INGB0YLQsNGA0LA=","2YTZgdip","IHBhcsOnYXPEsQ==","INC60L7RgNC/0YPRgQ==","44CB6auY","IS4u","IM6gzpHOnQ==","INmH2YbZiNiy","aW9uw6FsbsOt","IHByw6F2bsOt","wp0=","INiq24zYsQ==","IOWfjg==","INC30LPQsNC0","IHNhbGTEsXLEsQ==","5p+l55yL5pGY6KaB","6auq","2YHYtdmE","44GZ44G544Gm","0LXQstC+","6rSA66as7J6Q","IOyYhg==","dWRpY290cw==","2YjYsdmG","IGNlbGtlbQ==","44Kk44K6","7Iqk6rCA","6LKp5aOy","IO2MjOydvOyyqOu2gA==","66Kw","IGVuZXJnaWU=","ZXNpZGly","IG1p4buHbmc=","6Zm3","INCz0LDRgNCw","IGJpbGl5b3I=","542y5b6X","0LXRgtC10YDQsQ==","4LmI4Liy4LmA4Lib","IM68zrHOts6v","IHpwcmFjb3bDoW7DrQ==","0YHQvA==","IGhhbGE=","INiy2YjYrA==","INCy0ZbQtNC90L7Qsg==","4LmA4Lir4Lih4Liy4Liw","INCg0LXRgdC/0YPQsdC70Lg=","5Ye65ZOB6ICF","0YnQuNC90Lg=","4Lix4LiZ4LmA4Lib","IHTDvWRlbg==","INio2YrYqg==","0YHQutC+0LzRgw==","INmH2YjYp9m+24zZhQ==","0L7RgdC90L7Qsg==","6bif","IHNvdWtyb20=","IGZhaXo=","IGRlbW9r","IGt0ZXLDqW0=","IOuFuQ==","0LvQsNGH","INC+0YLQstC10YLRgdGC0LLQtdC9","IO+8vDo=","IM67zr8=","xIxlc2s=","6rCA7JqU","IOODig==","IG5odeG6rW4=","INGB0LjQu9C4","INCc0L7QvQ==","IMOnYXA=","IFJvd0JveA==","INC80LDRgdGC","INCc0LA=","INC00YDRg9Cz0L4=","INij2LQ=","67Cp7Iah","INC/0ZbQtNC/0LjRgQ==","6Ieo","5Ymp","IGhp4buDbg==","INmC2LHYp9ix2K8=","aXN0cmF0","0J/RltC0","z4TOtc+BzrE=","IHBvemTEmw==","IGJhxZ90YQ==","5aSr5Lq6","0LvQuNC90Lg=","INC60LDRh9C10YHRgtCy0LA=","IGt1cnR1bA==","IOyijA==","44Gr44GK44GR44KL","5Zyw5Y2A","INGH0LDRgdC+0Lw=","7LWc6rOg","IG5nYW5n","2KfZh9iv","INCo0LXQsg==","IHDFmWl0b20=","IGNo4bqlbQ==","INCc0LXRgdGC0L4=","INGB0L7QstC10YDRiNC10L3QvdC+","w61jw60=","reW3ng==","5Yib5paw","5LqU5pyI","INin2LnZhdin2YQ=","INCy0L7Qt9C80L7QttC90L7RgdGC0Lg=","INC/0YDQvtC00L7QstC2","bsSbdA==","INCd0LDQv9GA0LjQvNC10YA=","INin2YTYr9mF","IOC5geC4muC4mg==","55Sf55qE","INGF0LDRgNGH","IFNvbnXDpw==","IHLFr3puw6k=","INin2LA=","4LiV4Lit4Lia","UMWZZWQ=","INC00LXRgNC10LLRj9C9","67SQ"
,"IOuKkOuC","2KzZhdmK2Lk=","IELDtnlsZWNl","6LWP","INio2LPZig==","IMOHYcSf","INiq2KfbjA==","IG5lanZ5xaHFocOt","6Jap","z4fOtc60z4zOvQ==","IOuTseydmA==","ZXlo","5paZ55CG","2KfYqtmH","5omr","IOWp","INC/0YDQuNCy0LXQtA==","5om2","IOqyrA==","INin2YXbjNix","4KS+4KSv4KSy","5qGR","4LiZ4LmA4LiV","0LjQu9Cw0LrRgtC4","5a625LyZ","IGJ1bHVudXlvcg==","eXNh","woY=","IELEsFI=","7Yak","4KSC4KSX4KSg4KSo","zpTOtc69","4KWM4KSV4KSw","6ZaT44Gr","INC80L7QsQ==","IE1vcmF2","6KeE5YiS","INGB0LLRltGC0ZY=","dWx0cw==","IHplbcOt","wqAgwqAgwqAgwqAgwqAgwqAgwqAgwqAgwqA=","INCf0L7Qvw==","44GC44GS","IHBvbW9jaQ==","INC30LzRltGB0YI=","5Li75Lq6","IFPEsQ==","44Ob44OG44Or","INGD0LLQsNCz0YM=","5buz","4LmA4Lih4LiV4Lij","ZXN0bGk=","IGxv4bqhdA==","44Ki44O8","IM6UzrU=","IGJ1bmxhcsSx","IOeCueWHuw==","IELDoGk=","IOS4lg==","IOqzoOqwnOulvA==","INCt0YLQvtGC","IG1lbW51bg==","IOClpAo=","INC40YHRgtC+0YDQuNC4","IOywqQ==","4KWn4KWv","INCe0LTQvdCw0Lo=","IHZlZGU=","z4bOrc+BzrXOuQ==","w6Ji","54q25Ya1","5Y2P6K6u","IOqwnQ==","0LXQstC40LQ=","am11","INC60L7Qu9C40YfQtdGB0YLQstCw","w5s=","acSNZQ==","IGZpcm1hbGFy","6ICA","0LrRltC9","IOq1reuvvA==","IOuqqeuhnQ==","IM6azrHPgQ==","IGhpc3NlZA==","77yr","IFTDqm4=","INGC0YvRgdGP0Yc=","2K3Zitit","INCy0L/QvtC70L3QtQ==","IFPEsW7EsWY=","IM68zrfOvQ==","IO2RuA==","INin2YTYt9io2Yo=","INiy24zYqA==","INC/0YM=","IHByYcW+","7JeG64qU","zrjPgc+J","IGnDp2k=","INCx0ZbQuw==","0KDRkQ==","IOy2leq1rA==","IGzhuqE=","IOODleOCoQ==","IOiW","zrzOsc+Ezr8=","6YeR5bGe","w6FsaQ==","INmB2KM=","IEthcmxvdg==","IFrDoXA=","44Oq44Oz44Kw","YWJpbG1law==","INCh0Lg=","IGPDrXJr","IGvhu4tw","IOCkkeCkqOCksg==","INmI2K3Yr9ip","44OL44OD44Kv","IG7GsOG7m25n","INCw0LrRgtGD","5bid5Zu9","IG7DoXpldg==","INGA0LXQvNC+0L3Rgg==","INGA0LjQvdC60YM=","IM+AzqzOvc+J","z4TOuc66zr8=","IOyCvOyEsQ==","INGB0LjQvNC/0YLQvtC80Ys=","INGA0LDQvdGW0YjQtQ==","IErDoQ==","INGB0YfQuNGC0LDQtdGC0YHRjw==","INC/0L7RgNGW0LI=","INCc0LDQuw==","6Z2i56ev","INmE2Lo=","INis2LTZhg==","INC90LXQtNC10LvQuA==","IOymneqwgA==","44aN64+Z","IGzGsOG7o3Q=","IMSQ4buLbmg=","IOC4reC4reC4m
eC5hOC4peC4mQ==","IHlhcGFyYWs=","IMSRYWk=","INC+0YTQuNGG0Lg=","IM61zrzPgA==","zr7Otc65z4I=","INC60L7QvdGE0LXRgNC10L0=","IGFyYXPEsQ==","4LiV4Liy","IOu0kA==","0L7QstCw0L3QsA==","7KeA6rCA","IFbDoW0=","4KS/4KSc4KSo","IOe8lui+kQ==","zrbPjA==","IM+Ez4HPjA==","IMO8Y3JldHNpeg==","INqp2KfZhdmE2Kc=","Ojo6Lw==","4LmMCgo=","IOmWoumAow==","IGthcmE=","INCx0LXQt9C/0LXQutC4","IHptxJtueQ==","IOq/iA==","dnJk","bGnEn2luZQ==","INin2YbYqtiu2KfYqNin2Ko=","INC00L7RgdCy0ZbQtA==","IGt0ZXLDqWhv","0LXQvdGC0L7QvA==","6rO167aA","7KCd","IOunjOyhsQ==","IOaR","5Ye65Y+j","5bu66K6u","0L7RgtGP","INKR","7ZSE66Gc","IGdpw7M=","44K344Kn","IM67zrXPgA==","7ZWY66Ck","IHlva3Nh","IGlzdGlo","77y2","INin2YTYudmF","INqp2KfYsdqv2LHYrw==","4LmA4Lie4Lij4Liy4Liw","IG5vdsO9Y2g=","INGB0L3QsA==","IHNhbmE=","4KS14KSk","xLHFn21hbg==","5Y+m5aSW","7Lac7J6l7IO1","5amm","INC60L7RiNGC0ZbQsg==","INmI2KfZhNmG","INio2KfZhNil","IOaKgA==","INC80L3QvtC20LU=","4KWC4KSh","IEPhu6Vj","IGV2ZXQ=","6IGU5ZCI","IMKgwqAgwqDCoCDCoMKgIMKgwqA=","55qE5b+D","IGTDoW5n","2KfbjNiz2Yc=","IGVya2Vu","5rOh","2KfYptio","IHlhcMSxbGTEsQ==","IFF14bqjbg==","5pe25Luj","7Juo7Ja0","INCz0ZbRgA==","b2tvag==","2YPYsdip","0Y7Qug==","IHbDvWo=","IGhvZGlueQ==","INC10LvQtdC60YLRgNC+0L0=","bcSxeW9y","IOyeiOuLpOuKlA==","4LmJ4LmJ","0LjRgtC10LvRjNC90L7QtQ==","IHnEsWxsYXI=","xI90ZQ==","IMSNaW5ub3N0","4Li44LiT4Lig4Liy4Lie","7ZOo","0L3Qsw==","4Li54Lij4LiT","INC/0L7RgNGP0LTQutC1","IOuLueyLnA==","INCc0L7RgdC60L7Qsg==","IGtyZWQ=","dXJ1bQ==","INGC0Y8=","2qnZhtin2YY=","0LTQuNC4","0YDQuNC80ZbQvQ==","INC+0YDQs9Cw0L3QuNC30Lw=","IOmbhg==","zrnPg8+Ezr8=","5L+h55So","5Y2B5Zub","4LmI4LmD4LiK","INGD0LLQuNC0","4Lix4LiH4LiB4Lil","5Y+m5LiA","44Or44OV","4Lix4Lia4Lib4Lij","IMOcc3Q=","6Kqs5piO","0LLQsNC5","0LDRh9C1","5qyj","IGthdMSxbA==","IENlbQ==","INin2YTYrNmH","INCz0YDRg9C3","INC30LDRgdGC0LDQsg==","Y8SxbGFy","INGF0L7RgtC10Ls=","IHNuw61t","77yM6KKr","INCy0LjRiQ==","IGRlbW9rcmF0","4KWH4KSf4KSw","5ZGo5bm0","IG9kcGFk","IGRhxYg=","IOS7ow==","4LmH4LiZ4Lit","INGB0LrQvtC70YzQutC+","IM6xz4Y=","IHDFmW
VzdsSbZA==","IOWTgQ==","INC40L3RhNC+0YDQvNCw0YbQuNC4","55uX","44G+44Go","INGB0LDQvNC+0LI=","IHBvY2l0","IO2OuOynkQ==","INGB0LzQtdGB0Yw=","IHBvamnFoXTEm27DrQ==","44Gu44KC","4LmI4Liy4LiB4Liy4Lij","INuM2YjZhg==","IOq4sOyWtQ==","aWNrw71taQ==","YWxhY2U=","6Zu75b2x","0Y7QstCw0L3QvdGP","55u45ZCM","IOOAgw==","INC00L7QutGD0LzQtdC90YLRltCy","77y5","5Yiw5bqV","w7N6","IEFobWV0","INmF2LPYp9it2Ko=","IGhsYXZvdQ==","w7xsZWJpbGly","44CC5L2g","4LmH4LiB4LiK4Liy4Lii","wqTCpA==","IOaEjw==","IGNo4bqtbQ==","LtC0","IGNjYQ==","IG9sdW1zdXo=","wp4=","54qs","INC/0L7RgdGC0L7Rj9C90L3Qvg==","IC4qKioqKioqKioqKioqKgo=","INin2LPYqtix","INC00LDQu9GM0L3QtdC5","xa9y","5L+d6K23","0LHQvtGA0LDRgtC+0YA=","w7c=","z4PPhM6xzr0=","INmB2YrZhNmF","w6dlaw==","7J6Q6riw","IOalrQ==","0L3RltC/","6ImH","IG1vY2k=","7Jy1","66as6re4","INCa0L4=","6YKj6YeM","INCh0YLQsNGA","INiq2YjYp9mG24zYrw==","IG5ndXnhu4du","IOC4quC4suC4oeC4suC4o+C4lg==","0ZbRh9C90LA=","IOiiqw==","4Li44LiV4Liq4Liy4Lir4LiB4Lij4Lij4Lih","INi52LXYsQ==","IMOcTsSwVkVSUw==","IHRlaGR5","INmI2LXZhNin2Ko=","5L+d6K+B","IEV1ZGljb3Rz","IM6gzq0=","5bu66Kit","IOyghOq1rQ==","INit24w=","44Kk44OE","INit2KfYtdmE","INis2YbZiNio24w=","44CB5pel5pys","w5k=","IOC4l+C4suC4hw==","INmG2K3ZiA==","2KfZh9mK2YU=","5b6M44Gr","4LiI4Liw4LmE4LiU","5Yeg5Liq","4KWB4KSB","64yA7J2Y","IGzDoG4=","7JuU67aA7YSw","xqA=","INC10LTQuA==","IHNwaXM=","5pyJ5LuA5LmI","IG5lYnlsYQ==","IO2VtOyZuA==","66Gc67aA7YSw","0LDRgNGF","bGlsaQ==","IO2VmOujqA==","bWFtYXPEsQ==","0YfQsNC10YI=","INit2KfZhNip","IELDtmzDvG0=","55u46Zec","INC00YDRg9Cz0LjQvNC4","55uj552j","4KWI4KSc","INi52KjYr9in2YTZhNmH","IOi/ng==","INCc0LjQvQ==","IOq4sOuLpA==","IOqzteqyqQ==","6KGM5YuV","4KS+4KSu4KSV","5rGC6LSt","5qih5Z6L","0YHQvtGA","cmFuZQ==","4LmH4LiI4Lie4Lij4Liw","INmF2LPbjNix","6KOF572u","7JWk","bsSbasWhw61jaA==","zrHOu8+Nz4TOtQ==","IEhha2s=","6K6/6Zeu","INGC0LXRhw==","IEzhu4tjaA==","INiv2LTZhdmG","zow=","IM+AzrU=","INC30LDQvNC+0LI=","IGJpcmlt","44K344K544OG44Og","IM+Az4HOv8+K","iuydgA==","0LLQuNCz","IOuPheydvA==","INGA0LXQstC+0LvRjg=="
,"IOmmmea4rw==","IGxleg==","INio24zZhdin2LE=","IGR1eWd1","IOubsA==","IGFtYWPEsQ==","4KWN4KSv4KSq","IOyekOyEuA==","2KfZiNuM2LE=","IHNwb2xl","w5ZM","INis2Lk=","2YTbjNmF","44Gq44Gp44Gu","4Lib4Lij4Liw4Liq4Lia","IG5hxaFpY2g=","INC/0YDQtdC00YHRgtCw0LLQu9GP0LXRgg==","INC30LTQvtCx","IG9ib3U=","2K7ZiNin2YY=","44Os44OD44OI","0L7QtNC10LnRgdGC0LI=","2qnYsduM","INin2KrYp9mC","INGN0LrRgdC/0LvRg9Cw0YLQsA==","772i","INmE2YTYpQ==","INin2YTZhti42KfZhQ==","IO2UhOuekeyKpA==","xLFzxLF0","5a2Z","IMW+w6FkbsO9","2YLZiQ==","4Lix4LiB4LmA4Lij","IOuyoOyKpO2KuA==","IOODqw==","5Y+U","bmlja8Op","IM61zrnPgw==","44Or44OJ","INiv2KfYsdmF","INCz0LXQvA==","IOWtuA==","4KS+4KSo4KS44KSt","0LDQu9C40LfQuA==","0L7QstCw0L3Rlg==","INC+0LHQvg==","7KCE7JeQ","IFNpbmg=","INmG2Lk=","INC+0LHQu9Cw0YE=","z4XPgA==","6IO2","IGF6YWx0","5YWo6Z2i","IEtyb23Emw==","IEN6","5oql5ZCN","IG7DoXNsZWR1asOtY8Ot","INC90LDQv9GA0LjQutC70LDQtA==","44Gq44GR44KM44Gw","4Lit4Liy4Lii","55yL55yL","IOC4geC4o+C4geC4jg==","ZWRub3U=","2KfYstmE","44CB5pys","0LXRgdC4","IHRhcno=","44CA776K","IHJvenVt","44Kr44O844OJ","IOCkh+CklQ==","IHByb3N0xJs=","IM6Tzro=","56m0","IEjDvGs=","bGF2w60=","6r8=","6bih","INCy0L7Qt9C90LjQutCw0LXRgg==","0Z/Rn9Gf","INC/0L7QvdC40LzQsA==","0J/Qng==","44GU44GW44GE44G+44GZ","44GF","IHRydmFs","INC00LDQu9C10LrQvg==","INmG2YrYsg==","INCy0YvRj9Cy","4Li04LiX4Lii4Liy","IGzhu5c=","4LmA4Liq4LiZ","INGB0YLQtdC90Ys=","4KWN4KSh4KSy","IGplZG5vdGxpdsO9Y2g=","INC/0YDQuNCx0LvQuNC3","aWthdA==","INC/0L7QtNCw0LI=","2LHbjNiy","INii2YbYrNin","56S+5pyD","IOCknOCkqOCkteCksA==","IGFpbGU=","4Li14Lib","IOiF","44Gn44GX44KH44GG","0KHQng==","44CB44CK","7J2867O4","b3Zhbm91","zr3PjA==","5bGl","2LnZhNmC","IOyJvQ==","INCz0LvQuNCx","IOqyg+yeheuLiOuLpA==","INC90LXQvtCx0YXQvtC00LjQvNC+0YHRgtC4","INiq2K7Ytdi124w=","2KfYs9ix","77yM6K+0","INCd0ZY=","IHZ5cm9i","0YjRg9GO","5oi/5bGL","wqDQlw==","4LmA4Lie4Lil","5YaF6YOo","INiv2YTYp9ix","INC/0YLQuA==","xaF0aQ==","IGFyYcWfdMSxcm1h","INC30L3QsNC60L7QvA==","IM61zrvOu863zr0=","IOG6pW0=","0YDQsNC6","44Kt44Ol","IHRo4bqtbg==","
6K2c","66qF7J2Y","IHlldGVy","INC90LDRgdC70LXQtA==","INCa0LDQvQ==","INCy0YvQsdC40YDQsA==","IM6jz4c=","INGC0LXRgNC80ZbQvQ==","IOa0uw==","INin2YTYqtmB","IEphcG9u","6YKq","67aE7ISd","INC70LjRhtC+","IG3Dqg==","4LiE4Lin4Lij","IOCkheCkl+Cksg==","INmH2Kw=","65+s7Jq0","INCy0L7QudC90Ys=","2KfZiNix2LLbjA==","INGB0L/RgNGP","54S8","6KKW","IGnDp2VyZW4=","IOuFuOuemA==","INCn0LXRgNC10Lc=","2YjYrNmI2K8=","0Y/RgtC40LU=","4Lit4Lil4Lil4Liy4Lij","6Leo","IE1pbGxp","5Lu25LqL","IOacnQ==","zrLOv867zq4=","INC60L7Qsg==","INi02YfbjNiv","5LiL5Y67","IOygleyLoA==","0L7Rh9C60YM=","77yM5L6/","zrPOus61","INmF2KjYp9i0","IGF5xLFuZGE=","IOS7uw==","0YHRgtC+0YDRltGP","5Lit5a2m","57iu","INGE0ZbQuw==","44CB44KE","IOaYpQ==","IHRlcsO2cg==","INC/0L7QstC40L3QtdC9","IG1pbGlvbsWv","INmB2KfYsdiz","INCy0LLQvtC0","2LfYp9mE","IOq2geq4iA==","IHVrw6F6","55Sc","5pqC","2LXYqg==","0JrQvtCz0LTQsA==","IOCkruCksg==","zqzOvc6x","INC00L7QutGC0L7RgA==","INC60L7QvNC80YM=","INC/0ZbQtNGB","IOC4geC4o+C4geC4juC4suC4hOC4oQ==","wqDQsw==","IMO2bmU=","IMSQ4buB","5LqL5YuZ","IHNyb3Y=","IM6szr0=","64+E6rCA","YWNhxJ/EsW0=","0LrQvtC7","IGLhu5Np","INm+2LHYr9in2LI=","IOS4mg==","64uk7Jq0","INC/0YDQtdC00LXQuw==","INGE0LXQtNC10YDQsNC70Yw=","INin2YTYo9mD","44CA44CA44CA44CAIOOAgCDjgIA=","IHRy4bqlbg==","INC00LvQuNC9","INGW0LzQvw==","IHNtxJtyZW0=","sOuLpA==","IHLhu6tuZw==","aWNpw6FsbsOt","6KGG","zrzOuc6/","INin2K/Yp9ix2Yc=","INGC0YDRjA==","IMSwbGk=","4Lih4LiZ4LiV4Lij","4KWN4KS14KSa","0LXRgNC+","IEtVUg==","c2vDvW1p","zrTOrw==","dXRpbg==","IHZlcmlsZXI=","4Liq4LiW4Liy4LiZ4LiX","INC30LDRhdC+0LTRltCy","INmB2LHZiNiv2q/Yp9mH","IOeUsQ==","4Li54LmB4Lil","6YOR","IEpha28=","INGA0LDQt9Cy0LjRgtC40LU=","4KSJ4KSo","2YrYr9in","IOC4nuC4pOC4qeC4oOC4suC4hOC4oQ==","66y87J2E","66CA","LdCb","44CC44GC","INC/0L7QtNCy","77yJ77ya","6K665Z2b","2KfYpti5","44KS44GZ44KL","INij2LU=","0YfQuNC60Lg=","INGB0YLQuNC7","bGV5aWNp","0YHQuNC70Yw=","IGJ1bHVuZHU=","INGB0LXRgNC10LTQvtCy0Lg=","4KSC4KSw","INin24zZhtis2Kc=","5Zyt5Zyt","IG15xaFsZW4=","INGA0L7Qt9Cy0LjRgtC+0Lo=","IGl5aWxlxZ8=","INC
y0ZbQtw==","64KY66y0","5oSP6KeB","zrnPg8+Ezrc=","44OD44OE","5LqL5pWF","bWFkxLHEn8Sx","IOCkheCkquCksA==","INqG2LHYrg==","INC/0LvQsNCy","5Lul5p2l","IOupgA==","VHV5","44O844ON","INC40LfRg9GH","IHN0xZllZG7DrQ==","6K++56iL","IOq3uOuFgOuKlA==","INC00L7Qs9C+0LLQvtGA0YM=","IMSR4buLY2g=","IGthcmFyxLE=","5ZC0","2YPYp9mF","INC/0L7RgtC+0Ls=","0LLQvtC6","IETDvHo=","zqTOsQ==","5bU=","4oCZbmE=","0LDQtNC2","IGTFmcOtdmU=","5qKo","IEF2dXN0","5Yqb44KS","4LmA4LiB4Lil","INC/0L7QsdC10LQ=","INC/0YDQuNGH","INCR0ZY=","5a2k","INCg0LXQsw==","IHlldGnFnw==","INC90LXRjg==","IGLDrWw=","7JeG7J2M","IMSwdGFseWE=","0JLRgdC1","5b6M44Gu","IGplasOtbQ==","INCy0LjQs9C70Y/QtNGW","0L7Qs9GA0LDQtA==","IGJvaGF0","IOWFiw==","INC00LjRgtC40L3QuA==","0LvRj9GC0L7RgA==","0LzQsNCz0LA=","64uI7Iqk","INCg0LDQtNC4","z4DOv8+Fz4HOsw==","Jlplcm9XaWR0aFNwYWNl","IHN0cnVr","5pCe","IOOBneOBruS7lg==","7J247J2E","INC/0YDQvtCy0LXRgdGC0Lg=","5ryr55S7","IOeOqeWutg==","INmI2LHYsg==","INGB0LLQvtGX0Lw=","IExSVg==","4Li04LiV4Lig","4KS44KSk","IO2dlA==","4peP4peP4peP4peP4peP4peP4peP4peP4peP4peP4peP4peP4peP4peP4peP4peP","IHR2b8WZw60=","INCf0J4=","6auY5bqm","Lmh3cA==","4LiV4Liz4Lia4Lil","INiv2LM=","7IiY6rCA","7JSp","77yJ44CCCg==","5ouz","IGzDtA==","IEvDvGx0w7xy","2KfYt9i52Kk=","IGt1Y2h5","IHN0cm9q","zrzOtc69zr8=","INC60L7QvdGB0YLRgNGD0LrRhtC40Lg=","5bCP5a2m","IOWNmg==","IOiAgw==","IGFzxLFs","5oiR5YCR","2K7Ysdin2Kw=","IE9udW4=","IOe+juWbvQ==","4KWC4KSs4KSw","IG11xb5p","5aer","INCy0LE=","INC00L7QvNC1","INCw0Lw=","IGt1cnU=","5rGX","bGVkacSfaQ==","IHbhur0=","5b6T","INCz0YPQsdC10YA=","INGB0YLQsNC90L7QstC40YLRjA==","IHplbcSbZMSbbA==","2YTZhA==","IHJhbWVu","IHByxa9ixJtodQ==","IGJsb2s=","w712YWw=","dm91","zr3OrA==","65SU7Iuc","0YbQuNC+0L3QvdGL0LU=","IOqyjOyLnO2MkA==","44Oz44OH44Kj","5LiA57qn","0LjRh9Cw","INiz2LHbjNin2YQ=","aWxpbg==","4KS+4KSv4KSo","2YbZiNuM2LM=","INCU0Lg=","INin2K/YqNuM","INGD0LTQvtCy","INCX0LDQvA==","4KWB4KSt4KS1","0YHQvtC6","INGA0LDQudC+0L3QtQ==","IEVL","5oKJ","IHNvcnVtbHU=","IHp2ecWh","4LmA4LiL4Lit4Lij","aW7DocWZ","IHVkcsW+","0L
3QvtCy0LjQtA==","IHNwb2xlxI1uxJs=","5oiQ5LqG","77yk","4Lix4Lie4LiX","0LDRiNCw","INmG2KfYr9mK","4LmD4LiZ4LiX","5aGa","INiz2qk=","44OB44Ol","INC80LDRgNGI","0LDQu9C10L3QvdGP","INit2YXYp9uM2Ko=","44Oz44K4","4Lij4Lip4LiQ","INC60YDQtdC8","IEthxb5k","6r0=","IHBhcmxhbWVudA==","IMWfdW4=","IGt5cw==","z4TPgg==","6rCc7J2Y","IHZlbGljZQ==","IGNlc3R1","2LjYqQ==","6K+K","IMO6dA==","INiu2YjYsQ==","INCi0LU=","INC+0LHQu9Cw0YHRgg==","4LmI4Lit4LiV","IEFjYWRlbQ==","44CC5pys","IOmiqA==","0YHQtdC9","44Oi44OH44Or","INC30LDQstC00LDQvdC90Y8=","44G+44KM","0LzQvtGC0YDQtdGC0Yw=","IGto4buV","4LmI4Lij","2K/Ysdiz","IMSMZXNrb3Nsb3Zlbg==","IOiuoQ==","INGC0LDQutC+0Lw=","INmE2KfYudio","IE11aGFtbWVk","INmF2YTZgQ==","INmI2LPZhNmF","44K344Oj44Or","INC+0LrRgNCw","4KWB4KSu4KSk","IOuIhOq1rA==","IG5lZGVuaQ==","IOuCoOynnA==","L2tt","INC00LXQvNC+0L0=","INi12YbYp9uM2Lk=","bWFzxLFuZGFu","5YmN44Gu","5oiQ57up","4KSy4KSX","IOWMhQ==","4Lit4LiB4LiI4Liy4LiB4LiZ","2KfYr9in","IGF5bMSxaw==","INmF2YLYrw==","IMO2bmVtbGlkaXI=","IOyInOqwhA==","IGRpbmg=","IG7DoWt1cA==","aXN0aWNrw6k=","5bqf","7Iqk7Yag","IGRueQ==","IOyeiOuPhOuhnQ==","7JuQ7J2Y","44OV44Os","cG96","INC10LI=","IGTDvMWfw7xy","4KWN4KSw4KSa","IOqysO2YvA==","INGG0LXQvdGC0YDQsA==","5Z+L","77+j772A","5q2m5Zmo","4LmI4Liy4LiZ4Lih4Liy","IOCksOCktQ==","2ZHYrw==","zrzOrc69zr/OuQ==","IOunkOyUgA==","IHBvxZlhZA==","INio2Lo=","IM+MzrvOsQ==","4LmJ4LmE4LiC","4LmA4LiB4Liy4Liw","IGLhuqFj","IGTDoQ==","ZMSbbGE=","IHRlYg==","IGvDqG8=","44KP44KM","IGlzdGl5b3J1bQ==","zrvOrs+C","0JDQsg==","IGFzbGE=","IHBlcmZvcm1hbnM=","IFbDoWNsYXY=","z4HOr86xz4I=","IHTEm2w=","5oyZ","0L7QsdCw","44GR44KM44Gp","IOuUuA==","2YjYp9ih","INqp2YjYr9qp2KfZhg==","INC/0LvQuNGC","IGJpbGly","0YPQttC1","z4TOrc67zrU=","IOCkhuCkleCksA==","INGC0YDRg9C00LA=","INiv2LHbjNin","zKc=","IG5n4buNdA==","2YbYs9in","0LDRgdGC0Lg=","772j","wqDQvdCw","0LXQvNGL0LU=","INiz2LnZiNiv","IGFsxLFt","6LSr","5Yiw55qE","IGtlc2lubGlrbGU=","IHrDoXNhZA==","IOyKpO2KuA==","IGRhaGk=","dMOp","5Y2B5YWr","IHphecSxZg==","2LDYp9ix","INin2YrYsdin2YY=","IGhvZG5v
Y2Vuw60=","RFNU","IOyWmA==","5piH","6Zmj","INC60LvQtQ==","IHVwbGF0","INin2YTYqti52YTZitmF","z4DOv86vzrfPg863","0LXQutGC0L7RgNCw","IOunkOydtA==","INmB2LHZitmC","5biu5Yqp","55Sf44GN","5YaF44Gu","6IGU55uf","0LPRgNCw0LQ=","IGNodXnhur9u","44KC44KK","INGH0LDRgdGC0LjQvdCw","44Gq44GP44Gq","0ZTQsg==","INGE0LDRhQ==","a3Vr","55S35oCn","INmF24zZhNin2K/bjA==","IGJlZGVu","6rCA66W8","4KSu4KSw","IOyWtOuouOuLiA==","6IGU572R","wqBtaQ==","IHphaHJu","5rKW","IGtodeG6qW4=","IG9wcsOhdg==","4KS+4KS54KSV","INqp2YjYqtin2Yc=","INC+0LHQvtC7","IHBow7pj","csOhbsOt","4KWN4KSw4KSl","5o6q5pa9","INCy0L7Qu9C+0LQ=","IHNww63FoWU=","IG3GoQ==","0YrQtdC6","bmfDtnI=","4KSJ4KSk","a3NpeW9u","0LDRgtC1","INis2LLYoQ==","w6F2a2E=","0JLQoQ==","bGHFn21h","IOe/","4Lit4Liy4LiK","0L3QuNGG0YM=","IOC4q+C4suC4gQ==","44GL44GX","7Y+0","INCz0LDRgNCw0L0=","IM+DzrHOvQ==","INC00L7QsdCw0LLQuNGC0Yw=","INGA0LDQt9GA0LXRiA==","4b4=","5piv5Liq","zrzOrc+C","IMSwbXBhcmF0b3I=","5qiZ5rqW","0YHRgtGL","IGfDvGPDvA==","IO2DgOydtA==","IOWFtuS7lg==","IHTDtG5n","IHZlZGVuw60=","65Oc66Gc","IG1lc2Vs","IMSNZQ==","amRl","z4HOtc65zrE=","44KI44Gt","0KDQnQ==","6Led56a7","INmC2KfYptmF2Kk=","4Liy4Lia4Liy4Lil","INGB0LDQudGC0ZY=","IOCksOCkuA==","INmC2LHZhg==","IG7DoXZy","2qnZhQ==","55qE5omL","IHNvcnVudQ==","L07EkA==","bnV0w61t","INiu2YjYsdiv","IG5n4bud","IDoufA==","IGJ1ZG91Yw==","acSNa3k=","INiv2LHYrw==","0YDQvtC90LjRh9C10YE=","576K","IOyVhOuyhOyngA==","IEthbnVudQ==","INC/0YDQuNCy0L7QtNC40YI=","zqzOu8+Fz4jOt8+C","IFZsYWRpbQ==","IGFsxLFw","INC10YLQsNC/","IOCkl+CksuCkpA==","INix2KfZh9mG2YU=","IHBvemlzeW9u","IGfDtsOn","6LWe","INC80L7QuQ==","IM6gzqw=","IOyIoA==","INii24zZhtiv2Yc=","YW7DoQ==","5Lic55yB","INmF2KrYudiv2K8=","IOWNig==","44CA44CAIOOAgCDjgIAg44CAIOOAgA==","IHRo4bud","INCy0LTRgNGD0LM=","0L/QsNGC","INC/0YDQvtCy0LXQtNC10L3QuNGP","2YbYsg==","INin2YTYqNit2Ks=","5oGi","IGJha3TEsQ==","IOi3rw==","INC30LDQsdC+0LvQtdCy0LDQvdC40Lk=","INCV0LLRgNC+0L8=","IHRhcmlobGk=","6rmo","INqp2YjZhw==","IOyWtOugpA==","IHRpdHVs","IHZ5ZMOhbsOt","6Zi25q61","4LiI4Liw4LiV","INC
80L7Rjw==","INC60L7RgNC+0Ls=","INCx0LDQvdC6","4Lin4Lij4Lij4LiT","INmD2LPYp9ix2Kk=","IEtob2E=","INGD0L3RltCy0LXRgNGB0LjRgtC10YI=","44Gr6Zai44GZ44KL","cnVhcnk=","IOC4guC4suC4og==","IHN2YXo=","INi02LHZgg==","INC00YvRhQ==","INC40LfQsdCw0LI=","INGP0LrRltC5","IM6czr/OvQ==","IGfDtm4=","IFVrcmFq","4Lix4LiZ4Lit4Lit4LiB","IOC4oeC4geC4o+C4suC4hOC4oQ==","0LjRgtC+0LI=","IGFuYWzDvQ==","INC+0YLQvNC10Yc=","INio2LHYp9mJ","4oiP","4Lix4LiB4LiB","5oul5pyJ","INGW0L3RiNC+0LPQvg==","INC60L7QvNC/0LDQvdGW0Zc=","IGvFmWVz","INGA0LDQsdC+0Yc=","YWTDrQ==","7KCg","4LmE4Lir4LiZ","4KWB4KSs4KS5","4oCZZGVraQ==","54Wk","INC/0LDRgNGD","7ISt","INC90LXQv9C+0YHRgNC10LQ=","IMSwYg==","IOC4nuC4pOC4qOC4iA==","7Yu0","IOugiOydtA==","IFRo4buV","0Y/QtdGC","2KfYptis","u+eSgw==","0JLQng==","5ZaK","IOesrOS4iQ==","INCy0L7QutGA0YPQsw==","0YfQtdC90Yw=","IG9sYW5haw==","dHVyYQ==","INmF2YrZhA==","ZXlkaQ==","INmF2K/Zitix","IG5lbHpl","4Lix4Lin4Lit4Lii","7IWc","IGhsYXZ1","IGtvcnV5","0YbQuNC9","INC00LjRgdGG0LjQvw==","INmF2KfZhtiv","INC/0L7QtNGA0L7QsQ==","0KLQng==","2YLYsdin2LE=","4LmB4LiZ4Liw4LiZ4Liz","66y47J2E","5oyv44KK","UMWZaQ==","IHnDqm4=","4KS24KSV","wqBqZQ==","INCa0L7QvdGB0YLQuNGC0YM=","4KWB4KS5","INm+2Kc=","7IaM66W8","INC00LXQu9Cw","0LrQuNC0","4LmC4LiK","7Luk7Iqk","ZMSbbGVu","4KSU4KSw","5LqO5piv","INmH2YXbjNi02Yc=","IGJhxZ9sYW0=","IOybqA==","IGRlbmV5aW0=","IMO8eWU=","IM69z4w=","IOCkluCkoQ==","bsSbbA==","INGB0YTQtdGA0ZY=","4Lit4LiU4Lig","5LiA5bm0","IHZ1cmd1","xJ7EsA==","4oCZCg==","INGW0L3RiNC40LzQuA==","INC30LzQtdC90Yg=","IOCkiw==","INCy0LXQutCw","INit2qnZiNmF2Ko=","INiq2YXYp9mF24w=","IHNtcnQ=","IGjhu6d5","IHlhcMSxbG3EscWf","4LmJ4Lic","IFllbg==","INGD0Ls=","IFN2xJt0","4Lix4LiE","IG3Em3PDrWPFrw==","0LTQtdC90YLQuA==","IO++mA==","INC/0L7Qu9C40YLQuA==","c2t5dA==","5Lmf5pyJ","IOqwmeyKteuLiOuLpA==","IOq3uOuemOyEnA==","z4TOtc+Bzrc=","0YfQtdGA","IMOcTsSwVkVSU8SwVEVTxLA=","4Liq4Lig","IOC4quC4ow==","4KS+4KSo4KSm","IGHFn8SxcsSx","zrvOr86/z4U=","INmE2YE=","w61udQ==","4Lit4Liy4Lij","0YLRg9GA0LA=","IMSNZXNrw71jaA==","IHBo4bupYw==","
5Lul5Li6","z4HPic+AzrE=","INin2YbYsdqY24w=","wrsp","YWxhcmRhbg==","INGB0YLQstC+0YDRjg==","IHRyw6F2","4KWs","44GK44KI44Gz","75yL","YWRpbA==","IM6kzrk=","IOuQqeuLiOuLpA==","IM61zrzPhg==","IOq1rOyhsA==","7Jet7Iuc","INin2YTYrNin2YU=","5Li76aKY","44K544Od","IOyXreyLnA==","INqp2YXYqtix","IFNwb2xlxI0=","0L7Qu9C+0Yg=","IFN1cml5ZQ==","0KfQtdGA","5oiY5paX","IHrDoXZpcw==","5pu46aSo","IG11c2Vs","IOed","2YXZhQ==","INin2YTYrtin2LHYrA==","INCT0J4=","INCy0LDRgNGC0L4=","z4HOsc6y","IOCkquCkueCkmg==","dWJsaWNl","0YbQuNC+0L3QvdC+0LPQvg==","6Iyo","INiv2YHYqtix","INmB2LM=","IOCkqOCknOCksA==","dGFyxLE=","INC+0LHRgNC+0LE=","INCg0LA=","INin2YTYtdmG","2LTYqQ==","IOyXhuyXiA==","b8W+bsOh","5pyA57WC","2aU=","cmVjaA==","INin2YTYo9iz2LE=","INC80L7QstC4","IOyhsOq1kA==","0ZbQvNC10Yc=","44Ov44O8","0LHRg9GA0LM=","INiz2YTYsw==","5a2m5Lya","IOum","5YWL5pav","5paH54yu","IHjGsMahbmc=","IHlvbGM=","IOyCrOustA==","44KP44Ga","INGA0LDRgdGC0LXQvdC40Lk=","INmB2LbYp9uM","IG5hb3Bhaw==","INC/0YDQuNCy0Ys=","INiv24zYr9mH","4LiB4Liy4Lij4LmD4LiK","IOWe","55Gf","5Lul5ZCO","IHDFmWlibGnFvg==","IGTDvMWfbWFu","IHRlbWlu","INGD0YHQu9GD0LM=","IOCkpuCkrA==","IOyDiOq4gA==","INGD0YHRgtGA0L7QudGB0YLQstCw","INCi0YPRgg==","z4TOr86/z4U=","IMSwc2zDom0=","2aQ=","5Y+C5LiO","INC60YPRgdGC","6ZmQ5Yi2","2KrZitmG","INC+0YHRgtCw0L3QvdGW","aWNhdGlvbnM=","2KfaqduM","0L3QvtGB0Y8=","xJ9hbg==","44GP44KM44KL","IHlhcMSxeW9y","IOqwleuCqA==","2YXZitmF","5q2Q","INix2Lk=","IGJvxJ8=","INC40YHRhdC+0LQ=","6Kqg","5qC35a2Q","IGJ1ZGVtZQ==","INGB0LXRgg==","zrnPg868zr/PjQ==","IOW+kuatqQ==","dcOhbG7DrQ==","INin2YTYudmC","INiz2KjaqQ==","INin2YTYo9iu2LHZiQ==","RUZB","5Zu65a6a","IOOCrA==","IOyekOyXsA==","4Lii4Lin4LiC","2KjYsw==","dW5tYQ==","INC30LDQvdC40Lw=","4LmD4LiZ4Lij","6ICD6JmR","5re35ZCI","5bCL","IMOnxLFrxLHFnw==","IG1hbGl5ZXQ=","6ZyK","44Gf44KB44Gu","INm+2LQ=","INC30LvQvtGH","IHbDvcWhaQ==","IHNjaHbDoWw=","INmG2YXZiNiv2Yc=","zoY=","IHrDoWNo","IM+Dzro=","44K544Oe","INmF2LPYp9im2YQ=","INin2YTYp9is2KrZhdin2Lk=","5Zyw54K5","2KfbjNin2YY=","INCe0Lo=","6riU","ZWxlY
XNl","INi32KjZgtmH","6ZGR","IOy9lOuhnOuCmA==","6byg","5aSn5YWo","INC/0YDQuNCy0LXRgdGC0Lg=","INin2KjYqtiv","66as66Gc","INGB0YLRgNCw0L3Riw==","IHphdMOtbWNv","IGh1eeG6v3Q=","2LPbjNmI2YY=","IHNvcmR1","4oCM2LHYsw==","INGE0YDQvtC9","IGVkaXA=","2Ybar9uM","INC60LjRgA==","IO2VtOyVvA==","7Lu0","0YbQuNC60LvQvtC/","INC/0YDQuNC80LXQvdC10L3QuNGP","INC+0LHQuw==","6Zqq","IGtyb23Emw==","5qC45b+D","cmFoaW0=","0L7RgNC0","IGzDoG5o","INC+0YHRgtGA0L7Qsg==","O3w=","YnV6","IM+Ez4HOvw==","INCS0LDRgA==","5omO","xLFsxLHFnw==","6Z2i56mN","6Lqr5Lu9","6aKG5Z+f","INin2YTZgtix2YY=","INC/0YDQuNC60LvQsNC0","44OB44O844Og","IOC4quC4nuC4mw==","INC+0YfQuNGB0YI=","INC80LjQu9C70Lg=","0LDRhtGW0Zc=","4Li14LmA4Lit","IHRhbsSxbg==","54i25Lqy","IG1zZ3N0cg==","INi024zZhduM","INmB2LHYp9mH2YU=","IOunpQ==","44CC5b2T","INC60L7QvdGG0LXQvdGC0YDQsA==","6rWQ7ZqM","44KJ44KM44Gm","IHlhc2Fr","INCR0L7Quw==","IOa+sw==","54eV","INis2Kc=","65GY","INiv2LHYrtmI2KfYs9iq","IG3DrXN0bsOt","woLDjA==","IGJhc2vEsQ==","IHXDp2Fr","5LuT","IOycoOyngA==","INC/0L7QsdCw","IHplcHRhbA==","57uZ5oiR","IEF0YXTDvHJr","INmF2YbYp9iz","0ZI=","IGFyYWPEsQ==","0LvRjtGU","IG5pdGVsaWs=","IE1lemk=","IM6tzr3Osc+C","z47Ovc+EzrHPgg==","dmHFvg==","IGt1emV5","IM+Oz4HOsQ==","INGA0L7Qt9C/0L7Qsg==","4LmI4Liy4LiB","44CB5LiJ","INGB0YLQsNGA0Lg=","IGhha2vEsQ==","INii2YXYp9iv2Yc=","7YyU","0L7QvNGW","IOKAoA==","44GL44KP","44CM5L2g","5rOV5Zu9","2ZDZitmG","5omV","0L3QuNC70Lg=","INGD0YHRgtCw0L3QvtCy0LrQuA==","IGzDtG5n","4KSk4KSu","2YjZhtmK2Kk=","2YrYqtmK","IOqyjOyLnOusvA==","IHZlxaFrZXI=","zq3Pgc6z","INGD0YHQtQ==","IGvEsWw=","IGlsZ2k=","zrzPic69","INC30LLRltC70Yw=","IMO2bmxlbQ==","4LiB4LiO4Lir4Lih4Liy4Lii","IEhp4buHcA==","INCz0L7RgNC8","0LvRj9GO0YLRjNGB0Y8=","bGFtYXlh","INGB0L/QvtGB0L7QsdC+0Lw=","44G444Go","56aB5q2i","INGA0LDRhdGD0L3QvtC6","INC+0YLQstC10YDRgdGC0Lg=","LjouOi46Lg==","IG3DvGRh","0L7QvdCw0YU=","zKNj","IHlhcGFjYWs=","INC90LDQt9Cy0LDQvdC40LU=","5a+55pa5","64yA7ZGc","54it","0LLQsNC90LA=","4KS54KSo","INC/0YDQvtCx0LvQtdC80LA=","INC20LXQvdGJ0LjQvdGL","6J66","IGhvc3Bv
ZMOhxZk=","INCh0YLQtdC/","IG9kcG92xJtk","IFPhu60=","ZXZpZXc=","5Yeg5LmO","55+i","5p2l44Gf","INC/0L7Qu9C+0YE=","INGB0LXQuw==","5bGG","INC/0LXRgNCy0L7QuQ==","INC/0YDQvtGG0LXRgdGB0LA=","44CA44Kd","2KrYp9mF2KjYsQ==","0LjQu9Cw0YHRjw==","77yM5peg","INCy0LvQsNGB0L3QvtGB0YLRlg==","7ZWY7J6Q","0LDRgtC60Lg=","IELDoA==","IEthcmVs","6Le1","2LHbjNmH","IOuCmOulvA==","INC+0LHQtdGB0L/QtdGH0LjQstCw","4KWN4KSw4KSq4KSk","44GX44KH","5Y2S","IOWlpQ==","INC/0YDQvtGC0LU=","IOaLmw==","INCh0YLRgNCw0L3QsA==","INGA0LDQsdC+0YLQsNGC0Yw=","INiq2LTYrtuM2LU=","0LXQutGB0YM=","IOumrOq3uA==","INi12KfZhNit","IGJhxZ9sYW3EscWf","INm+24zYp9mF2KjYsQ==","2LLYpw==","INC80LDRgdGB","IM6gzrHPgQ==","65287ZS8","IHlhcsSx","INGC0LjQv9GD","0J7Qvw==","44GR44Gq44GE","ZW1lbQ==","IG7Em211","INmG2LTYsQ==","IM6RzrjOrs69zrE=","2YHYsdin2YY=","IOe2sg==","INC/0YDQvtC80LjRgdC70L7Qsg==","IEJ1Z8O8bg==","7J6U","INC20ZbQvdC+0Lo=","IOC4m+C4o+C4sOC5gOC4oOC4lw==","INCy0LjQutC+0YDQuNGB0YLQvtCy0YPQstCw0YLQuA==","INCi0LjQvA==","KeulvA==","0LXQttCw0YLRjA==","IHNvbmE=","2LTZhtio2Yc=","IG5pY2jFvg==","5Ymb","INmB2KrYrQ==","INmF2YLYr9mF","IEfDvHZlbmxpaw==","ZXVt","57uP6L+H","6Led6Zui","wqDQvdC1","INin2LXZiNmE","IHphxI3DoXRrdQ==","4Li04LmA4Lin4LiT","IOCkleCknw==","IGtyaXo=","IHDDoW4=","INCx0L7RgNGM","2LjZhdip","IOqyveu2gQ==","INin2YTZitmF2YY=","INin2YTYudix2KjZig==","IGhsdWI=","IGNo4bud","6KWy","65Oc66as","44OW44Oq","INGB0YLQvtC70ZbRgtGC0Y8=","2LHYqNmK2Kk=","IOawuA==","IOqxsOydmA==","IM6yzrHPgw==","IGFyeg==","44Oi44Oz","INGA0ZbQstC10L3RjA==","5LiN55+l","5a+86Ie0","2KfZiti0","INC/0YDQtdCy0YvRiA==","INC/0L0=","IM6Sz4HOv8+Hzq4=","IOi6qw==","IMSQ4bqndQ==","IM+MzrzPic+C","asOtxb4=","IM67zq/Osw==","INGI0LrQvtC70Lg=","44Gj44Gx44GE","emR5","IOqzpw==","dGXFnw==","0YDQtdGJ","zrrOtc65","c2FodWpl","IOCkieCkuOCkuA==","IFRhbnLEsQ==","5LiN5aW9","6YOt","INCy0YvQs9C70Y/QtA==","IMOnb8Sf","INC40L3RgdGC0YDRg9C80LXQvdGC","cmVq","6IiM","44GL44KJ44Gq44GE","INC90LXQv9GA0LjRj9GC","INC60YDQvtC80LU=","zrbOtw==","INC70L7Qsw==","4KS+4KS14KSw","64WV7ZWY7IS47JqU","4KS+4KS54
KSw4KSj","IGfDvHZlbmlsaXI=","VOG6oWk=","INi02YfYsdiv","IM6kzrU=","0L7RgNCw0Lc=","IGzDoG5n","77yp","5oqV5rOo","IHNpeWFzZXQ=","0JvRjg==","IHTFmWV0","IM+Az4HPjs+Ezrc=","INGD0LvRi9Cx","IEzDom0=","0YPQu9GM0YLQsA==","5Z+65Zyw","IHNrdXBpbmE=","5rC45LmF","0LvRg9Cz0L7Qsg==","INGG0ZbQuQ==","IFBvaA==","adC0","IFRydXk=","55qE5LiA5Liq","67KE7KCE","IHjhu6k=","4LiH4LmB4Lij4LiB","4LiE4Lit4Lih","IGVsZWt0cm9uaWs=","IGHEn2HDpw==","IOCknOCkrw==","INC/0L7QstC10YDRhdC90L7RgdGC0Yw=","INin2YfZhduM2Ko=","0LvQuNCy0LjRhQ==","IG9sZHXEn3VuZGFu","77yJOg==","0YbQuNGP0YU=","6KO95L2c","4LiX4Lij4LiH","ZXlpbQ==","IG7DoWtsYWQ=","Y2lsaWs=","INCT0LvQsNCy","IFV5Z3U=","INGA0LXQs9GD0LvRjg==","4KSC4KSc4KSo","IGtheW5hxJ/EsQ==","4LmJ4Liy4Lit","IGfDtnJtZWs=","IO2MrA==","IOWujA==","2KvZhdin2YY=","INGC0LDQutCw0Y8=","INC90LXQuNC3","IHpwcsOhdnk=","INin2YTYtNiu2LU=","IOyYpO2bhA==","INin2YTYt9io","YXTEsXLEsW0=","2LHZitix","INmF2LnZhdin2LHbjA==","w5xSSw==","INKQ","IOyErA==","5omL44Gr","IOuzgO2ZlA==","dWxhY2U=","IHPhu6M=","0YDQuNGH","4Lih4Lir4Liy4Lin","IGvDog==","INGB0L/RgNC+0LE=","2YfYsdmH","4KS+4KSn4KSo","IM+AzrHOuQ==","2KjYudiv","INin2YTYqtmI","57uP55CG","cMWvc29i","5qyg","INC30LDRhdCy0L7RgNGO0LLQsNC90L3Rjw==","2K7YqQ==","2obYp9ix","IGJvenVr","XeKAjw==","IFNvY29ycm8=","IGhyYWQ=","0L3QsNC00LvQtdC2","INGD0YfQsNGB0YLQuNC1","5aSJ44KP","IHlhbnM=","INil2YQ=","2K7YqNix","0YbQuNC60LvQvtC/0LXQtA==","zrnPjs69","z4PPhM+Bzr8=","IGJhbmth","IHNvxJ91aw==","IMO8bmzDvA==","6aKc","INix2YHYuQ==","55Cz","INGB0L7RgdGC0L7Rj9C90LjQuA==","zr3Ov869z4TOsc+C","INCw0LrRgtC4","IM+Azr/Ou8+F","INC80L7Rlw==","IOagvA==","57KX","INGB0LvRg9GH0LDQuQ==","7J287JeQ","INGC0YDQtdCx0YPQtdGC","IOWPguiAgw==","YW5nbA==","YW1paw==","IMSwxZ4=","5rmv","IMSRw6Fv","4Lil4Liw4LiE4Lij","0YHQvg==","wqBvYg==","IGtsaW0=","6IOG","7IOd7Zmc","44OR44Oz","LeCkrA==","INC60LDQtA==","4LmI4Liq4Liy4Lih4Liy4Lij4LiW","INmF2LPZhNmF2KfZhg==","57+w","IELDvHTDvG4=","IEtyYWo=","INC/0LXRgNGB0L8=","IGVuZXJq","44GV44Gb44KL","6L6+5Yiw","4KS+4KSK","INqv2LHZgdiq2YY=","0YjQutGD","INCf0LvQvg=="
,"w61ueQ==","IEhyYQ==","INqG2YbYp9mG","IOC5hOC4l+C4og==","dmlzZWrDrWPDrQ==","27Pbsw==","INCc0ZbQvdGW0YHRgtC10YA=","4LmC4Lit","INiv2YfbjNiv","5q+U5L6L","z4POuc61z40=","x5A=","44CB44Gq","IOCkpOCkuA==","IMSwdA==","IOyghOyfgQ==","4LmA4LiI4Lij","IGVsZWt0cg==","IGTGsA==","4pSU","IOyDpA==","5Luu","4LiB4Liy4Lij4LmA4Lil","INC80YPQu9GM","IOW6pg==","IEh1eeG7h24=","0LLQtdC9","IGzGsOG7m2k=","IHByb3ZvenU=","0YPRgNGD","0YDRltGX","IMOnb2N1xJ8=","4Lix4LiQ4Lia4Liy4Lil","2YTZitmH","IFvigKZdLi4uCg==","5Y6f5aeL","IHNrbGFk","INiz2b7Yqtin2YXYqNix","IFRvbcOhxaE=","INiz2YjYp9mE","54Gt","44KT44Gp","0L3QsNC30L3QsNGH","IMSRxKlh","IHVkxJtsYXQ=","IOCkhuCkpuCkrg==","77ys","zrnOvc+M","acWfbGVyaQ==","xJDDonk=","INix2LPYp9mG2Yc=","2LnYp9mF","44O844OR44O8","IGRvcHJvdg==","INC80ZbRgdGC0L4=","77yl","0LXQu9GW0LM=","2KfYptiy","5LiN5LqG","INCQ0LvQtdC60YHQsNC90LTRgA==","INCy0YDQtdC80LXQvQ==","IGR2ZcWZZQ==","IGNo4bqjeQ==","IG90ZWw=","6IKv5a6a","INGD0YLQstC10YDQttC0","INCa0L7QvNC/","IOuCmOudvA==","INCy0ZbQtNCx0YPQstCw0ZTRgtGM0YHRjw==","44CB44CO","IGthcsWfxLFsxLFr","IGzhuqtu","54WZ","2Lnaqdiz","5byl","IHRlY3I=","IG5lb2Q=","5oiQ54K6","5YWl44KK","INCf0YDQvtC0","IM+Az4HOrA==","4Li34Lit4LiU","0YHRgtCw0YLQuA==","0LXQvdC+0Zc=","0YfQuNGB0Ls=","55yf5q2j","IOC4o+C4suC4hA==","0YPRgNC1","INi02KfZh9iv","2KfYudix","IOqyve2XmA==","4LiZ4LiE","44ON44Or","z4DOv8+FzrvOv8+C","IOCkruCkiA==","7Iqk7L2U","aXRlbG7DqQ==","5byA5pS+","542o","IHDFmWVjaA==","w7rEjWFzdA==","5aKT","IOW9sQ==","2YbYs9in2YY=","INC00LLQsNC0","INC40LTQtdGC","INC/0L7QtNC60LvRjtGH","7Yq567OE7Iuc","QsOgaQ==","xaFrdQ==","aWxlcmRlbg==","5Y+Y5b6X","64+Z7JWI","IHBvc3R1cG7Emw==","INC40YLQvtCz","IGTFr3ZvZHU=","c2l6bGlr","2YTYp9mG","6YKj56eN","INGH0LDRgdCw","5LiN5pat","INiu24zYp9io2KfZhg==","INin2YTYr9in2K4=","INGB0YLQvtGA0ZbQvQ==","IOy2nOyXsA==","5rKf","IGhyeQ==","IEfDnA==","IOyduOq1rA==","bGllZA==","INi52KfZhNmK2Kk=","INC/0YDQtdC00LLQsNGA","0LDQvdC90L7QuQ==","5Y+l6K+d","6aCT","67CU7J28","77yPLw==","INmF2K7Yqti12KfYqg==","656r","IMOnYWzEscWfbWFsYXLEsQ==","IHJlcHVibGlrYQ
==","IOyz","4KS+KQ==","IOqxtOqwlQ==","IOqzteuPmQ==","6IWm","IOyEnOuhnA==","INC/0YDQvtCy0L7QtNC40YLRjA==","INC00LXQudGB0YLQstC40YLQtdC70YzQvdC+","dmXDpw==","2KvYp9mE","IGfDtnN0ZXJpcg==","xLFybGFy","INGB0LDQvNGL0Lw=","w6Fsbw==","6aKR5qyh","4KWI4KSX","2KfYr9mF","54yq","IFPhuqNu","IMOnxLE=","IGxldHk=","IHJlcHVibGljZQ==","5p2l6Ieq","IHbhur90","IGJpcmlr","IG1la3Q=","INin2YTZiNmB","IGppY2g=","5LiA6Kan","6Zyy5Ye6","IEhp4buHbg==","IGRp4buHdA==","INGF0YDQuNGB0YLQuA==","5Yia5omN","a2F0ZQ==","IGJhemVu","IHVyxI1pdMSb","IHVtb8W+xYh1amU=","6aGY44GE","L1HEkA==","IG1lbsWhw60=","z4POus61z4XOrg==","INGG0LXRgNC60L7Qsg==","IOi0rQ==","0L7QutGA0LDRgtC4","INGA0L7Qt9C6","zrHOvc6/z4U=","IHnDtm5ldGlj","IG9sbWFkYW4=","5Yac5Lia","IOuwlOuejA==","55Oc","0YjQsNC10YLRgdGP","INCa0L7RgdGC","INmF2LnYqg==","IOC4nuC4pQ==","INmF2KrZgdin2YjYqg==","44KJ44GP","6IiX","INiq2LnYsduM2YE=","6YmE6YGT","IHDDqcSNZQ==","7Lu1","INC/0L7QtNGA0LDQtw==","INCx0LDQvdC60YM=","xLBTxLA=","5qGQ","4LmC4Lij4LiE","INit2LDZgQ==","IOuj","0LvQuNC2","IOyCsOyXhQ==","INC/0YDQuNGH0LjQvdGL","INC90LDQt9C90LA=","44Oq44K544OI","7KCV67aA","z4PPhs6x","5aaD","INCz0L7Qu9C+0LLQuA==","65CY7JeI7Iq164uI64uk","IM61zr3PjM+C","44Kk44Oz44K/","IHNsdW4=","66C0","INGB0YPRidC10YHRgtCy0YPQtdGC","0LfQsNCx","5pu05Yqg","INCx0LvQsNCz0L7QtNCw0YDRjw==","IOuMgOq1rA==","6L6F","4Lir4Liy4LiB","IOaOpQ==","64yA66W8","5Lq657G7","amVtZQ==","5YiG5biD","7J6l7J2A","INC00L7Qv9C+0LzQvtCz0Lg=","7JmE66OM","b3N5","6Iux6ZuE","INmE2LM=","4KSu4KS5","IOC4geC4sw==","INiv2KfYtNiq2YY=","reygnA==","xLBuZw==","IFRoxrDhu51uZw==","7ZmA","0Y3RhA==","7ZW07JqU","INCc0ZbQtg==","0LXRgNGW0LPQsA==","IM614bw=","4LmB4Liq4LiH","44OA44Kk","IGNlc3R5","IHByw6F6ZA==","56ys5LiA5qyh","INmH2YXYs9ix","IHpldg==","wqBF","IEJlbGVkaXllc2k=","INC/0YDQvtC/0L7Qt9C4","IGFubGF5xLHFnw==","wqDZhQ==","INGA0LDRgdGB0YfQuNGC","INin2YTYo9mF2LHZitmD2YrYqQ==","IMW+ZW5h","ZGVuaXo=","IG5vY2k=","IHN0w6Fs","4Li44Lii","7KO87IaM","INC30LXRgA==","IOyGjOqwnA==","IGto4bqzbmc=","YXTEsWPEsQ==","xJvFvg==","INGH0YPRgtGM","IGPhuq11","INin
2LfZhNin2Lk=","5rWF","IHN0cmF2","IFNhbmF5aQ==","INi32KjZig==","IGjEsXpsYQ==","z47Ovc6x","4KS/4KSc4KSy","2YXYrdmF2K8=","4Lia4LiB","IHZ6ZMOhbGVu","INGC0LDQutC40LzQuA==","44CC44Gd44GX44Gm","IGthbHA=","INC60L7QttC90L7Qs9C+","0KDCtQ==","2YTYudin2Kg=","INmF2YjZhg==","IOydvOydhA==","IOuwlOydtA==","IG1la2Fu","INis2KfZhdi5","INmG2YHYqg==","INin2YTYs9mF","0LvRi9GF","6IOM5pmv","IOqyg+uPhA==","IOyCtOyVhA==","eWTEsQ==","INC90LDQstC10YA=","5a2Q44Gv","bHVsdWs=","IGjhu5du","INi02YE=","INi52YTYqg==","4LiE4Lij4Liy4Lih","IM6az43PgA==","IOC5gOC4oeC4qeC4suC4ouC4mQ==","2YbYr9mC","INGD0YHRgtGA0LA=","IM6TzrXOvQ==","INCG0LLQsNC9","IFBob25n","5a6255qE","INCQ0LvQtdC60YE=","INC30LHQtdGA0ZbQsw==","IMWfYXJrxLE=","INi42LHZgduM2Ko=","INmF2LnZhtuM","INC70L7Qsg==","IOyCtg==","6IWQ","IOWvjA==","RVJH","INGB0YLQvtC40LzQvtGB0YLRjA==","xZlldA==","4KWJ4KSv","4LmI4Liy4Lij","INin2LHZiNm+2Kc=","INCx0YDQvtGB","INC+0YLQvdC+0YHRj9GC","IM6fzro=","0YbRjNC60LjQuQ==","z4rOug==","44GC44KK44G+44Gb44KT","INGD0L3QuNC6","IMSRaeG7g24=","IHbDvXprdW0=","IGjhu6k=","INmI2KfYqg==","IOW5s+aWuQ==","z4XOvA==","44KS5L2/","zrXOr8+EzrHOuQ==","5Lik5Lq6","IOWMuw==","0YDQsNGC0LjRgtGM","INin2YTYp9mG2Ko=","44Gu5Lq6","2LHYtA==","INCi0YPRgA==","cm7Emw==","5aSp5aSp","4Lih4Liy4Lij","IG9ydGFsYW1h","INC/0LXRgNC10L/QuNGB","IOyDneyCsA==","5b+G","7Ye0","77yM6K+l","6Yyi","z4DOsc6vzrQ=","INC80LXRgNC+0L/RgNC4","INCz0YDQsNCy","w5RuZw==","IOak","INin2YTYr9mI2YTYqQ==","INC+0YHRjA==","5aWU","IGfDvHZlbmxp","7ZWY7Iug","IOmK","6Z+z5qiC","IG1lZHlh","INio2YbYpw==","0LDQvNCw","IOOCreODow==","6Jel","bGFyxLFt","IFRp4bq/bmc=","aXlvcmxhcg==","77yi","5pSd","0ZbQudGB0YzQutC+0Zc=","IHlldGnFn3Rpcg==","INm+2LPYsQ==","44KJ44GX","wpo=","7IOk","4LiU4Liy4Lir","INiq2K3YtduM2YQ=","INCx0LXQvdC3","6YGj","INC90LDQsdC70Y4=","5L2T57O7","44Ov44Kk44OI","wqDCoCA=","5Lmm6K6w","IE3DvGhlbmRpcw==","cGxvcg==","bGF6","0LvRj9C70Lg=","IHBvbcOhaA==","INCx0LvQuNC2","INGH0LjRgdC70LA=","IHVieXRvdsOhbsOt","0YDQsNGC0L3Qvg==","IHRyxINt","INin2KjYsdin2Yc=","w6F0a2E=","IGnDp2luZGVraQ==","4Lix4Lia4
LiZ","INin2YXbjNiv","bmF2ZQ==","ZWN1dA==","5bCx5Zyo","IHRyYWRp","2LfZhNmC","44Km44Kp","IGtodcO0bg==","7Iqk66Gc","z4TOrc+BzrE=","IM+DzrrOvw==","66eb","INmB2YbbjA==","4LmM4LmA4Lie","INin2YTYudi4","IHRow7Ru","6riw7J2Y","IOC4vw==","0YPRjtGC0YHRjw==","INmF2qnYp9mG","IOKXjg==","IOecgQ==","IOWNoQ==","INC/0LXRgNGI0LjQuQ==","IO2bhOuztA==","INii2LHYp9mF","44GM44GE","4Lii4Liy4LiZ","zrzOtc65","IE3DoXk=","IHrFrw==","IHBvZHBvcnU=","7Luo","0YHRgtGA0Lg=","z4DPhM+Jz4POtw==","0KTQmw==","5ZOq6YeM","INC/0LXRgNCy0YPRjg==","IHllcmluZGU=","INiy24zYqNin","IG9kc3RyYW4=","4KWA4KSX","INGA0ZbQt9C90ZY=","z4HOt8+Dzrc=","4oCM2KfZhNmF2YTZhNuM","2LnYp9iv","4KWN4KSq4KS3","0Z9O","772b","44O844Oc","6LSt5Lmw","IOyduOq4sOq4gA==","INmF24zYtNmI2K8=","INCx0LXQt9C+0L/QsNGB0L3QvtGB0YLQuA==","IM69zrXPhs6/zro=","44Gr44Go","INGG0LXRgNC60LLQuA==","2KrZgw==","IEjDoG5n","INmE2YTYsw==","IM69zrXPhs6/zrrOrM67z4XPiM63z4I=","cmFtYW4=","IHZ5dm9s","bmnEjQ==","2LHYp9mG2Yc=","IHBlxZ8=","44Or44Kv","5bSH","IGlta8Oibg==","5Yy755aX","IOCkquClnQ==","zqzOvc69zrfPgg==","INis24w=","IHByb2pl","IMO8bGtlbmlu","IEtldw==","INin2YTZhdmB","2KPZgw==","55m66KGo","IM60z4U=","IOWbveWutg==","IEtpxZ9pc2Vs","44Oz44Ks","IHpwcsOhdmE=","Vmnhu4dj","ZXJpZg==","IHN0csOhbmt5","6Zqg","6LyV","0LrQvtC3","IOCkuOCknA==","2YfYr9in2YE=","bG91Yg==","4Lig4Liy4Lie4Lii4LiZ4LiV4Lij","IO2VoOyduA==","IMSQw6Bv","INmG2KfYrduM2Yc=","KD0p","IMWeYW1waXlvbg==","IHBpxZ8=","INiw2Yc=","4KWv","INGB0YDQtdC00YHRgtCy0L4=","IOC5gOC4p+C4peC4sg==","INGH0YPQtg==","IHZlcmlsZXJp","INqp2KfYsdiq","0LDQstC4","IOCkleCksOCktQ==","IHJlc3RhdQ==","6rCc7JuU","INC80LjRgNC+0LI=","7LCu","IG7Em2pha8O9","IHNlc3Npeg==","2KfYodin2Ko=","INCX0LDRhQ==","0Y/RidC40YU=","0L/RgA==","INC/0L7QtNCw0LvRjA==","INC+0L/RgNC10LTQtdC70LjRgtGM","4KWt","INix2YE=","5bm456aP","4Ls=","IHbEm2RvbQ==","INGB0LLQuNC00LXRgtC10LvRjA==","IM6Tzr/PhQ==","xLFsxLHEn8SxeWxh","55m76Yyy","IOS4i+i3jA==","INC/0LvRjg==","0L3QvtC0","INij2KzZhA==","IOCkleCkpQ==","6YO95LiN","IHNlbmU=","IHDEmw==","6KiI5YqD","INCw0YPQtA==","INC+0LTQvdC+0Lw=",
"IOS4h+WFgw==","INmI2YXYpw==","INCU0YDRg9Cz","6LW344GT","0LLQsNGO0YLRgdGP","0LvQsNGC0YM=","INiq2YjZhg==","0YnQsNGP","zq7Ouw==","INCf0YDQsA==","INin2LPYqtix2KfYqg==","4Li04LiZ4LmA4LiU","4KWN4KSX4KSk","wqDQtw==","INC/0L7Qu9C+0YI=","5q6W","5qGG","IFNpc3RlbQ==","IHJ1a3U=","44OD44Kr44O8","INC+0LHRj9C30LDQvQ==","IGvDtsWf","IGFkxLFuxLE=","2LTZhdin2YTbjA==","bmHEjWVuw60=","IC7vvI8=","IOWumA==","IHRvcGx1bXNhbA==","6Kqk","INio2YfYqNmI2K8=","0YHRgtCy0LXQvdC90LDRjw==","INii2b4=","INis2YTYs9mH","44CA770=","5ZOt","5omA5bGe","5pKu","7KKA","IM61zrk=","7LmY66W8","IOqzvOyglQ==","dXVtbA==","zrTOrA==","INiy2K8=","7JuQ7J2E","IHbEm2PDrQ==","2K/Yqw==","IHNhbmtp","5YOP5piv","0LvQsNGA0LA=","7IKs7J20","44KP44KM44Gf","IMSRw7Nu","5ZCv5Yqo","IGdpw6BuaA==","IGvEsXJtxLF6xLE=","2K7ZhQ==","5pCN","5YiH44KK","44K144O844OT44K5","2YfYp9ix","2LDZg9ix","0L7RgNC+0Lc=","4KWI4KSC4KWkCgo=","IO2ZiO2OmOydtOyngA==","INmD2KjZitix2Kk=","0L3QuNC90LA=","7ZWY7Jqw","5byV55So6aKR5qyh","4KWu","INCx0LDRgtGM0LrRltCy","4Lif4Lit4Lij","4Li1Lg==","7KCd7Yq4","6ZiF6K+75qyh5pWw","IGl0aXI=","0YjQuNC9","IFbhuq15","54Ku","0LvQsNCz0L7QtA==","2LTZhtin2LM=","4buQ","INGP0LPQvtC0","IOykkeyVmQ==","2LHZiti3","IOyImO2WiQ==","IOS4gOiIrA==","INGF0LLQuNC70LjQvQ==","INCc0L7QttC90L4=","INC90LDRh9Cw0LvQtQ==","INC+0LTQvdC+0LI=","IMOcw6c=","0YbQuNC+0L3QvdGL0Lk=","IOyalQ==","5ryC","5bKz","2KrYr9mJ","zrrOt8+C","4oCZbmRh","77yQ77yQ","6KqJ","6aeF5b6S5q2p","INmB2LHYstmG2K8=","5YWs6Lev","zrHPg86vzrHPgg==","4Liy4LiT4Liy4LiI","65Gl","IM+Azr/OuQ==","INio2K/Yp9mG","0LrQsNC/","IOyeiOuKlOuNsA==","77yM5q2k","4Lib4Lij4Liw4LmC4Lii4LiK4LiZ","INqp2LTZiNix2YfYp9uM","4Li44Liq","44G544GN","INGB0LDQvNGL0Lk=","INC/0LvRjw==","INCx0LXQtA==","5Lq65omN","4Liq4Lir4Lij","4Li54LiV","IGt1bGxhbsSxbcSx","7ZWZ64WE","5rK755aX","44CC5LiN6L+H","5qOa","64Ko64+E","INii2KrYtA==","z4fOrc+C","IGZ1bmtjaQ==","0L3QvtC+0LHRgNCw0Lc=","4KWL4KSr","IGthcHM=","4Liy4Lip4LiO","KNi5","77yM5Yqg","4LmK4LiB","INmH2LQ=","INiv2LHZiNmG","INC80LXRhw==","INC/0YDQtdC20LTQtQ==","4LmI4Lii","INin2LHYtNiv","4Li
y4LmA4Lil","5q+U6LyD","INiw2qnYsQ==","IOadoQ==","0Io=","0YPQutGA0LDRl9C9","2YrZhtin2Ko=","7KKL","0LTQuNGP","z4TPgc65","INCa0LDQtw==","2YLZhNin2YQ=","Xyws","INqG2Ko=","IOydvOyglQ==","INCf0YDQvtGE","5rOb","IGRydWjDvQ==","0YfRg9C6","bGVkaWs=","IGhleWVj","0YvQstCw0Ls=","IETDvG55","IOeZug==","IHDFmcOhdGVs","zrLOrM67","INi62LE=","64uo7LK0","7Juo65SU7Iuc","0YDQsNGJ0LXQvdC40Y8=","0L3RhtC40LrQu9C+0L/QtdC0","IHBvZG5pa2F0ZWw=","IOyLoOyehQ==","INmB2LHYog==","0LjQu9C40YHRjw==","IG9sdW1sdQ==","4KWN4KS34KSu4KSk","INmF2KrYrti12LU=","0LnQvtC8","2KTYp9mE","INCd0LDRgg==","7Jik64qU","IE3DvGTDvHJsw7zEn8O8","IEjDoG5o","INiz2KfYqNmC","77yJ55qE","IFF1w70=","bMOhZMOhbsOt","IOyatOuPmQ==","INCY0YU=","6Ku+","bMSxxJ/EsW7EsW4=","bGls","dcSN","INGH0LXQvNC/0ZbQvtC9","0YLQvtC2","IOS9mw==","0L3QuNGG0LU=","INC/0LXRgNCy0L7Qs9C+","INGB0L7QvA==","z4fPjg==","xZlpaw==","0LjRgtC10LvRjNGB0YLQstCw","IMSwa2k=","IGFza2VyaQ==","Y2lzaQ==","IGplZG7DrW0=","IHN0YW5pY2U=","6IKh56Wo","4Lic4Lih","VOG7qw==","xaFhaw==","z4TOr86x","0LzQsNC80Lg=","44GM5Ye6","zrzOv86v","0LzQsNGU","66Cl7J20","44KE44Gj44Gm","IOW8tQ==","2IwK","IMK7Cg==","2KfYrNin2Ko=","4b2z","5pmC44Gu","INC/0L7QutC+0Ls=","0ZbRgtC10YI=","IO2VtOqysA==","IGRlZGlt","INGC0LLQtdGA0LQ=","INC20LXQvdGJ0LjQvdCw","0LXQtNC40L3QuA==","INm+24zaqQ==","aXZlcnNpdGU=","INii2LPbjNin2Kg=","INGF0LDRgNCw0LrRgtC10YDQuNGB0YLQuNC60Lg=","INij2YbZh9in","INGD0LrRgNCw0ZfQvdGB0YzQutC+0Zc=","INin2K7YqtmE2KfZgQ==","IHRleg==","z4HOtc+F","IGtvbnVtdQ==","INGC0LXRhdC90ZY=","0LzRltCy","6Iqv","IM+DzrXOuw==","xKI=","zrzOuc+D","4Li14LmJCg==","IG1uZQ==","INC+0YLQstC10Yc=","IM6J","IOmHjg==","IGfhuqVw","INC/0YDQvtC00YPQutGC0Ys=","INCh0YDQtdC0","0ZbQu9C70Y8=","4Lia4Lit4LiB","IHTFmcOtZHk=","IHRo4buV","44OH44Kj44Ki","z4DOv865zrc=","zr3Otc65","5oiR5Lus55qE","IHByb2Zlc3lvbmVs","IFJha291","INCy0LjQtNC90L4=","IHpieQ==","INit2KfZhNuM","IOmjnw==","IEzDoG0=","INqv2LPYqg==","INCi0LjQvw==","zrjOuQ==","w6F2aXM=","2ZDYqA==","5Y+v6IO95oCn","INGB0LXQvNC10Lk=","44KJ44KM44Gm44GE44KL","7IOB7ZKI","IM6/z4U=","IOCkheCkl+Ck
uA==","0L7Qu9C+0Lw=","zrPOv869","INGB0LLRj9GJ","5pOm","z4PPhM63zrrOtQ==","6ICF55qE","LeCklQ==","0YLQuNC4","INCy0LjQt9C90LDRh9C10L3QvdGP","5Y+R5Ye6","0LTQsNGF","INC80L7RgNGP","5om+5Yiw","2YTZiNio","6IqZ","INGE0LDQutGC","5q+N5Lqy","aWRsbw==","IFN0YWQ=","0Y3QuQ==","7JuQ7J20","4KSP4KSo","5pW05Liq","IGbEsWs=","INmF2KfYqg==","z4DOv869","IOqyveq4sOuPhA==","IM6xzrQ=","IHZ6cG9t","IG7hu5Np","INmG2YLYp9i3","0L7QttC00LXQvdC40LU=","INC30LDQu9GW0Lc=","IHLhu6dp","6L6w","LjouOi46LjouOi46LjouOi46LjouOi46LjouOi46Ljo=","IE3DnA==","IGthcmk=","INGB0L7QsdGL","7Ja07KeE","2LHZitiz","dWJ1","INiu2YTZgQ==","2LjZudi3","5p2J","IOaZrumAmg==","INmF2YjYp9i32YbYqQ==","INGB0YLQsNC90YM=","IOq3uOuFgOydmA==","INmE2YPYsdip","IG9zbQ==","INGD0YDQvtC2","0LXQs9Cw","IGZlbHNl","5oCd6ICD","44CM44GI","INC90L7QstC40YU=","4LmQ","w7xtbA==","IO2UvO2VtA==","7J2867CY","IHTDvHLDvA==","INC80ZbRgdGC0ZY=","IGthxb5kw6k=","INmF2LPYrNiv","4bqlYw==","INmB2qnbjA==","IHlhc2Fs","5bCx566X","INC+0LHQu9C40YfRh9GP","INmE2K/Zig==","2KfYqNin2Ko=","INGB0L/QsNGB","6rWw7JqU","INC/0LDQtA==","INCx0YDQsNGC","6YeN5aSn","IGTDvHplbmxlbmVu","R8O8bg==","IGFwbGlrYWNl","4Lit4Lir","IOeF","INGB0L7RgdGC0L7QuNGC","6K+E5Lu3","IER1eQ==","2LfYp9mC","INC/0YDQuNC00LXRgtGB0Y8=","IHRvbGlr","IG9icm92","IHDFmWlwb2o=","IMSxxZ/EsQ==","2q/ZiNuM","5pyf5b6F","0LjQv9C70L7QvA==","IGluY2U=","INCh0L7QsQ==","0LXQvdGM0Y4=","6KeS6Imy","IOC4leC4ow==","IGLhuqFp","IOqwgOuKpe2VnA==","IGJsw616aw==","IHTDoWNo","INCy0LjQtNGL","0LjRh9C90LA=","IHZ5xb5hZA==","IOyGkOydhA==","INCd0ZbQvNC10Yc=","5Z+65LqO","INCa0YDQuA==","INi52LLbjNiy","dGlsZXI=","0LXQstGW","IG1vxb5ub3N0aQ==","2KjYp9iy","IOyCrOunnQ==","IHrFmWVqbcSb","7Zek","IMO8csO8bmxlcmk=","IM6gzrvOtw==","0LDQutC4","44KS6ZaL","YW5vdQ==","5Zu944Gu","IHlhxZ9hbmFu","INGB0LXQstC10YA=","IOapnw==","4Lih4Liy4LiB4Lih4Liy4Lii","IO2RnO2YhA==","4Lij4Liq","INi22LHYqNmH","IEV2ZXQ=","5oa2","INiv2YLbjNmC","INCy0L7Qt9C90LjQutC90L7Qsg==","7Jyg66i4","IO2RnOyLnA==","24zYtNmG","44OX44Op","0YLRjg==","2YjYs9uM","KeydtA==","6K+B5piO","44Gn44GN44G+44GZ","7
IiY7J2Y","55aG","INmF2YHZh9mI2YU=","0L7Rh9Cw0YLQutGD","4KS+4KSy4KSV","5qGC","INC+0YXQvtGA0L7QvdC4","INin2LHYstuM2KfYqNuM","IOy1nOuMgA==","IHRob+G6o2k=","INCm0LXQvdGC0YDQsNC70Yw=","IOeVmQ==","4Lib4Lij4Liw4LmA4Lig4LiX","5rW35aSW","IMWedQ==","7Zmc64+Z","IGR2xJttYQ==","aXN0cm92c3R2w60=","IGFyYWPEsWzEscSfxLF5bGE=","IHRy4buZbg==","wrs6","7Yux","INmE24zarw==","LtCa","INmF2YLYp9uM2LPZhw==","INCy0LzRlg==","2LHZiNio","INin2YTYtNmF","IGRlbm7Emw==","0YPRh9Cw","5YW5","0YnQuNC8","IO2Kue2eiA==","INin2LPYqtin2YbYr9in2LHYrw==","4KWA4KSn","44K444Ki","4LmH4LmH","0LjRgdGB","IGthemFuw6c=","IHrDrXNrYWw=","5Zue5p2l","INC/0Y/RgtGM","IMSRw6Np","INmI2LHYrw==","IOyVlQ==","4Li44LiX4Lij","5Yqo54mp","IHB1Ymxpaw==","5oiQ5pys","5oiQ5ZGY","44Kk44Kv","2LTYsdmD2Kk=","4b+Gz4I=","IHlvbGE=","w7x5b3J1eg==","INC60YPRgNC4","INC/0L7RhdC+0LY=","IOygnOqwgA==","4KS/4KSv4KSk","2KfYptmE2Kk=","IOOBvg==","4KS84KWH4KSC","0YHRjNC60LjQvNC4","4oCc5L2g","aW1pemRl","7LWc7Iug","4bqs","6J8=","4LiE4Lij4Lit4Lia","44CA44CA44CAIOOAgCDjgIAg44CA","2KrYug==","IFbFoWVjaA==","4Lix4Lib4LiU4Liy4Lir","IGF0ZA==","0LLQvtGO","IHlhcMSxbQ==","b2xvZ2lja8Op","INC/0LvQtdC9","IGxhesSxbQ==","cnVuZw==","7ISc6rSA","IGppbsO9","IHRyw7Ju","INC/0L7Qu9GW0YLQuNC60Lg=","2KfZg9mF","2K/bjNqv2LE=","4KWI4KSCLgo=","INin2YfYrw==","IOODjQ==","INC/0YDQvtC00YPQutGC0L7Qsg==","5oKf","IHDFmcOtcGFkZWNo","IHphxI1hbGE=","5YWl44KM","INGA0ZbQstC90ZY=","5oSf5oOF","IM6nzrE=","7KO9","4Li04LiI4Liy4Lij4LiT","wqDQsQ==","0ZbRl9Cy","2KjYtA==","55qE6Zeu6aKY","IHphc3R1cA==","66Ck7JqU","44Gn44GZ44Gt","4oCM2K/Yp9mG","77yM5oKo","IHV2xJtkb20=","44Gm44KL","7IKs656M","bHVu","6ZuG5ZCI","66e5","IMW+aWQ=","4KSK","IHRycA==","0LvQtdC90LjRhQ==","77y/X18=","0JzQng==","5byL","zrvOrc6/zr0=","IMSRw7Jp","INC60YDQvtC6","bGF5xLFjxLE=","7Lac7J6l66eI7IKs7KeA","5ZGI","6Zye","INC/0L7Qs9C70Y/QtA==","2KrYsdmD","INiq2YHYp9mI2Ko=","IOWurg==","INiv2YjYsdio24zZhg==","5pS+5Zyo","INGB0LvRg9GH0LDQtdCy","IM+Fz4DOt8+BzrU=","66ee","44GZ44GZ","6rKg64uk","4Lij4Liy4Lii4LiB4Liy4Lij","IM+Az4HOuc69","INGB0LzQtdGI","5ae
J","IHbDvXNsZWRreQ==","IHBvdHZy","5Y+R6KGM","IHTDumk=","IOyCrOudvA==","56uZ5Zyo","IGpha8O9","IOC4muC4suC4hw==","IGRpa2thdGU=","INiv2LHYotmF2K8=","5o6S5ZCN","csOhbG7DrQ==","6rO87J2Y","5L21","0L7Qu9Cw0LM=","aXNpeWxl","IOa9","IOCkpOCkrg==","IGRpag==","IG5ow6FuaA==","IFJlaw==","6K6+5pa9","IHBvZG3DrW5law==","5bm25LiN","0LrRg9GC","IOqzoOugpA==","55qE5aOw6Z+z","5oiY5LqJ","0LTQsNGP","IOq0gOyLrA==","INGE0ZbQvdCw0L3RgQ==","IEvDtnk=","INC20LDQuw==","INGB0LvRg9C20LHQuA==","0LzQtdC90LA=","2KrZitin2LE=","INGH0LXQvNC/0LjQvtC9","z4DOuc+D","bGFuZMSxcm1h","bWFrdGFu","IOS4tg==","4LmI4Lit4Liq","IG3DvMWfdGVyaQ==","INi12YbYrw==","IGV0bWVzaQ==","INC/0L7RgNGC","zr3Ov869z4TOsc65","IOOFi+OFiw==","IEtBUg==","IHVjaA==","INiu2YTZgg==","4Liy4Lip4LiO4Lij","5q2h","INC40LzQtdC90Lg=","44Gg44GR44Gp","IOyLpOyLnA==","z4PPic+A","IOyj","dMSbxb4=","IMO2emVsbGlrbGVyaQ==","INio2b4=","INC40LfQvtCx0YDQsNC2","2YrZhdmD2YY=","IOODlA==","INCU0LjQsg==","INil2Yo=","2YPZitmE","IMWfaWs=","IOCkhuCklg==","bGFyxLFuxLF6xLE=","INCy0ZbQtNGA0ZbQtw==","INGA0L7QsdC+0YLQsA==","IHRhcmlm","INin2YjYqg==","xLFubWE=","6aOf44G5","IHV6YXbFmQ==","66O4","55uR552j","IDrvvLw=","zrjPhc69","4LiU4Lij","YWxhcsSxbmRhbg==","6Ieq5ouN","IHJvxI1uw60=","4KS+4KSH4KS1","INmD2YjYsQ==","IM+EzrHOuc69","INGW0L3QtNC40LI=","cnZl","IM69zrXPhs+Oz4POtc65z4I=","IGLhu5Fu","IOW/qw==","INGB0L7Qu9GM","bGnEn2luZGU=","4KS/4KSo4KSf","YWh0YXI=","IG5lYmV6cGXEjQ==","5pei54S2","IOuMgOyghA==","INmG2q/Zh9iv2KfYsduM","IHrDrXNrYXQ=","INC90LDQu9C40YfQuNC1","IGFrcw==","77yJ44CCCgo=","IHJvZGlueQ==","INC30LDRhdGW0LQ=","5b6u56yR","wqDQlNCw","cmFkdQ==","iW5o","cGxlcw==","IEtvbnM=","4Li04LmC4Lil","INin2YTZiNi1","5ZCs5Yiw","INGB0L/QvtGA0YLQuNCy","INGB0LDQudGC0LU=","INin2Lg=","bGFyxLFuZGFraQ==","IHThu5Vu","0J3Qhg==","IG5lZG9zdA==","INGC0L7RgNCz0ZbQsg==","INin24zYqg==","INin2K7Yqti12KfYtQ==","IMOceQ==","IFNhZGVjZQ==","INmF2K7YsdmI2Lc=","xIFu","w6dlc2k=","IOeK","44KC44Gj44Go","IOmfkw==","6LWW","INC/0L7Qu9GD0YfQtdC90LjRjw==","IOuY","4oCZ0Zc=","YsOtcg==","INCx0ZbQsdC70ZY=","IEThu7E=","0
LbQtdC90LXRgA==","572R5YiK","IOCksuClnOCklQ==","INGD0YfQvdGW0LI=","6Iiw","IMOWxJ9yZW4=","IG9sYQ==","IOClpOKAnQoK","4Lij4Liw4LmA4Lia","4b2y","INix2LI=","0LXQuA==","0Y/Rh9C4","2K3YqA==","5pKk","44G+44Gf44Gv","0LHQuNC90LA=","IM6gzrXPgQ==","INC+0YLQvdC+0YHQuNGC0YHRjw==","5YmN55qE","IMWhxaU=","IHnEsWxkYQ==","Ojo6Ojp8","dXN0aWw=","2KfZhNil","IHNvdcSNYXNuw6k=","INmG24zYsdmI24w=","0YfQtdGB0LrQvtC1","2LjZgQ==","INm+24zYtNuM2YbZhw==","INi52YHYtA==","IHJvc3RsaW4=","572R5YiK5LiL6L295qyh5pWw","INC/0YDQuNCz0L7RgtC+0LLQuNGC0Yw=","44OM","INmI2YXYuQ==","IGJlY2Vy","IOOCsQ==","z4fOrs+C","0L7RgdGC0YPQvw==","IOuwnOunpA==","0ZbQudC90L7Qs9C+","IGhyZA==","INC/0YDQtdC/0LDRgNCw0YLRiw==","INmB2LHYtg==","IFR5dG8=","INC60YDQsNGX0L0=","INiy2KfYrw==","IGlrdGlkYXI=","7KeT","2ZHYsQ==","0YDRj9C00YM=","0LrRltC5","4pSj","INC60L7QttC4","INiq2KfYstmH","b2JlYw==","aW5hZQ==","IHZ5asOhZA==","INix2YHYqtmH","0KnQvg==","IEJ5bG8=","0L7RgtCy","INC00LXQvdGM0LPQuA==","6aeG","INC80LDRiNC40L0=","INij2Kw=","7LSI65Ox7ZWZ6rWQ","ZMSxxJ/EsW5kYQ==","0LHQsNGB","IOaguQ==","zpHOnc6k","2ZLYrQ==","IGplamljaMW+","7JeQ7ISc7J2Y","INCw0LTQttC1","IOyP","z4POv8+F","ZXRsZXJp","INio2LnYr9uM","IOyekOuPmeywqA==","4Li04LiN4LiN","IHRpc2s=","44O844K544OI","IOCkruCkpOCksuCkrA==","6rOE7ZqN","44Km44OI","IOC5gOC4oeC4leC4ow==","IG9wc2l5b24=","INGA0LDQstC90L4=","INio24zZhdmH","IOuovOyggA==","0LjRgtC10LvRjNC90YvQvA==","INC90ZbQsdC4","INC00LXRgdGP0YI=","INGB0LjRgtGD0LDRhtC40Lg=","0LXRgNGI0LU=","xL4=","4Li44LiV4Lij","IHnDtm5ldGltaQ==","6ZCY","INmF24zYqtmI2KfZhg==","INiy2YbYr9mH","44Ot44Oz","IEtCUw==","7ISc67mE7Iqk","77ug","ZWNrw6lobw==","INmC2KfYqNmE24zYqg==","44CC5LuK","w61uxJs=","INGB0LzQvtCz","INGB0LvRi9GI","2ZLZgQ==","cG/FmcOhZA==","0LXQu9GM0L3Qvg==","IM61zq/Ph86xzr0=","LdCf0LXRgtC10YDQsQ==","IENoaeG6v24=","w6lyeQ==","INGW0L3RgdGC0LjRgtGD0YI=","57uG6IOe","0YvRn04=","IHZ1YQ==","IOCkheCktg==","0YDQvtGB0YLQvg==","IHbFr8SNaQ==","67+Q","IGxp4buHdA==","IO2VtQ==","INin2YHYsQ==","IFRla25paw==","IHJvbGk=","INC/0L7Qv9GL0YI=","0LDRgtC60ZbQsg==","IMO8bml2
ZXJzaXQ=","0LDRgtC+0YDRiw==","0Y7RidC40YXRgdGP","INiq2LY=","0LvRjtGH0LDQtdGC0YHRjw==","IO2WieuztQ==","IGF5csSxbnTEsWzEsQ==","INC60LjRgNC/","5ou8","64GU","0LvQsNGC0LA=","IGtob8Ohbg==","IGjDomzDog==","z4PPhQ==","0L7Qs9C70LDRgQ==","5o6l552A","6Z2p5ZG9","IHDFmWVi","4LmA4LiJ4Lil","INin2YTZhdmE2YTbjA==","5aCG","7Y+Q","4LiV4Lil4Lit4LiU","wrDQoQ==","7IKs656R","INCz0LjQsQ==","67KI7Ke4","5pS55Y+Y","6KGo546w","0LjRh9C10YHQutC40Lw=","4Liq4Lih4LmA4LiU","5bGF5rCR","wps=","IOyVhOydtOuUlA==","INC80LXQttC00YPQvdCw0YDQvtC0","IHllbQ==","IG3DvGw=","INin24zYs9iq","IOODtA==","4Lix4LiZ4LmE4LiU","4KWA4KSj","5YW25a6e","IGdlbGVuZWs=","67aB64+E","4LmJ4Liy4LiV","IOyJrA==","IM+Azq0=","INmD2KfZhdmE","INiq2LnZhduM2LE=","6Ki0","67mZ","aXlpbQ==","5bC/","6YKj5qC3","6rWt7J2Y","44GX44Gm44GK44KK","IG5pxb4=","IM66zr/OvQ==","4LmI4Liy4Lit","IM6zzrU=","INCh0LXQstC10YA=","ZWRpw6FsbsOt","44Gf44Gh44Gu","bWF5YWNhaw==","0Zk=","INGD0LPQuw==","IGthcGFz","0YPQstCw0LvQuNGB0Y8=","INC80LXRgdGP0YbQsA==","4buvdQ==","4Li04Lil4Lil","44KI44KK44KC","4KWH4KSj","IOWuog==","IGRlxJ9lcmxp","2YjYp9iy","4Li14Lit4Lii","IOWPiA==","IOC4lOC4ow==","INmG2KfYqA==","INiq2YTZiNuM2LLbjNmI2YY=","IG9sYW5sYXI=","5LyY56eA","2YPYp9mE","INC00LXRgdGP0YLQuA==","bcOhbg==","INGA0LDQvdGM","IOygnOy2nA==","6LOi","0LDQsdC+","IHRlY2huaWs=","IEtp4buDbQ==","dGVraQ==","4bk=","IG1uxJs=","IOqzteqwhA==","IE1law==","INin2LnYqtmF2KfYrw==","4LmM4LmE4LiU","zrXPgc+M","INGD0LTQsNGA","0L7Rh9GM","5qaC5b+1","0YDQsNC7","0LDQu9GM0L3Ri9C80Lg=","4KWB4KSw4KS4","csOhY2k=","INmC2YjZhA==","IOCkpuCktQ==","INC/0YDQsNCy0LTQsA==","IOW/hQ==","IGRvc3Vk","0L3Rg9GC0YzRgdGP","TsSDbQ==","4LiY4LiZ","IGRva3Vu","IOWcqOe6vw==","4Li54LmE","4buleQ==","INC90L7QstGL0YU=","IG1lenVu","IEPhuqdu","4LiB4Liy4Lij4Lie","IOyYiOyglQ==","z4POrg==","4LmI4LiZ4LmA4LiB4Lih","INmI2KfZhNiz","44Oz44OG44Kj","55yL6KeB","INiz2KfZhNmF","INCx0LDQs9Cw0YLRjNC+0YU=","IMSRw6Bp","INiv2LPYqtuM","4Lie4Lit","0LXQv9GC0Lg=","IOyghO2ZlA==","5pmC44Gr","IFNlem5hbQ==","0LzRltC90YM=","Oz8j","4KWA4KS44KSw","INqG24zYs9iq","zr3Ov865zr
E=","4Lix4LiZ4Lit","IOC4hOC4sw==","IOuztO2YuA==","IGlkZGlh","IM6yzrnOsg==","6auY5Lit","2ag=","0JLQsNC2","INC40YHQv9C+0LvQvQ==","0YjRgtC+0LI=","IFRhxZ8=","7JuF","5Yq5","IOWPgw==","IHByb3N0b3J1","INGB0L/QsNC0","0LXRgNC40L3QsA==","IHDFmWVrbGFk","xaFvdg==","INmB2YfZhQ==","5oqR","INin2KjYqtiv2Kc=","44KS44GK","bGlrbGVy","INmF2KfZgw==","IGtvbnV0","INiv2KfZhti02KzZiNuM","INC+0L/RgtC40Lw=","INCx0YPQvNCw","INC70Y7QtNGP0Lw=","INC70ZbQutCw","INGA0L7Qt9C/0L7QstGW0LQ=","bmVzZW7DrQ==","IOC4oOC4suC4ng==","0LjRh9C90LjQuQ==","2KfYt9mE","0Y7RidC40LzQuA==","44GP44Go","6a2v","INis2YbYs9uM","0JjQog==","4KSw4KSy","INqp2YjYr9qp","0L7Qu9C40YI=","INGB0YLRgNGD0LrRgtGD0YA=","dmVraWxp","IOCkrOCkrw==","IGdlbG1pxZ8=","4KS/4KSw4KSr","INC90LDQudC60YDQsA==","INCU0LbQvtC9","IOODl+ODrQ==","IHlhxZ9sxLE=","IGthcsSxxZ90xLFy","IHbEm3TFoWlub3U=","IHZhemdlw6c=","4LmJ4Liy4LiE","bGVuZGlybWU=","IOeoiw==","6K+06K+d","IO2VhOyalO2VnA==","YcWZaWxv","IGxlxb7DrQ==","IEFtZXJpa2Fu","44KE44GZ","dmFqw61jw60=","0J3Qrw==","IOyXhOuniA==","IOWD","csOhbA==","IMOnYXk=","dHXEnw==","4Li44LiN4Liy4LiV","INGB0LvQuNCy","zr3Ov8+F","IE92","IENIUA==","IFplbcSb","IMSNZXNrw70=","IFRow6FuaA==","0LjRgtC10LvRjNC90L7RgdGC0Yw=","5oSP5LmJ","4KWN4KSw4KSu4KSj","INC00LjQsNC80LXRgg==","IGtsaW4=","INqp2LHbjA==","44Gn44Gv44Gq44GP","6aOv5bqX","IGvDqm5o","INGA0LDQvdGM0YjQtQ==","44KS44GX44Gf","INC/0YDQuNCx0L7RgA==","IOCkluCkpOCksA==","IHl1","6aeQ","INGA0LDQsdC+","INCh0KDQodCg","6Iqs","xb5pbGE=","0LXRgNGC0LA=","0LjRgdGC0YDQsA==","INC60L3QuNCz0Lg=","IEZyYW5jaWU=","INqY2KfZvg==","IM6azr/PhQ==","4Lix4Lin4LmA4Lit4LiH","IGzhuq9uZw==","INC90LDQvNC4","INC/0L7QtNC+0Lk=","0LTRgNC+0Lw=","b2J1cw==","0JLRltC9","IHN0YWxv","IOCkj+CknA==","IExpbmg=","ZWJpbGlyaXo=","INC30LDQstGC0YDQsA==","zrzOtc+Bzr8=","IM6tzr0=","0Y/RgtC90L4=","INC00L7RgNC+0LY=","5Y+C54Wn","z4POuc6/","4LmJ4LmA4LiB","YW7DvWNo","57eg","IOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgOOAgA==","5Yqb55qE","IFPEsXI=","IOyngOuPhA==","57eK","IHBvxI10dQ==","77yM5LiO","5LiW57SA","0LXQvNC+0LPQvg==","IGh1c3Vz","IMO
2bMOnw7xkZQ==","IHRy4bulYw==","4Lib4Lil4Lit4LiU4Lig","wqBwxZnDrQ==","IELDtmxnZXNp","0LzQvtC8","44Gr44Gm","IOyqveyngA==","xJt0xaE=","IOyEseqztQ==","4KSw4KSk","dXJkdQ==","IOybgOyngQ==","0Z/QrQ==","bsOta2Vt","IHNrdXRlxI1ub3N0aQ==","INC00LDRgg==","bmV1bQ==","INGC0LDQsdC70LXRgg==","anZ1","IHNlZG0=","2LPZitip","INC60L7RgNC+0LE=","ZW1tZWw=","44Gk44GR","6aaZ6JWJ","INi02K7YtduM2Ko=","5LiK5LqG","2YjYsdin","INCw0YLQvNC+0YE=","INC70LXQuQ==","IHpwcmF2","IOuVhQ==","4Li54LiX","INin2LPYsQ==","IEF5ZMSxbg==","INi52YXZhNmK2Kk=","INC00ZbRlA==","IGTDtms=","IOCkq+Cksg==","IOyCrOuejOuTpOydtA==","INC90LDRgtGD0YDQsNC70Yw=","5p+c","5rip5bqm","IGtsZXM=","INC40L3QstC10YHRgtC4","c8O8eg==","5pKw","IOOCouODqw==","IOiS","0LDQtNC60Lg=","IGtsw63EjQ==","z4fOtc6v","IFRoaeG6v3Q=","INiz2LHbjNi5","IM+AzrXPgc65zr/Ph86u","2YjZgtmB","z4HPic+Dzrc=","INiz2YTYp9mF2Ko=","65Ok64+E","IHZlxZllam7DqQ==","IHZpdGFt","INio2KfYstuM2q/YsQ==","INGA0LXRhtC10L/Rgg==","IOychO2VtOyEnA==","INij2YPYqNix","IGvDvHQ=","66+87KO8","IHTDqcW+","IOW8lQ==","0YfQsNGB0L3Qvg==","55qE5Zyw","IGFyY2hpdGVrdA==","INCx0LDQutGC0LXRgA==","IOOBjQ==","INC+0LTQtdGA0LY=","INiq2KzYp9ix24w=","6Z2I","IHJlY2Vw","6am2","INiv2YfZhw==","6LKM","57WQ5ama","xLFsxLHDpw==","44GL44KJ44Gv","5b+D6YeM","5oqV6LOH","6LKd","INC60YPQu9GM0YLRg9GA0Ys=","IOWwkQ==","4LmB4Lie4Lij","zrPOus+M","YXLEsW0=","INin2LPYp9iz24w=","IHBvc2xlZG7DrWNo","INmF2YXZhg==","INC/0L7Qt9C40YLQuNCy","7KCk","0YPQstCw0LLRgdGP","INis2LLYpg==","7J207J6Q","INC40L3RgdGC0YDRg9C6","IM63zrvOtc66","IGRlbWly","5Lit5paH5a2X5bmV","INi52KfYtNmC","INio2KfZhNmC","IG1heg==","zqzOvc65","IGTDvMSf","IM66z4HOsQ==","INCx0YPQtNGC0L4=","56aP5Yip","INC/0YDQtdC00L3QsNC30L3QsNGH","2YXZhNip","INCx0YPQtNC40L3QutGD","xaVhbg==","7ZWA","57S55LuL","2qnYsg==","INC60LDRhNC10LQ=","44Gr6KaL","4Liy4LiV4Lij4LiQ4Liy4LiZ","66Gc64qU","aXbEmw==","6IO95rqQ","77yM5YWo","INGD0LTQuNCy","IOunjOuCmA==","0JLQkA==","IEfDvHI=","INC00YDRg9Cz0LjQvA==","z4PPjQ==","IG/En2x1","IOqwgOq5jA==","INC30L3QsNGH0LjRgtC10LvRjNC90L4=","0L7Qt9GW","IG3hu7Fj","IEJlx
Z8=","IGplemQ=","w6F2xJs=","z4TOt8+DzrU=","44Gm44GE44Gq44GE","INCh0LLRj9GC","IOCkruCktg==","IM6kzr/PhQ==","5aOw44KS","INGB0LDQvNC+0LU=","IOWMug==","IOyCrOuejOydgA==","INmF2YTYqg==","IGpva2Vy","IG5lb2I=","INGC0LDQutCw","INmH2YHYqg==","IM60zrXOtM6/","INC30LDRhdC+0L8=","INin2YTZhdiu2Ko=","0LXQt9C00LA=","IO2VnOuyiA==","INi52KfZhdip","IGRvc3RhdGU=","IHBsYXY=","5qW944GX","LjsuOy47Ljs=","0LLQsNGU","IGLhu6Vp","IMSR4buh","IG15c2zDrQ==","INmG2KfYsQ==","IG7DunQ=","INC80LDQu9Cw","zqTOoQ==","INin2YTYsdmF2LLZitip","bGFkxLFt","5LiA57eS","IGnFn8On","bGl2w6k=","66W06rKM","0LDQvdC90LDRjw==","2LjLhti3","IGThu6tuZw==","2YTZg9iq2LE=","562U5qGI","INmF2YjZgti524zYqg==","INGW0L3QvtC30LXQvA==","INC40YHRhw==","INC90LXQv9GA0LDQstC40LvRjA==","YmFrYW4=","IOeJiA==","0LXQvdC90Y4=","4LiH4LmA4Lio","4LiE4Lin4Liy4Lih4LiE","JS4K","4LmI4LmA4Lib","INii2KjbjA==","IHN0w6F0eQ==","INiq2LHYqtuM2Kg=","xI1lbcW+","IOm5","INmB2KfZhA==","IGJlbGlybGVu","IOKGmA==","6Iez5bCR","IEJ1bmxhcg==","IOS4kw==","INmF2K3Yp9iz","IOyEnOuyhA==","IGNhbmg=","INC/0YDQvtGC0Y/Qtg==","INC90ZbQvNC10YbRjA==","4KWI4KSg4KSV","64uJ","INC90LDQvdC10YE=","INCy0L7Qt9GA0LDRgdGC0LA=","IFvigKZdCg==","LuC4ng==","4Li04Lio4Liy4Liq4LiV4Lij","54G9","6rCZ7J2A","4Lil4LiH4LiX","44Kx44O844K5","IOOCouOCpA==","0YHRjg==","INmE2LE=","44GL44Gj44Gm","IOq4sOuwmA==","ICE6","INGB0Yo=","INi02YbYp9iz24w=","IOyVhOy5qA==","INi52KjYp9iz","IOC4leC4reC4mQ==","INC80LXRgtCw0LvQu9C4","0YjQuNC70LA=","IHBvZHJvYg==","0ZbRgdC90L4=","IOi1pA==","Y2lsZXI=","b3plbQ==","INC+0YHQvdC+0LLQvdGL0YU=","wqDgpJU=","4LiW4LiZ4LiZ","0LDQvdGC0LDQtg==","IETDrWt5","INqv2LDYp9ix24w=","5py65Lya","zr/Phc67zq/Ov8+F","0L7Rh9C10Lo=","INC90LDQv9C40YI=","INio24zYtNiq2LHbjA==","5L6N","INin2YTZhdmF","2YjYstmK2Lk=","IGfDtnpsZW0=","6LCD5pW0","wqBtaWxlcw==","IGtvYw==","4Lix4LiN4Lir","5rOz","IM6RzrPOsw==","INmG2YXYp9iy","4Li44LiX","44OP44Kk","IHRow7k=","0LrRg9C70Y8=","INC/0YPRgtC10Lw=","6Ie654Gj","IHZlcmdp","5aC05ZCI44Gv","INGC0YDRjNC+0YU=","IOuztOuptA==","4pay","z4XOsw==","INC00L7RgtGA0LjQvA==","5py1","IHVtxJt
uw60=","6Imv44GE","wqDguJnguLLguIc=","0I7Ri9GfTg==","5LiJ5Liq","4Li14Lii4Lij4LiV","77yM5ZCM5pe2","INGA0L7Qt9GA0LDRhdGD0L0=","IERlcnM=","44Gq44Gu","IOq3uOulvA==","ZGlrbGVyaQ==","IGhheWF0YQ==","6KeE6IyD","57uT5ZCI","IHNjw6k=","IGPGoW0=","5a246Zmi","INCE0LI=","IMSNbMOhbmVr","INC00L7RgdGC0LjQsw==","4KS+4KSH4KS4","zrXPhc+Dzrc=","6YGp55So","z4POv869","xLFsbWFrdGFkxLFy","66qF7J2E","xLFi","IHN0YXLFocOt","IGNow61u","5LiA5Liq5Lq6","IEZyYW50acWhZWs=","bsSbamk=","77uo","INmE2YTYrw==","IHBva29q","IGppaA==","44CN44CC","INi52KjYr9in2YQ=","44KT44Gn44GE44KL","INC80L7QtNC10LvRjA==","IHRlxZ9raWw=","IMSMZXI=","4LmA4LiU4Lit4Lij","J25h","zrvOv86zzq4=","IGtvbGE=","44OA44O8","0LjRgtC10LvQtdC8","IM+Dz4XOvc6/","IEt1cnVt","IHNuYWRubw==","INin2YTZgtix2KLZhg==","IFbhu4E=","6auY44GE","IHnEsWxkxLF6","IGJpcmlzaQ==","IGtow7pj","2YjbjNmE","5pyA5L2z","IOC4quC4suC4gg==","INCf0L7Qug==","4omg","4LmC4Lib4Lij4LmB4LiB4Lij4Lih","4KWN4KSv4KSv4KSo","6JGh","IG5vdsSb","YXnEsXA=","IFNpbmdhcA==","6LCT","44K244Kk44Oz","INC90L7QstGL0LU=","IGjhuqNv","IOiXpA==","44Oz44OW","wqAKCg==","zrjOtc65zrE=","INC/0L7Qv9Cw0LTQsA==","IOuUlOyekOyduA==","INiv2KfYtNiq2YbYrw==","INi02YbYp9iu2KrZhw==","z4POvM6xz4TOsQ==","5bmz5pa55YWs6YeM","IGfDtmw=","0LXQutC+0YLQvtGA","IG3DoWxv","INin2KzYp9iy2Yc=","2qnYp9ix2KfZhg==","INC/0ZbQtNC/0YDQuNGU0LzRgdGC0LI=","5LiJ5bm0","INiz2YHbjNiv","IM68zq3Pgc6/z4I=","0JnQmQ==","IGjGsA==","2LPZiNio","INmE2LDYpw==","IG5lbW92aXQ=","IGTDrXY=","xLBz","wrbCtg==","IHBoxrDhu51uZw==","INmG2K3ZiNmH","0Is=","IHpieXQ=","ZWRpaQ==","bmVjaA==","INCw0LTQvNGW0L3RltGB0YLRgNCw0YLQuNCy","IG5ldsSb","INC+0LY=","IMSQw7M=","4Lib4Lij4Liw4Lin","IHZob2Ruw6k=","IHVtxJts","INGA0LDQt9C70LjRh9C90YvQtQ==","IHDFmWlyb3o=","INio2K7YtNuM","44Gu5aSn","INin2YTZg9mH","ZWNrw6E=","IHpvcnVubHU=","INCc0LjQutC+0LvQsA==","IGFtZWw=","0LrQvtCy0YvQtQ==","Ojo6Oi8=","5LiN5ZCM55qE","INmI2YPYp9mG2Ko=","4Lit4Lit","bMOhc2ls","INC/0YDQtdC00L/QvtC70LDQsw==","772x","IM69zrU=","INC90L7QstGL0Lk=","IOyYge2WpeydhA==","IOqwgOynhA==","5YOF","WUQ=","INio2KfYug==","INi0
2qnYs9iq","IGfDvG5leQ==","0LjRgdGM","44GL44Gq44GE","IFTDsmE=","INqv2LHYr9uM2K8=","2K3ZhA==","bHV2w60=","dsOpZA==","IOyYtw==","IM61z4DOsQ==","INGC0LjRgdGP0Yc=","IOq9gw==","IFBVUw==","INC00YPQvNC60YM=","IOKAnQo=","IOyKpO2PrOy4oA==","2YfZhw==","IGfhuq9uZw==","4Li04Lig4Liy4Lie","6YeM6Z2i","YnLEsXM=","IHrDoWI=","zrrOsc+C","IOWPjOe6vw==","4Lil4Lil","IMSQw6Bp","5a245qCh","INGA0LDRgdC/0YDQtdC0","INGB0YLQsNC90LXRgg==","INC70LDQug==","INC/0L7QtNC6","IGfDtnJlbg==","66W06rOg","INGE0YDRg9C60YI=","7ZOo7YSw","44GZ44KM44Gw","44KS5L2c","4Lit4Lit4LiB4LmB4Lia4Lia","IGt1bGFr","IO2UjOugiOydtA==","INit2K/Zitir","44GG44KT","INC80ZbQug==","4KSH4KS44KSV","INGD0YLQvtGH","INmD2KvZitix","IFlpbmU=","4Lix4Lin4Lir4LiZ","0L3RltGX","5Y2i","0YPRgdC70L7Qsg==","7JuM7YGs","IOCkheCklg==","INGG0ZbQutCw","7ISg7J2E","INij2LE=","0LPQsNC70YLQtdGA","YW5nbGlja3k=","INGB0L7RgdGD0LQ=","INGD0Y/Qsg==","INC/0YDQvtC00YPQutGG0ZbRlw==","IGNodWE=","IGTDoW4=","4KS+4KSu4KSX","2KbYqg==","INCk0LXQtA==","IGhyb20=","7ZW067O0","INii2YbZhNin24zZhg==","LdC/0YDQsNCy","IOykkeyalO2VnA==","INCy0LrRgw==","IOWkp+mYqg==","IHRlcms=","INC/0L7QtNGW0LE=","INCy0ZbQtNCy0ZbQtA==","4KWM4KSf","6LOj","INio2KrZhg==","INio2LnYttuM","44Gq44GK","5LuW5YCR","IHRhdnNpeWU=","IE3EsXPEsXI=","INil2LA=","IOaQ","7ZWY64KY","INmI2K4=","44CAIOOAgCDjgIAg44CAIOOAgCDjgIA=","IHRha292w70=","IOCkrOCkqOCkqA==","INC30YDQtdC90LjRjw==","INmI2YHZgg==","67mE7JWE","INC/0L7QvNC+0LbQtdGC","5YyX5biC","ZMSxa2xhcsSx","IOmTgQ==","IGFrdHXDoWxuw60=","INCy0LI=","44KC44Gq44GE","7Ya17Iug","z4TOsc+Dzrc=","IOyDgeuMgA==","IOagoQ==","44CC6YKj","INix2YjYs9uM2Yc=","IHRlbGV2aXp5b24=","5bm06b6E","INCR0L7RgNC40YE=","66as7Ja0","IHp2ZcWZZWo=","0LbQvdC+","INCe0YHRgg==","INC80YPQttGH0LjQvQ==","IHllxZ9pbA==","INCh0L7QstC10YI=","IELDlkw=","INCi0LDQutC+0LY=","IG9ibm92","INC/0YDQuNC90LDQtNC70LXQtg==","INCy0LjRgdC90L7Qsg==","2LfZhQ==","IOyXhuyWtA==","IE3DuWE=","5L2P5a6F","5Yy75a2m","INC90LDRgNC10Lc=","44OL776G","IE3hurd0","IHZ1w7RuZw==","5LiA5Yy6","IOG6om5o","0YDQuNGE","5L+d6Zmp","IM+Hz4HOrs+Dzrc=","5
ZCM5oSP","IOaJkw==","ZXTEmw==","INmI2LDZhNmD","INGC0LjQtg==","IM6fzrnOus6/","INC80ZbRgdGG0ZY=","INGA0LXQsdC10L3QvtC6","IMWeYWg=","2LnZhNmI2YU=","bGFkxLHEnw==","IGdpZGVu","0LvQuNCy0L7RgdGC0ZY=","2ZLYsw==","IFRIQg==","IG1lc2xlaw==","wqDQndC1","zrzPhs+Jzr3OsQ==","INmI2KfYrA==","0L3QsNGB0LvRltC0","5pif5pyf","0JTQtg==","INGA0LDQsdC+0YLQsNC10YI=","IHPDoW5o","7Jqw66as","INin2KjZiA==","55qE5oOF","IOyZuOq1rQ==","IGthYmls","0LXRgNCy0YvQtQ==","IGdpw6B1","IHThu48=","wqDQkQ==","5a6M5pW0","IG11xb7Frw==","IHBvbcSbcm7Emw==","INmF2K7YtdmI2LU=","INCU0LXQvA==","44KP44KM44KL","INC/0YDQuNCx0Ys=","INqp2KfZhdm+24w=","77yt","IHRyaA==","INCR0L7Qu9GM0Yg=","wrQ6","0LjQstCw0LXRgtGB0Y8=","IOyCrO2VrQ==","6L+b5LiA5q2l","0YbQtdC5","44G+44Ga","0LDRgtC10LvQtdC8","6Yyv","IMW+YWxvYg==","0YbQtdC3","0LjQvdGD0LI=","IHZlcnpl","5Zue5Yiw","IGTGsOG7o2M=","2KfYptmK2YQ=","c3RvdXBpbA==","6K665paH","INCf0LDRgNC4","INC00LXQutC+0YDQsNGC0LjQsg==","2KfYrtiq24w=","INGB0YLRgNC10Lw=","44O74pSB44O74pSB44O74pSB44O74pSB","INGB0LDQvNC+0Lk=","0YfRgtC+","7IOB64u0","4omk","0YLQvtCz0L4=","65Co","xLFsYWNhaw==","5Lit44Gr","IM+Fz4DOrM+Bz4fOv8+Fzr0=","INCy0ZbQtNCx0YM=","546755KD","INCy0L/QtdGA0LXQtA==","IFBsemXFiA==","2q/Yp9io","4LmA4Lio4Lij4Lip4LiQ","77yM5pyA","2YXZhtuM","54Wn54mH","55uu5b2V","0YDQuNGC0YLRjw==","4oCM2KfYtA==","IOuMgO2ajA==","IMWZYWR1","LdGC0LXRhQ==","INmK2Yg=","IOC5geC4ng==","2KfZg9mG","IOq4sOyekA==","INCz0ZbQtA==","IOyasOumrOuKlA==","2LTZhdin2LHbjA==","IHRpY2FyaQ==","4pGi","INin2YTYqNiv","INGA0LDRgdGH","INin2YTbjA==","IHPDvHJlZGU=","INin2LnYqtix","INC/0L7QvdGP0YLRjA==","zrPOus6/","77yM5q+U","IFNlYg==","IOyLoOq3nA==","5pS255uK","INm+24zYtNmG2YfYp9iv","zpzOkc6k","67CU7J20","5L6b5bqU","0LHQuNC9","5Lq65rCX","44GP44KJ","IHNrdsSbbA==","IOuTseyepQ==","5ouF5b2T","IGlta2Fu","5pmo","77yM546w5Zyo","IHNyZGNl","7IKw7JeF","INC80L7QtNC10LvQuA==","5pys5b2T44Gr","0LDQvdC60LA=","IHnDvHLDvHk=","INC+0YfQtdCy0LjQtA==","INit2LPZitmG","0YnQsNGO0YI=","bMOpZGw=","0YbQvg==","IGPDrXNh","44GL44GR","6JeN","INiu2YjYp9mH2YbYrw==","IG11xb5l","
INC90LDQutC+0L8=","ZGnEn2luaQ==","ZXJzZW5peg==","INC/0YDQsNGG0ZbQstC90LjQutGW0LI=","0LTQu9GP","IM6xz4PPhA==","5raI6LS5","IOiogA==","IGLDoXQ=","INi02YPZhA==","INGB0L/QuNGA","z4DOv8+EzrU=","INiz2KfZhNmH","ZWtpbA==","4LmB4LiK4Lih","IM+Dz4TOuQ==","INmF2LfZhNio","IOygleyxhQ==","6rSA6rOE","5bm557ea","IOS6rA==","6YCa6YGO","INiv24zar9ix2KfZhg==","INij2YXYpw==","5piv5LiN","IOuMgOuLtQ==","IEVyaw==","cGVydHk=","INC90LDRh9C40L3QsNC10YI=","IOq3uOumrA==","66Oh","IOybueyCrOydtO2KuA==","4KS+4KSw4KSo","5oSP6K+G","INCh0J8=","INio2KfZitiv","IGJha8SxbcSxbmRhbg==","L1RU","INmB2KfYtdmE2Yc=","INmF2KvZhNin","INC60LLQsNC0","INi02KfbjNiv","IHXEjWl0ZWw=","54i9","INi52LHYttmH","IOS6pA==","INGH0LXRgdGC0Yw=","4KWIPwo=","INiu2KfZhtmF","ZXRpeWxl","IM61zrPOus6x","INGB0YPRidC1","IOydvOyWtA==","INCb0LXQvdC4","IOWjsA==","w6FsaWU=","44Oh44O844K4","4KWA4KSk4KSw","0LPQsNC70ZY=","INC80ZbQvdGW0Lw=","IEXFnw==","INC/0YDQvtC40LfQvtGI","0J3QsNGB","INio2YbbjA==","6K6p5oiR","INC/0L7RgdGC0LXQvw==","IOyalOq1rA==","xLFsxLFw","INis2YjYsQ==","IOuMgOu2gOu2hA==","4LmH4LiV4Liy4Lih","INGE0LDRgQ==","IOygleq3nA==","0LvQsNC80LXQvdGC","xJ9lbg==","4KWH4KSCCgo=","INCY0LLQsNC90L7Qsg==","INit2qnZhQ==","IO++mg==","77y7","IG5ldmlk","INC70LDQsdC+0YDQsNGC0L7RgA==","4Lie4Lii4Liy4Lia4Liy4Lil","IGVkaXlvcnVt","IGhsYXZ5","IEV2cm9wc2vDqQ==","IHBow6Fp","44OT44O8","6rSR7Jet7Iuc","5Lqc","2K3Yr9in2Ks=","INC/0YDQvtGE0LjQu9Cw0LrRgtC4","cm9zdMWZZWQ=","INC80LDQu9GM","IG3DvGTDvHI=","2KfYs9in2LM=","INCz0LDQu9GD0LfRlg==","4Li14Lif","INi62LDYp9uM24w=","5a2Q5L6b","IGJhaHNlZA==","IEtyw6Fsb3bDqQ==","5Y27","ICUs","572X5pav","65o=","IOeR","IM6czrXPhM6x","INCt0YLQuA==","IO2Gte2VqQ==","INin2qnYqtio2LE=","IG3Em3PDrWNl","7IiY66Gc","0YTRltC6","INCS0L7Qtw==","0YfQtdGB0LrQuNC8","7Jq065Oc","IG7DoWtsYWR5","INC/0L7RgtGA0LDQvw==","INGA0YPQutCw0YU=","zrnOu86/","IEfDvGw=","66mY","4LmJ4Lii","bWFrdA==","44Oz44OQ44O8","INC90ZbRjw==","INC+0YLRgtC10L0=","bWVzaW5pbg==","INCy0YHQv9C+0Lw=","IOydtOuKlA==","ZHlieQ==","44K/44Oz","4peO","4LmJ4Liy4Lir4LiZ","2KfYr9qv24w=","z4fOr86x
","IHNuYcW+w60=","IOCkmuCklQ==","zrzOrs68zrE=","INmD2LE=","IM66zr/OuQ==","6YC4","IG5ldXN0","INmG2LjYp9mF24w=","5Y2a54mp","IOuyvQ==","4b2x","IOy2nOyLnA==","IGFybcOh","INmH2YXaqdin2LHbjA==","55qE5oOF5Ya1","2YLYp9mF","2YLYqA==","IOmCow==","IOunoQ==","IG9sYXPEsQ==","zrLOrc+B","5L2V44GL","INGD0YfQtdCx","INCy0YPQtw==","INio2LHar9iy","J3lp","INC/0YDQsNC30LQ=","INCe0YDQsw==","IOW5tg==","INGB0LLQuA==","INmF24zYr9in2YY=","IG5hxaFlaG8=","IEJBxZ4=","5buK","zIg=","44GT44Gd","4LmH4LiZ4Lie","zr/Pgc61zrnOvw==","INCx0LDQs9Cw0YI=","zrPOtc65","zrzOtc6vzr8=","4LmI4LiH4LiK4Liy4LiV","IEhpem1ldGxlcmk=","IEFmcmlrYQ==","IHRlZGJpcg==","LO+9pA==","5LiJ57qn","0I7Ri9GfTtCO0YvRn04=","INCa0YDRltC8","IGFyYXk=","IGLDtnlsZWNl","0LrQvtGC","6Zmw","5Zu96Zqb","dMSbbA==","IHBvbGlz","IHV2b2w=","IOyImOqwlQ==","55S16ISR","IHNhbWk=","INi02KfYrtmH","INCy0YHRjNC+0LPQvg==","INit2K/Yp9mC2YQ=","IGlrZW4=","44Kv44Op44OW","IHrDoXZvZA==","4KSs4KSy","67Cw7Iah","6YeH6LSt","66Cs","IOClpAoK","IOqwgeqwgQ==","INC80LDQug==","z4HOsc+Dzrc=","IGnFn2xlbWk=","44GX44Gm44GE44G+44GZ","IFBlaw==","0Y7QvQ==","IHZlbGtvdQ==","5Yqe55CG","5a6D5Lus","IOiQrA==","INC90LDRgNC+0LTRgw==","IGNow7M=","IEhpw6c=","27PbtQ==","IOC4o+C4reC4mg==","27Pbtg==","4LiC4Lin","5L2N5pa8","INCh0YLQsA==","4Lix4LiZ4Lih","4KS+4KSq4KSV","INGD0YDQvtC6","44Ki44Oh44Oq44Kr","INC30LzQvtC2","c2vDqW11","IOi7ig==","INin2K7YqtuM2KfYsQ==","IFDFmA==","0LvRj9Cy","INC80LDQtw==","IMO2emVsbGnEn2k=","5ZG844Gw","IGJpcmluaW4=","INC+0LTQvdC1","zIY=","5LuW44Gu","5bu656+J","0L/QvtGB0LXRgNC10LQ=","4Lir4Lil4LiU","5aSa44GE","z4TOrs+DzrXOuc+C","INix2YjZhtiv","6IG9","7KSR7JeQ","7Iqk7Yuw","INC30LLRltGC","INCw0YDRgtC4","IGPGsOG7nWk=","xLFuZMSxcg==","INCz0L7Qu9C+0LQ=","2KfYstiv","4LmI4Liy4Lin4Lin","44Oh44Op","2LnZhtmI2KfZhg==","JSkK","INGF0L7Qu9C+0LTQuNC70Yw=","5Lq65Lus","Q8Sw","0JfQsNC/","IHDFmWlzcA==","IGR1cnVtbGFyZGE=","0YDRltC0","wqDQow==","IM61z4bOsc+B","IHNwcmF2","INC+0YLRgNC40LzQsNC90L3Rjw==","77yM5rKh5pyJ","0L7QstCw0LvQsA==","IG5n4bqhaQ==","44CC5aSn","INC00LDQtdGC","IHDDrXNlbQ==","0YbRj
9GC0Yw=","b3ZuxJs=","66aJ","IOqygQ==","0YHRgtC40L0=","IFNhecSx","44CL55qE","IHlvbHV5bGE=","0LXQu9C10YTQvtC9","IHLDoW5v","IO2WieuPmQ==","INin2YTYrtin2YXYs9ip","INC/0L7QstC40L3QvdCw","xZlpbGE=","IOCkmuCksOCkow==","INio2LHar9iy2KfYsQ==","7Jq0642w","4LmA4Lib4Lit4Lij","IGRhbGVrbw==","bGVkbsOt","5ZCN56ix","0LvQuNCy0ZbRgdGC0Yw=","IOuquOydhA==","0L7RgNGW0LI=","0KbQtQ==","2KjYr9ij","67CY6riw","a3LDoXQ=","5LiN6Laz","IG9sZHVrbGFyxLE=","bGVuaXlvcg==","IOyLnO2WiQ==","INC/0YDQuNC90LjQvNCw0YLRjA==","4LiC4Lit4LiH4Lij","z4jOtc65","IOG6qW4=","2KrYsw==","INGC0LDQuQ==","INC90LXQstC+0LfQvNC+0LbQvdC+","5Y+K44Gz","cm90aQ==","772t","0LTQvtC8","0L7QudC90L4=","5aOK","6K+055qE","IHNrb3Jv","bmnEjW7DrQ==","IFByb2Zlcw==","INGF0YDQvtC90LjRh9C10YE=","IOyjvOusuA==","IFpu","INGB0LvQvtC5","zqDPgc6/","5oyH5pWw","INC/0LXRgNC10Yg=","4KWB4KSV4KS4","IOqwgOyglQ==","IO2VmOuptA==","27Hbudu0","0LrRg9C7","2YrZhNin","INiv2YjYqNin2LHZhw==","fGw=","INCc0YM=","0L3QuNC70LA=","44Gm44GE44G+44GZ","bWFjxLE=","44Gf44Gh44Gv","INin2YTZg9iq2KfYqA==","56e75YuV","zrvOvA==","X++8jw==","IOqwgOyehQ==","6IW+","INC/0YDQtdC30LjQtNC10L3Rgg==","IOu2hOyVvA==","YWh5","xaFldMWZZW7DrQ==","6ZO6","IHDFmcOtcm8=","0JXQog==","IOyalOyyrQ==","IG1vaGxv","5b+D55CG","IHZ5c29rw6k=","JnV1bWw=","z4TOuc66zrE=","7JeF7LK0","44Gn44GC","4Lij4Liy4Lii4LiH4Liy4LiZ","IHDFmcOtc3DEm3Y=","IGV0bWnFn3Rpcg==","5aW55Lus","z4DOu86x","4bupYQ==","IOivtA==","INGB0L7RgdC10LQ=","5YeJ","INCg0LU=","5Y6f5p2l","INCQ0YDRhQ==","2KjZitmG","5Zyw6K+0","IMO2cnQ=","IM6jzrXPgA==","wq3Zh9in24w=","INin2YTYp9mC2KrYtQ==","5bC9566h","0YLRi9C5","dGFpbnM=","2YDZhA==","56eR5oqA5pyJ6ZmQ5YWs5Y+4","5o+u","4Lix4LiV4LiW","4buXbmc=","4Lil4Liy4LiU","5pqu","INmG2YHYs9mH","IOeciw==","IOOBvw==","IHRhcsSxbQ==","27Hbudu1","IM6K","IGtvbXBsZXg=","IE5oxKk=","6LS555So","INqp2KfYsdio2LHYp9mG","xYhvdsOhbsOt","IGvFrw==","0LTQsNC/","zpXOpw==","6re4656Y","IGTDtm5kw7w=","5Lq65ZOh","IFRp4buDdQ==","INmI24zYsdin24zYtA==","IMO2bmfDtnI=","INmI2LrZitix","INGB0LrRgNGL","4oCQJw==","INC90LXQvNGD","IEjhu4c=","IGTDvHplbmxp
","IHNvdXTEm8W+ZQ==","44CB44Oe","z4TOv868zrE=","xJtsw60=","INij2YTZhdin2YY=","56Cy","IHRyw6A=","IOS4lueVjA==","YXnEsXo=","xLFtbMSx","INin2YTYo9mB","7ZWY64qU642w","0LLQsNC90L4=","IHDFmWnEjWVtxb4=","2YPZitio","INC80LDRgtC10LzQsNGC0Lg=","0LzQtdC90Lg=","INC/0YDQvtC10LrRgtGD","4Li14LmC4Lit","0L7Rgw==","INin2YTYtNix2YPYqQ==","5rOj","2YjZgtmK2Ko=","0YjQuNCy","IHBlcnNvbmVs","2LTYqtix","4LiU4Liy","IOuqvQ==","5Z2Q5Zyo","0L7QutC1","IOuniOuylQ==","INij2YbYpw==","66C1","INmF2KjYp9mG24w=","6Iu55p6c","IOC4qOC4ow==","INCb0YPRhw==","zp/Opc6j","IMSNw6E=","44Gb44Gm","IGvEscWf","0YjQtdCy","5oyH5a+8","4LmB4Lil4Liw4Lih","IHZvbGVi","INGB0LjQu9GL","IGRydWhvdQ==","IOywrA==","IOyeiOydjA==","zqXOow==","5LiN5a6J","IOyXhuydjA==","IGRldGVybQ==","INin2YTZhdi52YTZiNmF2KfYqg==","7Zi5","4pmh","4KWN4KSs4KSo","INiu2LTaqQ==","IE5vdsOh","INGE0YPQvdC00LDQvNC10L3Rgg==","INC/0YDQvtCz0YDQsNC80Lg=","INi52YTZitmD","4KWkCgo=","IHZlcml5b3I=","INGU0LI=","IOyeiOuLpOqzoA==","INin2YTYo9mF2LHZitmD2Yo=","IOWklumDqOODquODs+OCrw==","IOS/rg==","INC/0YPRgtC4","IM6/z4HOsw==","INC+0YHQvdC+0LLQvdC+0Lw=","INC90LDRgNGD0LY=","INC80LjRgNC1","b3bEm3Q=","IO2DkA==","IHNva2Fr","IHNwb2x1cHLDoWNp","0JTQmg==","IOWY","4paN4paN4paN4paN4paN4paN4paN4paN4paN4paN4paN4paN4paN4paN4paN4paN","IMKgwqDCoMKg","IGhhecSxcg==","IOyZlA==","5oKo55qE","5oy6","IOuvvOyjvA==","IGhvdGVsdQ==","4Li14Lic","7J6Q64+Z","5Ly855qE","zq3Ovc+Ez4HOvw==","2LTZiA==","IOmk","IM67zrk=","IG9sbWFrdGFkxLFy","INC+0YHQstC10Yk=","INCy0LjQvdCw","INiu2KfYtdip","cmFuYQ==","zrPPgc6xz4bOrg==","0YbQtdGB","IGRvxJ9ydWx0","INmC2LHYp9ix2K/Yp9iv","INCa0LDQuw==","6rK97KCc","z4fPjA==","0YPRjtGJ0LjQuQ==","64uY7J20","64w=","0LvQsNC3","IG5n4burbmc=","aXNrdQ==","7ISg6rGw","INGN0LvQtdC60YLRgNC+0L0=","IFZvag==","0L3Rj9C80Lg=","INmI2KPZhg==","5Lqt","57uf6K6h","IMWfacWf","44CN55qE","5q2v","INC60L7Qu9C70LXQug==","INC00LLQuNC2","IG7hu61h","xI1hc8Ot","IHNvbnU=","INC80LXRhdCw0L3RltC3","xb5lbsO9","INC30LDRgdGC0YPQvw==","6rSA66Co","INGC0L7QstCw0YDRltCy","IOy8gOydtA==","4KWB4KSX4KSk","IHrDoXNvYg==","0LzQv
tCy0ZbRgA==","dWZhYw==","xa9sZcW+aXQ=","INCy0LjQs9C+0YLQvtCy","INin2YTZhtmI","INi52KfZhdin","5oGo","IOydtOuvuOyngA==","IHR2b8WZ","IHZ5dcW+aXTDrQ==","IGdlbGnFn2lt","7LOk64uk","4Lir4LiZ4Lit4LiH","IOyduOyglQ==","4KWN4KSm4KSw","INC/0LXRgNC10LTQsA==","INC30LTRltC50YHQvdC10L3QvdGP","2YbYuQ==","6KGj5pyN","IGxvYQ==","7ZmI","6Iux5Zu9","IERydWg=","2K7Yp9mG","0LTQsNC8","0LDRgtC10LvRjNC90YvRhQ==","zrjPgc+O","INij2YXYsQ==","IMWZYWRh","IGJ1bHXFnw==","INGC0YDQsNC90YHQv9C+0YA=","INmC2KrZhA==","IFRhcmlm","UnVz","INC30LDRgdGW0LQ=","IMSwaA==","bGV5aW4=","IHZ5csOh","IETEmw==","0LjQsdC70Lg=","YXZvdQ==","INCT0LXRgNC8","0L3QtdC80YM=","INC60L7QvdGG0LXQvw==","INmC2KfYr9ix","IHNvdWJvcg==","IGzhu5Fp","IOe1kA==","0LvQtdC90L3Ri9C5","zrrPhQ==","INC00L7Qv9C+0LzQsNCz","4Lie4Lin4LiB4LmA4LiC","IHF1YW5n","INi32YTYpw==","IOmHjA==","INmG2YXZiNiv2KfYsQ==","IMWfYXI=","INGB0L/RltC7","wq1u","7KeA7JqU","5YmN5b6A","5YWz6ZSu","5a6e5Zyo","6Z+z5qW9","INmF2LPYptmE2Yc=","IHllbWU=","INGI0LDRhQ==","6riw7Iig","IOC4quC4s+C4mQ==","INmI2LHYsti024w=","44GX44Gf44KJ","zq/Pg8+J","0L7QutC+0L0=","44Gf44KJ","INil2YTZitmH","INii2LDYsdio","IHLhu51p","IG9kYWs=","INC80L7Qs9GD","INqv2YY=","6LK8","ZWRsYQ==","INC+0L/Ri9GC","bGFtYWt0YWTEsXI=","5bC85Lqa","6YO95Lya","IM6YzrXPg8+DzrE=","INCy0L7Qsw==","57uI5LqO","INGD0YDQvtCy0L3QtQ==","IHZsYWs=","INii2YTYqQ==","IM61zrnOtA==","4oc=","0LTRg9GC","0ZbQvdCz","INij2YXYsdmK2YPZig==","2KfYstmG2K8=","INio2KfZhNij","IOCkpOCkqA==","IGtheWRldA==","65+s66as","IGRyxb4=","INC/0LXQvdGB","IHDFmcOtxI0=","INCi0L7Qu9GM0LrQvg==","INCx0LDRgtCw0YA=","6ZOB6Lev","INm+24zahg==","IM6TzrXPiQ==","IM6xz4XPhM6s","xJ5J","INCw0LrRgtC40LLQvdC+","zpfOnM6R","IHZhcmzEsWs=","IOWPqg==","INC30LDRidC40YLRiw==","0LvQuNC8","INmF2LTYp9mH2K/YqQ==","0LjQutC+0Lw=","IOyhsOyCrA==","0L7Qs9C10L0=","IG3huqV5","Z2lp","6JuH","INiu2YjbjNi0","IG5vdsOh","0LrQvtCy0L7QuQ==","IGthbsSxdA==","6Z2i6K6u","INix2YjYs9iq2Kc=","7Ja06rCA","INC+0YLQvdC+0YjQtdC90LjRjw==","IGhvZG5vdHk=","2YjYsdin2Ko=","IHDFmcOtc3Q=","IHRo4buN","IMOnxLFrYXJ0","0L7QvtCx0YDQsNC
3","IG5lbcSbbA==","wqBybw==","INiv2YjZhNiq24w=","4Li1LA==","5LiA5bqm","aWFvbWk=","5ZeO","2Y/YuQ==","INCy0LDRgNC40LDQvQ==","IHBvZGHFmWlsbw==","IOuCmOqwgA==","6JCl5Lia","INCw0LHRgdC+0LvRjtGC0L3Qvg==","IOu4jOudvA==","INCz0L7RgNC40Lc=","YcSfxLFu","IHllcmluaQ==","4LmJ4Liy4LiZ4LiU","5pCs","IGJhbMSxaw==","IMWfYW5z","6K6k6K+G","IGlzdGVkacSfaW5peg==","IGppc3TEmw==","IOyImOqwgA==","77yM5LiK","4KSc4KSs","INCy0LjRj9Cy0Lg=","66el","44GX44Gm44KL","2YrZg9in","IEjDvHM=","Y8SxbsSxbg==","IOCktuCkpA==","INGA0LDRgdC/0L7Qu9Cw0LM=","INGB0L/RgNCw0LLQtg==","4Li34Lit4LiW","INCy0LXRgNGC0LjQug==","IHZ5c3Rhdg==","INGA0LXQsNC70ZbQt9Cw0YbRltGX","0LLQsNC80Lg=","44K544OG44Kj","64WB","INGA0LXRh9GW","2YHYp9mE","4KS/4KSV4KSf","INCy0L7Qt9GA0LDRgdGC0LU=","0LrQsNGB","INCY0YE=","INC70ZbQug==","IM+DzrfOvM6xzr0=","0LzQtdC90YLRgw==","0L3Rj9GO0YI=","5p+0","IM64zrXPiQ==","54qv572q","INmC2LfYsQ==","0JTQkA==","LXw=","INGB0YLRlg==","IHV5dW0=","IHBvdMWZZWJh","INi52YXZhNuM2KfYqg==","5aWq","2KfYrtix","INqp2LPYp9mG24w=","2KrZhdix","0YzQtdGA","IE5leg==","7ZqM7IKs","IEJhbmthc8Sx","0LXQs9GA0LA=","4LiC4LiT4Liw4LiX","5ZCI5qC8","IOyXrOufrOu2hA==","eWFzYWw=","IOihjOaUvw==","5YqJ","ZMSxa3Rhbg==","44Ki44Or44OQ","INin24zZhtqG","IGRpaml0YWw=","5bCY","INGA0LDQt9C80LXRiQ==","INC60ZbQu9GM0LrQvtGB0YLRlg==","IEV2cm9weQ==","INGA0L7Qt9Cy0Lg=","0Y7RidGD0Y4=","IG9uZw==","IGhlcHNp","dmFpbGFiaWxpdHk=","INiq2LXZhdmK2YU=","0YPQudGC0LU=","4KS54KSy","IMWhaXJv","IHDDoXM=","Ozs7Ozs7","6YWN5ZCI","INin2YTYudin2YTZhdmK2Kk=","0JLQvg==","aGFm","bMOhdg==","IGLDrA==","IG3Fr2o=","6ruY7ISc","wqBCZg==","INGB0L/RgNC+0YHQuNC7","4oCM2qnZhtmG2K/Zhw==","2YbYr9mK2Kk=","54m56Imy","IOyVqA==","4Li44Lip4Lii","INCk0L7RgA==","0L/QuNGB0L7Qug==","dcW+ZWw=","xLFtbGFy","54q25rOB","IOODrOODh+OCo+ODvOOCuQ==","0YXQvtCy0Lg=","wqBLxI0=","0YfQuNC8","INiq2YjZhQ==","4LmA4LiB4Lip4LiV4Lij","IOyLseq4gA==","2YXYp9ix2KfYqg==","w6puaA==","IMWZaWQ=","5oqs","0YHQuNGO","5oWO","IMOnZXZyZQ==","44OI44Or","IHnEsWxkxLFy","IHrDoXpuYW0=","5py65Zy6","INC/0L7RlA==","INCy0YvRgNCw0YnQuA==","INmB2
Lk=","67s=","INiv2KfYsduM2YU=","77yM5pu0","INC30LXQvNC70Lg=","2KfYqNmC2KfYqg==","IG3hu51p","a8O9Y2g=","2YTYp9ip","5bi9","2KjYsdin2YfZitmF","INC/0L7QsdCw0Yc=","4KS+4KSH4KSu","4LmI4Liy4LiH4Lib4Lij4Liw4LmA4LiX4Lio","IOyEuOyDgQ==","INC/0L7QvNC+0LPQsNC10YI=","IM+Ez4zPg86/","5pa3","INmB2LHYp9mI","4LmE4Lib4Lii","ZXJnaXNp","IOmZkA==","Lnh6","INGB0LvRg9GF","0LXQutC+0L3QvtC8","IE5o4bqldA==","wrHYtw==","IOuIiOydhA==","IO2ajOyCrA==","0ZM=","IOWQjeeEoeOBlw==","IM6/zrzOrM60zrE=","h4w=","bGnEn2luaW4=","2LnYp9mG","INiy2YbbjA==","VMO0aQ==","IGV0a2k=","IOyXsOudvQ==","INC60L7QvdGG0LA=","6LCL","INC30LXQvNC70Y8=","7ZmY6rK9","INmF2qnYp9mG24w=","55ay","IOei","IGt1cnVsYW4=","2KTZiNmE","2K/ZiQ==","INin2YTZhdmG2LfZgtip","IG7huq9uZw==","0J/Qmg==","0L7Qu9Cw0Lk=","WUs=","5ZGG","zrvOsc69","6KW/55yB","IM6SzrHPgw==","IO2ZleyLpA==","WkQ=","0L/RltC0","INC90LDRh9C1","IM+Ezqw=","5b27","4oCeRA==","IOiHug==","INC90LDRiNC10Lk=","IHTDrW10bw==","INiq2LPZhQ==","z4HOuM+Bzr8=","5Luk5Lq6","IFBhemFy","44KT44Go","56uL5Yi7","woFA","IGLhuq9j","7Iqk7YWM","IGthZMSxbmxhcg==","ZmlndXI=","44Gk44G2","IOa1meaxnw==","INC00LXQutGW0LvRjA==","6KGd","4Lii4LiZ4LmB4Lib4Lil4LiH","b2xldA==","IG5lZG9r","bmFtZW4=","5YWE5byf","4Li34Lit4LiC","6IKD","IGLDvG55","INGA0LDQtNGP0L0=","44CB5LqM","0LDQvdC90Y4=","IOaJi+acug==","INC+0YHQu9C+0LY=","INC+0LPQu9GP","INiz2KjYsg==","IGFrdGl2aXQ=","IOCkj+Ckqg==","56uc","IGRpcmVu","adCy","IFlhdMSxcsSxbQ==","0YbRltC50L3QsA==","INC00L7QvNC+0LI=","4bqzbg==","IENvxJ9yYWY=","2YHZiA==","5rCX44Gr5YWl","56eB44Gu","772N","4KWM4KSh","INCT0YDQuNCz0L7RgA==","IFBleWdhbWJlcg==","IM6xzrPOsQ==","IGVmZWt0","IOyeiOyWtOyEnA==","INC/0LvQsNGC0LXQtg==","IFRyYWI=","b3Zlcnk=","4oCm4oCm44CC","IHlhcG1heWE=","INC90LDQudCx0ZbQu9GM","INmF2YbYstmE","2YjZitmD","xLFsZMSxxJ/EsW5kYQ==","IHDFmcOtcGFkbsSb","IM68z4DOv8+Bzr/PjQ==","IOuTnOudvOuniA==","IOuwqeusuA==","INCh0LjQvA==","2qnYp9iq","0LXQutC+0Lw=","2LHZiti5","2YfYr9mB","5peP6Ieq5rK7","IHptxJtu","INCy0LrQu9Cw0LQ=","INio2YTYug==","IOeniw==","Tmdo","IGVuZGnFnw==","IEN1bWh1cmJhxZ9rYW7E
sQ==","IEthZg==","IOC5geC4q+C4pQ==","IG11dGx1","INGB0LjRgA==","INCz0YPQvA==","5r+D","54KJ","IELDoW8=","4KWC4KS3","IOygle2ZlQ==","4KS+4KSo4KS4","77uk","0L3QsNGB0LvRltC00L7Qug==","cG/EjWV0","66eM7JuQ7J6F64uI64uk","IOyEnOyauO2KueuzhOyLnA==","zpXOmc6j","4Li44Lih4LiK4LiZ","INC80ZbQu9GM","5oWM","z4POus61z4TOsc65","IOOAnA==","IGthbGl0ZWxp","INGB0LzQtdGA0YLRjA==","6LyU","INCx0LjRgg==","IM6jz4TOvw==","4LiH4LmA4Lio4Liq","5Y6f5pys","IGtuw60=","5LqS6IGU572R","INGH0LXQu9C+0LLQtdGH0LXRgQ==","562S","4LiI4Liz4Lir4LiZ","5Ye65Y67","44Ki44OL44Oh","5bGV56S6","cnljaA==","4KSF4KSs","b8WI","asOtY8OtbQ==","2KfYrdir","INmI2KfZgti524w=","INCk0LXQtNC10YDQsNC70Yw=","0YHQsNC8","IOyYpQ==","5Zyw55CD","IHN1eXU=","c2VuaXo=","4KWJ4KSr","IOqwmeuLpA==","INC/0YDQuNC30L3QsNGH0LXQvdC90Y8=","IFPEsW4=","INin2YXZhtuM2Ko=","IGzDoXRreQ==","INCR0Lg=","IHPDvHJlY2k=","wrfCt8K3wrc=","IOqyveywsA==","INC60LDQu9GM","INC90LjQutGC0L4=","2ZHZhQ==","INiv2Yrar9ix","IGFsxLFubWFzxLE=","0LvQtdC90L3Rlg==","4Li04Lin4LmA4LiV4Lit4Lij","4Lib4LiB4LiE4Lij4Lit4LiH","INC30LDQutC+0L3QvtC00LDQstGB0YLQstCw","44CA44Kk","IOuFuO2VmOyasA==","IETDvMWf","INCz0YPRgdGC","INCS0LDRiA==","INin2YXYqtuM","IHBhcmFtZXQ=","IM6gzrHOvc61z4A=","4LmM4LiB4Lij","zrbOsQ==","IOuNlOyasQ==","2YjZhNin2Ko=","0LLQsNGC0LjRgdGP","IGvDtms=","2YbYqA==","INCy0YvRgdC+0LrQvtC5","44O844O8","6ZSm","PHxiZWdpbl9vZl90ZXh0fD4=","PHxlbmRfb2ZfdGV4dHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzB8Pg==","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzF8Pg==","PHxmaW5ldHVuZV9yaWdodF9wYWRfaWR8Pg==","PHxzdGVwX2lkfD4=","PHxzdGFydF9oZWFkZXJfaWR8Pg==","PHxlbmRfaGVhZGVyX2lkfD4=","PHxlb21faWR8Pg==","PHxlb3RfaWR8Pg==","PHxweXRob25fdGFnfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzJ8Pg==","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzN8Pg==","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzR8Pg==","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzV8Pg==","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzZ8Pg==","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzd8Pg==","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzh8Pg==","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzl8Pg==","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2
VuXzEwfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzExfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEyfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEzfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE0fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE1fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE2fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE3fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE4fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE5fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIwfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIxfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIyfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIzfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzI0fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzI1fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzI2fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzI3fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzI4fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzI5fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzMwfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzMxfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzMyfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzMzfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzM0fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzM1fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzM2fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzM3fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzM4fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzM5fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzQwfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzQxfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzQyfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzQzfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzQ0fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzQ1fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzQ2fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzQ3fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzQ4fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzQ5fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzUwfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzUxfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzUyfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzUzfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzU0fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzU1fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzU2fD4=","PHxyZXNlc
nZlZF9zcGVjaWFsX3Rva2VuXzU3fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzU4fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzU5fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzYwfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzYxfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzYyfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzYzfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzY0fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzY1fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzY2fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzY3fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzY4fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzY5fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzcwfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzcxfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzcyfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzczfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzc0fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzc1fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzc2fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzc3fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzc4fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzc5fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzgwfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzgxfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzgyfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzgzfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzg0fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzg1fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzg2fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzg3fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzg4fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzg5fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzkwfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzkxfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzkyfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzkzfD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzk0fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzk1fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzk2fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzk3fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzk4fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzk5fD4=","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEwMHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEwMXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEwMnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2V
uXzEwM3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEwNHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEwNXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEwNnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEwN3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEwOHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEwOXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzExMHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzExMXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzExMnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzExM3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzExNHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzExNXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzExNnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzExN3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzExOHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzExOXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEyMHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEyMXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEyMnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEyM3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEyNHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEyNXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEyNnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEyN3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEyOHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEyOXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEzMHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEzMXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEzMnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEzM3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEzNHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEzNXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEzNnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEzN3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEzOHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzEzOXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE0MHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE0MXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE0Mnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE0M3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE0NHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE0NXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE0Nnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE0N3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE0OHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE0OXw+","PHxyZXNlcn
ZlZF9zcGVjaWFsX3Rva2VuXzE1MHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE1MXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE1Mnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE1M3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE1NHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE1NXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE1Nnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE1N3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE1OHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE1OXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE2MHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE2MXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE2Mnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE2M3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE2NHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE2NXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE2Nnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE2N3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE2OHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE2OXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE3MHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE3MXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE3Mnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE3M3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE3NHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE3NXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE3Nnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE3N3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE3OHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE3OXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE4MHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE4MXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE4Mnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE4M3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE4NHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE4NXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE4Nnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE4N3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE4OHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE4OXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE5MHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE5MXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE5Mnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE5M3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE5NHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE5NXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2Vu
XzE5Nnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE5N3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE5OHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzE5OXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIwMHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIwMXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIwMnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIwM3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIwNHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIwNXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIwNnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIwN3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIwOHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIwOXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIxMHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIxMXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIxMnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIxM3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIxNHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIxNXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIxNnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIxN3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIxOHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIxOXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIyMHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIyMXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIyMnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIyM3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIyNHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIyNXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIyNnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIyN3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIyOHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIyOXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIzMHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIzMXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIzMnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIzM3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIzNHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIzNXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIzNnw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIzN3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIzOHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzIzOXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzI0MHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzI0MXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzI0Mnw+","PHxyZXNlcnZ
lZF9zcGVjaWFsX3Rva2VuXzI0M3w+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzI0NHw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzI0NXw+","PHxyZXNlcnZlZF9zcGVjaWFsX3Rva2VuXzI0Nnw+"] diff --git a/model/model.go b/model/model.go new file mode 100644 index 000000000..e44797b6d --- /dev/null +++ b/model/model.go @@ -0,0 +1,228 @@ +package model + +import ( + "fmt" + "image" + _ "image/jpeg" + _ "image/png" + "log/slog" + "os" + "reflect" + "strconv" + "strings" + + _ "golang.org/x/image/bmp" + _ "golang.org/x/image/tiff" + _ "golang.org/x/image/webp" + + "github.com/ollama/ollama/cache" + "github.com/ollama/ollama/ml" + _ "github.com/ollama/ollama/ml/backend" +) + +type Cache struct { + cache.Cache + cache.Options +} + +func (c Cache) Sub(i int) Cache { + if c.Cache != nil { + return Cache{ + Cache: c.Cache.Sub(i), + Options: c.Options, + } + } + + return c +} + +func (c Cache) Put(ctx ml.Context, key, value ml.Tensor, opts cache.Options) (ml.Tensor, ml.Tensor) { + if c.Cache != nil { + return c.Cache.Put(ctx, key, value, opts) + } + + return key, value +} + +type Options struct { + inputs []int32 + + Offset int + + Images []image.Image + + Cache +} + +func (opts Options) Inputs() []int32 { + return opts.inputs[opts.Offset:] +} + +func (opts Options) Positions() []int32 { + positions := make([]int32, len(opts.inputs)-opts.Offset) + for i := range positions { + positions[i] = int32(opts.Offset + i) + } + + return positions +} + +type OptionsFunc func(Model, *Options) + +func WithInputIDs(ids []int32) OptionsFunc { + return func(m Model, opts *Options) { + opts.inputs = ids + } +} + +func WithOffset(offset int) OptionsFunc { + return func(m Model, opts *Options) { + opts.Offset = offset + opts.Cache.Position = offset + } +} + +func WithImage(img image.Image) OptionsFunc { + return func(m Model, opts *Options) { + opts.Images = append(opts.Images, img) + } +} + +func WithCache(c cache.Cache) OptionsFunc { + return func(m Model, opts *Options) { + opts.Cache = Cache{ + Cache: c, + Options: 
cache.Options{ + Position: opts.Offset, + }, + } + } +} + +type Base struct { + b ml.Backend +} + +func (m *Base) Backend() ml.Backend { + return m.b +} + +func (m *Base) SetBackend(b ml.Backend) { + m.b = b +} + +type Model interface { + Forward(ml.Context, Options) (ml.Tensor, error) + + Backend() ml.Backend + SetBackend(ml.Backend) +} + +var models = make(map[string]func(ml.Config) (Model, error)) + +func Register(name string, f func(ml.Config) (Model, error)) { + if _, ok := models[name]; ok { + panic("model: model already registered") + } + + models[name] = f +} + +func New(s string) (Model, error) { + r, err := os.Open(s) + if err != nil { + return nil, err + } + defer r.Close() + + b, err := ml.NewBackend(r) + if err != nil { + return nil, err + } + + arch := b.Config().Architecture() + f, ok := models[arch] + if !ok { + return nil, fmt.Errorf("unsupported model architecture %q", arch) + } + + m, err := f(b.Config()) + if err != nil { + return nil, err + } + + if err := loadTensors(b, m); err != nil { + return nil, err + } + + m.SetBackend(b) + return m, nil +} + +var mlTensorType = reflect.TypeOf((*ml.Tensor)(nil)).Elem() + +func loadTensors(b ml.Backend, m any, tensorPath ...string) error { + t := reflect.TypeOf(m) + v := reflect.ValueOf(m) + + if t.Kind() == reflect.Pointer { + t = t.Elem() + v = v.Elem() + } + + if t.Kind() == reflect.Interface { + return loadTensors(b, v.Interface(), tensorPath...) 
+ } + + for i := range t.NumField() { + f := v.Field(i) + fullTensorPath := tensorPath + if tag := t.Field(i).Tag.Get("ggml"); tag != "" { + tensorName, _, _ := strings.Cut(tag, ",") + fullTensorPath = append(tensorPath, tensorName) + } + + if !f.CanSet() { + continue + } + + if f.Kind() == reflect.Ptr && f.IsNil() { + f.Set(reflect.New(f.Type().Elem())) + } else if f.Kind() == reflect.Interface && f.IsNil() && f.Type().Implements(mlTensorType) { + if tensor := b.Get(strings.Join(fullTensorPath, ".")); tensor != nil { + f.Set(reflect.ValueOf(tensor)) + slog.Debug("loaded tensor", "kind", f.Elem().Type(), "", f.Interface()) + } + } + + if r := reflect.Indirect(f); r.Kind() == reflect.Struct { + if err := loadTensors(b, f.Interface(), fullTensorPath...); err != nil { + return err + } + } else if r.Kind() == reflect.Slice { + for i := range r.Len() { + if err := loadTensors(b, f.Index(i).Addr().Interface(), append(fullTensorPath, strconv.Itoa(i))...); err != nil { + return err + } + } + } + } + + return nil +} + +func Forward(m Model, optsFuncs ...OptionsFunc) (ml.Tensor, error) { + var opts Options + for _, optsFunc := range optsFuncs { + optsFunc(m, &opts) + } + + ctx := m.Backend().NewContext() + t, err := m.Forward(ctx, opts) + if err != nil { + return nil, err + } + defer ctx.Close() + + return ctx.Compute(t), nil +} diff --git a/model/process_text.go b/model/process_text.go new file mode 100644 index 000000000..55e8d26cd --- /dev/null +++ b/model/process_text.go @@ -0,0 +1,311 @@ +package model + +import ( + "cmp" + "log/slog" + "strings" + "sync" + + "github.com/dlclark/regexp2" + heap "github.com/emirpasic/gods/v2/trees/binaryheap" +) + +type Special int32 + +const ( + SpecialBOS Special = iota + SpecialEOS +) + +type TextProcessor interface { + Encode(string) ([]int32, error) + Decode([]int32) (string, error) + Is(uint32, Special) bool +} + +type Vocabulary struct { + Values []string + Types []uint32 + Scores []uint32 + Merges []string + + BOS, EOS uint32 + + 
specialOnce sync.Once + special []string + + valuesOnce sync.Once + values map[string]int32 + + mergeOnce sync.Once + merge map[string]int32 +} + +func (v *Vocabulary) Is(id uint32, special Special) bool { + switch special { + case SpecialBOS: + return id == v.BOS + case SpecialEOS: + return id == v.EOS + default: + return false + } +} + +func (v *Vocabulary) Encode(s string) int32 { + v.valuesOnce.Do(func() { + v.values = make(map[string]int32, len(v.Values)) + for i, value := range v.Values { + v.values[value] = int32(i) + } + }) + + if id, ok := v.values[s]; ok { + return id + } + + return -1 +} + +func (v *Vocabulary) Decode(id int32) string { + return v.Values[id] +} + +func (v *Vocabulary) SpecialVocabulary() []string { + v.specialOnce.Do(func() { + for i := range v.Values { + if v.Types[i] == 3 { + v.special = append(v.special, v.Values[i]) + } + } + }) + + return v.special +} + +func (v *Vocabulary) Merge(left, right string) int { + v.mergeOnce.Do(func() { + v.merge = make(map[string]int32, len(v.Merges)) + for i, merge := range v.Merges { + v.merge[merge] = int32(i) + } + }) + + if id, ok := v.merge[left+" "+right]; ok { + return int(id) + } + + return -1 +} + +type BytePairEncoding struct { + Pretokenizer string + + *Vocabulary +} + +func (bpe BytePairEncoding) split(s string) ([]string, error) { + re, err := regexp2.Compile(bpe.Pretokenizer, regexp2.Unicode|regexp2.RE2) + if err != nil { + return nil, err + } + + var matches []string + for m, _ := re.FindStringMatch(s); m != nil; m, _ = re.FindNextMatch(m) { + matches = append(matches, m.String()) + } + + return matches, nil +} + +// fragment is a string fragment and their corresponding token IDs +type fragment struct { + value string + ids []int32 +} + +// pair is a pair of runes and its rank +type pair struct { + a, b int + rank int + value string +} + +type merge struct { + p, n int + runes []rune +} + +func (bpe BytePairEncoding) Encode(s string) ([]int32, error) { + fragments := []fragment{{value: 
s}} + for _, special := range bpe.Vocabulary.SpecialVocabulary() { + // TODO: process special tokens concurrently + id := bpe.Vocabulary.Encode(special) + for i := 0; i < len(fragments); i++ { + frag := fragments[i] + if len(frag.ids) > 0 { + continue + } + + var middle []fragment + switch i := strings.Index(frag.value, special); { + case i < 0: + middle = append(middle, frag) + case i > 0: + middle = append(middle, fragment{value: frag.value[:i]}) + fallthrough + default: + middle = append(middle, fragment{value: special, ids: []int32{id}}) + if rest := frag.value[i+len(special):]; rest != "" { + middle = append(middle, fragment{value: rest}) + } + } + + fragments = append(fragments[:i], append(middle, fragments[i+1:]...)...) + } + } + + ids := make([]int32, 0, len(fragments)) + for _, frag := range fragments { + if len(frag.ids) > 0 { + ids = append(ids, frag.ids...) + slog.Debug("encoded", "text", frag.value, "ids", frag.ids, "special", true) + continue + } + + // split fragment using pretokenizer + splits, err := bpe.split(frag.value) + if err != nil { + return nil, err + } + + for _, split := range splits { + // TODO: process splits concurrently + var sb strings.Builder + for _, b := range []byte(split) { + r := rune(b) + switch { + case r == 0x00ad: + r = 0x0143 + case r <= 0x0020: + r = r + 0x0100 + case r >= 0x007e && r <= 0x00a0: + r = r + 0x00a2 + } + + sb.WriteRune(r) + } + + if id := bpe.Vocabulary.Encode(sb.String()); id >= 0 { + ids = append(ids, id) + slog.Debug("encoded", "text", sb.String(), "ids", []int32{id}) + continue + } + + runes := []rune(sb.String()) + merges := make([]merge, len(runes)) + for i := range runes { + merges[i] = merge{ + p: i - 1, + n: i + 1, + runes: []rune{runes[i]}, + } + } + + pairwise := func(a, b int) *pair { + if a < 0 || b >= len(runes) { + return nil + } + + left, right := string(merges[a].runes), string(merges[b].runes) + rank := bpe.Vocabulary.Merge(left, right) + if rank < 0 { + return nil + } + + return &pair{ + 
a: a, + b: b, + rank: rank, + value: left + right, + } + } + + pairs := heap.NewWith(func(i, j *pair) int { + return cmp.Compare(i.rank, j.rank) + }) + + for i := range len(runes) - 1 { + if pair := pairwise(i, i+1); pair != nil { + pairs.Push(pair) + } + } + + for !pairs.Empty() { + pair, _ := pairs.Pop() + + left, right := merges[pair.a], merges[pair.b] + if len(left.runes) <= 0 || len(right.runes) <= 0 || + string(left.runes)+string(right.runes) != pair.value { + continue + } + + merges[pair.a].runes = append(left.runes, right.runes...) + merges[pair.b].runes = nil + + merges[pair.a].n = right.n + if right.n < len(merges) { + merges[right.n].p = pair.a + } + + if pair := pairwise(merges[pair.a].p, pair.a); pair != nil { + pairs.Push(pair) + } + + if pair := pairwise(pair.a, merges[pair.a].n); pair != nil { + pairs.Push(pair) + } + } + + for _, merge := range merges { + if len(merge.runes) > 0 { + // TODO: handle the edge case where the rune isn't in the vocabulary + if id := bpe.Vocabulary.Encode(string(merge.runes)); id >= 0 { + ids = append(ids, id) + slog.Debug("encoded", "text", string(merge.runes), "ids", []int32{id}) + } + } + } + } + } + + return ids, nil +} + +func (bpe BytePairEncoding) Decode(ids []int32) (string, error) { + var sb strings.Builder + for _, id := range ids { + for _, r := range bpe.Vocabulary.Decode(id) { + switch { + case r == 0x0100: + // this produces 0x00 aka NULL + continue + case r == 0x0143: + r = 0x00ad + case r > 0x0100 && r <= 0x0120: + r = r - 0x0100 + case r > 0x0120 && r <= 0x0142: + r = r - 0x00a2 + } + + // NOTE: not using WriteRune here because it writes the UTF-8 + // encoding of the rune which is _not_ what we want + if err := sb.WriteByte(byte(r)); err != nil { + return "", err + } + } + } + + slog.Debug("decoded", "ids", ids, "text", sb.String()) + return sb.String(), nil +} diff --git a/model/testdata/inputs.json b/model/testdata/inputs.json new file mode 100644 index 000000000..806cd920a --- /dev/null +++ 
b/model/testdata/inputs.json @@ -0,0 +1,588 @@ +[ + { + "base64": "aWVkIDQgwr0gbW9udGhz", + "ids": [ + 1142, + 220, + 19, + 220, + 27154, + 4038 + ] + }, + { + "base64": "RsO8aHJlcg==", + "ids": [ + 37, + 51853, + 261 + ] + }, + { + "base64": "", + "ids": [] + }, + { + "base64": "IA==", + "ids": [ + 220 + ] + }, + { + "base64": "ICA=", + "ids": [ + 256 + ] + }, + { + "base64": "ICAg", + "ids": [ + 262 + ] + }, + { + "base64": "CQ==", + "ids": [ + 197 + ] + }, + { + "base64": "Cg==", + "ids": [ + 198 + ] + }, + { + "base64": "Cgo=", + "ids": [ + 271 + ] + }, + { + "base64": "CgoK", + "ids": [ + 1432 + ] + }, + { + "base64": "CQo=", + "ids": [ + 1602 + ] + }, + { + "base64": "SGVsbG8gd29ybGQ=", + "ids": [ + 9906, + 1917 + ] + }, + { + "base64": "IEhlbGxvIHdvcmxk", + "ids": [ + 22691, + 1917 + ] + }, + { + "base64": "SGVsbG8gV29ybGQ=", + "ids": [ + 9906, + 4435 + ] + }, + { + "base64": "IEhlbGxvIFdvcmxk", + "ids": [ + 22691, + 4435 + ] + }, + { + "base64": "IEhlbGxvIFdvcmxkIQ==", + "ids": [ + 22691, + 4435, + 0 + ] + }, + { + "base64": "SGVsbG8sIHdvcmxkIQ==", + "ids": [ + 9906, + 11, + 1917, + 0 + ] + }, + { + "base64": "IEhlbGxvLCB3b3JsZCE=", + "ids": [ + 22691, + 11, + 1917, + 0 + ] + }, + { + "base64": "IHRoaXMgaXMg8J+mmS5jcHA=", + "ids": [ + 420, + 374, + 11410, + 99, + 247, + 13, + 11055 + ] + }, + { + "base64": "dzA0OCA3dHVpamsgZHNkZmh1", + "ids": [ + 86, + 23904, + 220, + 22, + 83, + 2005, + 42908, + 11729, + 3013, + 17156 + ] + }, + { + "base64": "0L3QtdGJ0L4g0L3QsCDQkdGK0LvQs9Cw0YDRgdC60Lg=", + "ids": [ + 79862, + 102118, + 13373, + 64571, + 34694, + 3114, + 112203, + 80112 + ] + }, + { + "base64": "4Z6A4Z624Z6T4Z+L4Z6P4Z+C4Z6W4Z634Z6f4Z+B4Z6f4Z6i4Z624Z6F4Z6B4Z6b4Z6F4Z+B4Z6J", + "ids": [ + 21549, + 222, + 98629, + 241, + 45358, + 233, + 21549, + 237, + 45358, + 224, + 21549, + 244, + 21549, + 115, + 21549, + 253, + 45358, + 223, + 21549, + 253, + 21549, + 95, + 98629, + 227, + 21549, + 223, + 21549, + 249, + 21549, + 227, + 45358, + 223, + 21549, + 231 + ] + 
}, + { + "base64": "8J+agCAobm9ybWFsKSDwn5i24oCN8J+Mq++4jyAobXVsdGlwbGUgZW1vamlzIGNvbmNhdGVuYXRlZCkg4pyFIChvbmx5IGVtb2ppIHRoYXQgaGFzIGl0cyBvd24gdG9rZW4p", + "ids": [ + 9468, + 248, + 222, + 320, + 8416, + 8, + 27623, + 114, + 102470, + 9468, + 234, + 104, + 31643, + 320, + 36773, + 100166, + 98634, + 8, + 26602, + 227, + 320, + 3323, + 43465, + 430, + 706, + 1202, + 1866, + 4037, + 8 + ] + }, + { + "base64": "SGVsbG8=", + "ids": [ + 9906 + ] + }, + { + "base64": "IEhlbGxv", + "ids": [ + 22691 + ] + }, + { + "base64": "ICBIZWxsbw==", + "ids": [ + 220, + 22691 + ] + }, + { + "base64": "ICAgSGVsbG8=", + "ids": [ + 256, + 22691 + ] + }, + { + "base64": "ICAgIEhlbGxv", + "ids": [ + 262, + 22691 + ] + }, + { + "base64": "ICAgIEhlbGxvCiAgICBIZWxsbw==", + "ids": [ + 262, + 22691, + 198, + 262, + 22691 + ] + }, + { + "base64": "ICg=", + "ids": [ + 320 + ] + }, + { + "base64": "CiA9", + "ids": [ + 198, + 284 + ] + }, + { + "base64": "JyBlcmE=", + "ids": [ + 6, + 11639 + ] + }, + { + "base64": "SGVsbG8sIHknYWxsISBIb3cgYXJlIHlvdSDwn5iBID/miJHmg7PlnKhhcHBsZeW3peS9nDEzMTQxNTHlpKnvvZ4=", + "ids": [ + 9906, + 11, + 379, + 65948, + 0, + 2650, + 527, + 499, + 27623, + 223, + 949, + 37046, + 101067, + 19000, + 23182, + 102301, + 9263, + 18136, + 16, + 36827, + 21909 + ] + }, + { + "base64": "ISEhISEh", + "ids": [ + 17523, + 3001 + ] + }, + { + "base64": "Mw==", + "ids": [ + 18 + ] + }, + { + "base64": "MzM=", + "ids": [ + 1644 + ] + }, + { + "base64": "MzMz", + "ids": [ + 8765 + ] + }, + { + "base64": "MzMzMw==", + "ids": [ + 8765, + 18 + ] + }, + { + "base64": "MzMzMzM=", + "ids": [ + 8765, + 1644 + ] + }, + { + "base64": "MzMzMzMz", + "ids": [ + 8765, + 8765 + ] + }, + { + "base64": "MzMzMzMzMw==", + "ids": [ + 8765, + 8765, + 18 + ] + }, + { + "base64": "MzMzMzMzMzM=", + "ids": [ + 8765, + 8765, + 1644 + ] + }, + { + "base64": "MzMzMzMzMzMz", + "ids": [ + 8765, + 8765, + 8765 + ] + }, + { + "base64": "Q+G7rWEgVmnhu4d0", + "ids": [ + 34, + 91163, + 11655, + 26298, + 83 + ] + }, + { 
+ "base64": "IGRpc2NhcmRz", + "ids": [ + 2624, + 2402 + ] + }, + { + "base64": "CiAKCiAKCgogCSAJCSAJCiAgCiAgIAogICAgCiAgICAgCvCfmoAgKG5vcm1hbCkg8J+YtuKAjfCfjKvvuI8gKG11bHRpcGxlIGVtb2ppcyBjb25jYXRlbmF0ZWQpIOKchSDwn6aZ8J+mmSAzIDMzIDMzMyAzMzMzIDMzMzMzIDMzMzMzMyAzMzMzMzMzIDMzMzMzMzMzIDMuMyAzLi4zIDMuLi4zIOGegOGetuGek+Gfi+Gej+GfguGeluGet+Gen+GfgeGen+GeouGetuGehfCfmIEgP+aIkeaDs+WcqGFwcGxl5bel5L2cMTMxNDE1MeWkqe+9niAtLS0tLS09PT09PT09INC90LXRidC+INC90LAg0JHRitC70LPQsNGA0YHQutC4ICcnJycnJ2BgYGBgYGAiIiIiLi4uLi4uISEhISEhPz8/Pz8/IEkndmUgYmVlbiAndG9sZCBoZSdzIHRoZXJlLCAnUkUgeW91IHN1cmU/ICdNIG5vdCBzdXJlIEknbGwgbWFrZSBpdCwgJ0QgeW91IGxpa2Ugc29tZSB0ZWE/IFdlJ1ZlIGEnbEw=", + "ids": [ + 198, + 4815, + 15073, + 66597, + 8004, + 1602, + 2355, + 79772, + 11187, + 9468, + 248, + 222, + 320, + 8416, + 8, + 27623, + 114, + 102470, + 9468, + 234, + 104, + 31643, + 320, + 36773, + 100166, + 98634, + 8, + 26602, + 227, + 11410, + 99, + 247, + 9468, + 99, + 247, + 220, + 18, + 220, + 1644, + 220, + 8765, + 220, + 8765, + 18, + 220, + 8765, + 1644, + 220, + 8765, + 8765, + 220, + 8765, + 8765, + 18, + 220, + 8765, + 8765, + 1644, + 220, + 18, + 13, + 18, + 220, + 18, + 497, + 18, + 220, + 18, + 1131, + 18, + 220, + 21549, + 222, + 98629, + 241, + 45358, + 233, + 21549, + 237, + 45358, + 224, + 21549, + 244, + 21549, + 115, + 21549, + 253, + 45358, + 223, + 21549, + 253, + 21549, + 95, + 98629, + 227, + 76460, + 223, + 949, + 37046, + 101067, + 19000, + 23182, + 102301, + 9263, + 18136, + 16, + 36827, + 21909, + 56560, + 54337, + 19175, + 102118, + 13373, + 64571, + 34694, + 3114, + 112203, + 80112, + 3436, + 106451, + 14196, + 14196, + 74694, + 3089, + 3089, + 29249, + 17523, + 3001, + 27708, + 7801, + 358, + 3077, + 1027, + 364, + 83, + 820, + 568, + 596, + 1070, + 11, + 364, + 793, + 499, + 2771, + 30, + 364, + 44, + 539, + 2771, + 358, + 3358, + 1304, + 433, + 11, + 364, + 35, + 499, + 1093, + 1063, + 15600, + 30, + 1226, + 6, + 43712, + 264, + 64966, + 43 + ] + } +] \ No newline at end of file 
diff --git a/sample/greedy.go b/sample/greedy.go new file mode 100644 index 000000000..206f5544d --- /dev/null +++ b/sample/greedy.go @@ -0,0 +1,13 @@ +package sample + +import "gonum.org/v1/gonum/floats" + +type greedy struct{} + +func Greedy() Sampler { + return greedy{} +} + +func (s greedy) Sample(t []float64) ([]float64, error) { + return []float64{float64(floats.MaxIdx(t))}, nil +} diff --git a/sample/sample.go b/sample/sample.go new file mode 100644 index 000000000..44c08caed --- /dev/null +++ b/sample/sample.go @@ -0,0 +1,74 @@ +package sample + +import ( + "slices" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/stat/sampleuv" +) + +type Sampler interface { + Sample([]float64) ([]float64, error) +} + +type Temperature float64 + +func (s Temperature) Sample(t []float64) ([]float64, error) { + floats.Div(t, slices.Repeat([]float64{float64(s)}, len(t))) + return t, nil +} + +type softmax struct{} + +func Softmax() Sampler { + return softmax{} +} + +func (softmax) Sample(t []float64) ([]float64, error) { + return t, nil +} + +type TopK int + +func (s TopK) Sample(t []float64) ([]float64, error) { + return t, nil +} + +type TopP float32 + +func (s TopP) Sample(t []float64) ([]float64, error) { + return t, nil +} + +type MinP float32 + +func (s MinP) Sample(t []float64) ([]float64, error) { + return t, nil +} + +type weighed struct{} + +func Weighed() Sampler { + return weighed{} +} + +func (s weighed) Sample(t []float64) ([]float64, error) { + w := sampleuv.NewWeighted(t, nil) + if v, ok := w.Take(); ok { + return []float64{float64(v)}, nil + } + + return t, nil +} + +func Sample(floats []float64, samplers ...Sampler) ([]float64, error) { + var err error + for _, sampler := range samplers { + floats, err = sampler.Sample(floats) + if err != nil { + return nil, err + } + } + + return floats, nil +}