MS-Schisandra-22B-v0.3

1
3
by
Nohobby
Language Model
OTHER
22B params
New
1 download
Early-stage
Edge AI:
Mobile
Laptop
Server
50GB+ RAM
Mobile
Laptop
Server
Quick Summary

RPMax v1.1 | Pantheon-RP | Cydonia-v1.3 | Magnum V4 | ChatWaifu v2.0 | SorcererLM | NovusKyver | Meadowlark | Firefly At the moment, I'm not entirely sure it's...

Device Compatibility

Mobile
4-6GB RAM
Laptop
16GB RAM
Server
GPU
Minimum Recommended
21GB+ RAM

Code Examples

Settings (YAML)
# mergekit configuration — della_linear merge of four Mistral-Small-22B
# finetunes. List-valued weight/density appear to be per-layer-group gradient
# values (mergekit interpolates across layer slices); single-element lists
# apply uniformly — verify against the mergekit della_linear docs.
models:
  - model: Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: Mistral-Small-NovusKyver
    parameters:
      # NOTE(review): negative entries subtract this model's delta in those
      # layer groups — presumably intentional; confirm with the author.
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: MiS-Firefly-v0.2-22B
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]  # single value — uniform across layers
  - model: magnum-v4-22b
    parameters:
      weight: [0.33]  # single value — uniform across layers
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
merge_method: della_linear  # DELLA-style linear merge (see mergekit docs)
base_model: Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.05  # DELLA pruning parameter — see mergekit della docs
  lambda: 1.05   # scaling applied to merged deltas — see mergekit della docs
  int8_mask: true
  rescale: true
  normalize: false  # keep raw weights; do not rescale them to sum to 1
dtype: bfloat16
tokenizer_source: base  # tokenizer taken from base_model
Settingsyaml
models:
  - model: Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: Mistral-Small-NovusKyver
    parameters:
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: MiS-Firefly-v0.2-22B
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: magnum-v4-22b
    parameters:
      weight: [0.33]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
merge_method: della_linear
base_model: Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.05
  lambda: 1.05
  int8_mask: true
  rescale: true
  normalize: false
dtype: bfloat16
tokenizer_source: base
Settingsyaml
models:
  - model: Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: Mistral-Small-NovusKyver
    parameters:
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: MiS-Firefly-v0.2-22B
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: magnum-v4-22b
    parameters:
      weight: [0.33]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
merge_method: della_linear
base_model: Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.05
  lambda: 1.05
  int8_mask: true
  rescale: true
  normalize: false
dtype: bfloat16
tokenizer_source: base
Settingsyaml
models:
  - model: Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: Mistral-Small-NovusKyver
    parameters:
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: MiS-Firefly-v0.2-22B
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: magnum-v4-22b
    parameters:
      weight: [0.33]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
merge_method: della_linear
base_model: Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.05
  lambda: 1.05
  int8_mask: true
  rescale: true
  normalize: false
dtype: bfloat16
tokenizer_source: base
Settingsyaml
models:
  - model: Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: Mistral-Small-NovusKyver
    parameters:
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: MiS-Firefly-v0.2-22B
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: magnum-v4-22b
    parameters:
      weight: [0.33]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
merge_method: della_linear
base_model: Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.05
  lambda: 1.05
  int8_mask: true
  rescale: true
  normalize: false
dtype: bfloat16
tokenizer_source: base
Settingsyaml
models:
  - model: Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: Mistral-Small-NovusKyver
    parameters:
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: MiS-Firefly-v0.2-22B
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: magnum-v4-22b
    parameters:
      weight: [0.33]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
merge_method: della_linear
base_model: Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.05
  lambda: 1.05
  int8_mask: true
  rescale: true
  normalize: false
dtype: bfloat16
tokenizer_source: base
Settingsyaml
models:
  - model: Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: Mistral-Small-NovusKyver
    parameters:
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: MiS-Firefly-v0.2-22B
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: magnum-v4-22b
    parameters:
      weight: [0.33]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
merge_method: della_linear
base_model: Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.05
  lambda: 1.05
  int8_mask: true
  rescale: true
  normalize: false
dtype: bfloat16
tokenizer_source: base
Settingsyaml
models:
  - model: Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: Mistral-Small-NovusKyver
    parameters:
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: MiS-Firefly-v0.2-22B
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: magnum-v4-22b
    parameters:
      weight: [0.33]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
merge_method: della_linear
base_model: Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.05
  lambda: 1.05
  int8_mask: true
  rescale: true
  normalize: false
dtype: bfloat16
tokenizer_source: base
Settingsyaml
models:
  - model: Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: Mistral-Small-NovusKyver
    parameters:
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: MiS-Firefly-v0.2-22B
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: magnum-v4-22b
    parameters:
      weight: [0.33]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
merge_method: della_linear
base_model: Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.05
  lambda: 1.05
  int8_mask: true
  rescale: true
  normalize: false
dtype: bfloat16
tokenizer_source: base
Settingsyaml
models:
  - model: Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: Mistral-Small-NovusKyver
    parameters:
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: MiS-Firefly-v0.2-22B
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: magnum-v4-22b
    parameters:
      weight: [0.33]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
merge_method: della_linear
base_model: Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.05
  lambda: 1.05
  int8_mask: true
  rescale: true
  normalize: false
dtype: bfloat16
tokenizer_source: base
Settingsyaml
models:
  - model: Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: Mistral-Small-NovusKyver
    parameters:
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: MiS-Firefly-v0.2-22B
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: magnum-v4-22b
    parameters:
      weight: [0.33]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
merge_method: della_linear
base_model: Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.05
  lambda: 1.05
  int8_mask: true
  rescale: true
  normalize: false
dtype: bfloat16
tokenizer_source: base
Settingsyaml
models:
  - model: Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: Mistral-Small-NovusKyver
    parameters:
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: MiS-Firefly-v0.2-22B
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: magnum-v4-22b
    parameters:
      weight: [0.33]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
merge_method: della_linear
base_model: Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.05
  lambda: 1.05
  int8_mask: true
  rescale: true
  normalize: false
dtype: bfloat16
tokenizer_source: base
Settingsyaml
models:
  - model: Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: Mistral-Small-NovusKyver
    parameters:
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: MiS-Firefly-v0.2-22B
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: magnum-v4-22b
    parameters:
      weight: [0.33]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
merge_method: della_linear
base_model: Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.05
  lambda: 1.05
  int8_mask: true
  rescale: true
  normalize: false
dtype: bfloat16
tokenizer_source: base
Settingsyaml
models:
  - model: Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: Mistral-Small-NovusKyver
    parameters:
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: MiS-Firefly-v0.2-22B
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: magnum-v4-22b
    parameters:
      weight: [0.33]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
merge_method: della_linear
base_model: Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.05
  lambda: 1.05
  int8_mask: true
  rescale: true
  normalize: false
dtype: bfloat16
tokenizer_source: base
Settingsyaml
models:
  - model: Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: [0.2, 0.3, 0.2, 0.3, 0.2]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
  - model: Mistral-Small-NovusKyver
    parameters:
      weight: [0.01768, -0.01675, 0.01285, -0.01696, 0.01421]
      density: [0.6, 0.4, 0.5, 0.4, 0.6]
  - model: MiS-Firefly-v0.2-22B
    parameters:
      weight: [0.208, 0.139, 0.139, 0.139, 0.208]
      density: [0.7]
  - model: magnum-v4-22b
    parameters:
      weight: [0.33]
      density: [0.45, 0.55, 0.45, 0.55, 0.45]
merge_method: della_linear
base_model: Mistral-Small-22B-ArliAI-RPMax-v1.1
parameters:
  epsilon: 0.05
  lambda: 1.05
  int8_mask: true
  rescale: true
  normalize: false
dtype: bfloat16
tokenizer_source: base
SchisandraVA3 (YAML)
# mergekit configuration — della_linear merge of five RP-oriented 22B models
# onto Cydonia-22B-v1.3. All contributing models use uniform density 0.55 and
# weight 1; normalize: true rescales the equal weights — verify exact
# semantics against the mergekit docs.
# NOTE(review): sequence indentation normalized from 4 spaces to the 2-space
# style used by every other config in this file (yamllint
# indentation.indent-sequences); YAML structure is unchanged.
merge_method: della_linear
dtype: bfloat16
parameters:
  normalize: true  # rescale the (equal) weights
  int8_mask: true
tokenizer_source: base  # tokenizer taken from base_model
base_model: Cydonia-22B-v1.3
models:
  - model: Karasik03
    parameters:
      density: 0.55
      weight: 1
  - model: Pantheon-RP-Pure-1.6.2-22b-Small
    parameters:
      density: 0.55
      weight: 1
  - model: ChatWaifu_v2.0_22B
    parameters:
      density: 0.55
      weight: 1
  - model: MS-Meadowlark-Alt-22B
    parameters:
      density: 0.55
      weight: 1
  - model: SorcererLM-22B
    parameters:
      density: 0.55
      weight: 1
SchisandraVA3yaml
merge_method: della_linear
dtype: bfloat16
parameters:
  normalize: true
  int8_mask: true
tokenizer_source: base
base_model: Cydonia-22B-v1.3
models:
    - model: Karasik03
      parameters:
        density: 0.55
        weight: 1
    - model: Pantheon-RP-Pure-1.6.2-22b-Small
      parameters:
        density: 0.55
        weight: 1
    - model: ChatWaifu_v2.0_22B
      parameters:
        density: 0.55
        weight: 1
    - model: MS-Meadowlark-Alt-22B
      parameters:
        density: 0.55
        weight: 1
    - model: SorcererLM-22B
      parameters:
        density: 0.55
        weight: 1
SchisandraVA3yaml
merge_method: della_linear
dtype: bfloat16
parameters:
  normalize: true
  int8_mask: true
tokenizer_source: base
base_model: Cydonia-22B-v1.3
models:
    - model: Karasik03
      parameters:
        density: 0.55
        weight: 1
    - model: Pantheon-RP-Pure-1.6.2-22b-Small
      parameters:
        density: 0.55
        weight: 1
    - model: ChatWaifu_v2.0_22B
      parameters:
        density: 0.55
        weight: 1
    - model: MS-Meadowlark-Alt-22B
      parameters:
        density: 0.55
        weight: 1
    - model: SorcererLM-22B
      parameters:
        density: 0.55
        weight: 1
SchisandraVA3yaml
merge_method: della_linear
dtype: bfloat16
parameters:
  normalize: true
  int8_mask: true
tokenizer_source: base
base_model: Cydonia-22B-v1.3
models:
    - model: Karasik03
      parameters:
        density: 0.55
        weight: 1
    - model: Pantheon-RP-Pure-1.6.2-22b-Small
      parameters:
        density: 0.55
        weight: 1
    - model: ChatWaifu_v2.0_22B
      parameters:
        density: 0.55
        weight: 1
    - model: MS-Meadowlark-Alt-22B
      parameters:
        density: 0.55
        weight: 1
    - model: SorcererLM-22B
      parameters:
        density: 0.55
        weight: 1
SchisandraVA3yaml
merge_method: della_linear
dtype: bfloat16
parameters:
  normalize: true
  int8_mask: true
tokenizer_source: base
base_model: Cydonia-22B-v1.3
models:
    - model: Karasik03
      parameters:
        density: 0.55
        weight: 1
    - model: Pantheon-RP-Pure-1.6.2-22b-Small
      parameters:
        density: 0.55
        weight: 1
    - model: ChatWaifu_v2.0_22B
      parameters:
        density: 0.55
        weight: 1
    - model: MS-Meadowlark-Alt-22B
      parameters:
        density: 0.55
        weight: 1
    - model: SorcererLM-22B
      parameters:
        density: 0.55
        weight: 1
SchisandraVA3yaml
merge_method: della_linear
dtype: bfloat16
parameters:
  normalize: true
  int8_mask: true
tokenizer_source: base
base_model: Cydonia-22B-v1.3
models:
    - model: Karasik03
      parameters:
        density: 0.55
        weight: 1
    - model: Pantheon-RP-Pure-1.6.2-22b-Small
      parameters:
        density: 0.55
        weight: 1
    - model: ChatWaifu_v2.0_22B
      parameters:
        density: 0.55
        weight: 1
    - model: MS-Meadowlark-Alt-22B
      parameters:
        density: 0.55
        weight: 1
    - model: SorcererLM-22B
      parameters:
        density: 0.55
        weight: 1
SchisandraVA3yaml
merge_method: della_linear
dtype: bfloat16
parameters:
  normalize: true
  int8_mask: true
tokenizer_source: base
base_model: Cydonia-22B-v1.3
models:
    - model: Karasik03
      parameters:
        density: 0.55
        weight: 1
    - model: Pantheon-RP-Pure-1.6.2-22b-Small
      parameters:
        density: 0.55
        weight: 1
    - model: ChatWaifu_v2.0_22B
      parameters:
        density: 0.55
        weight: 1
    - model: MS-Meadowlark-Alt-22B
      parameters:
        density: 0.55
        weight: 1
    - model: SorcererLM-22B
      parameters:
        density: 0.55
        weight: 1
SchisandraVA3yaml
merge_method: della_linear
dtype: bfloat16
parameters:
  normalize: true
  int8_mask: true
tokenizer_source: base
base_model: Cydonia-22B-v1.3
models:
    - model: Karasik03
      parameters:
        density: 0.55
        weight: 1
    - model: Pantheon-RP-Pure-1.6.2-22b-Small
      parameters:
        density: 0.55
        weight: 1
    - model: ChatWaifu_v2.0_22B
      parameters:
        density: 0.55
        weight: 1
    - model: MS-Meadowlark-Alt-22B
      parameters:
        density: 0.55
        weight: 1
    - model: SorcererLM-22B
      parameters:
        density: 0.55
        weight: 1
SchisandraVA3yaml
merge_method: della_linear
dtype: bfloat16
parameters:
  normalize: true
  int8_mask: true
tokenizer_source: base
base_model: Cydonia-22B-v1.3
models:
    - model: Karasik03
      parameters:
        density: 0.55
        weight: 1
    - model: Pantheon-RP-Pure-1.6.2-22b-Small
      parameters:
        density: 0.55
        weight: 1
    - model: ChatWaifu_v2.0_22B
      parameters:
        density: 0.55
        weight: 1
    - model: MS-Meadowlark-Alt-22B
      parameters:
        density: 0.55
        weight: 1
    - model: SorcererLM-22B
      parameters:
        density: 0.55
        weight: 1
SchisandraVA3yaml
merge_method: della_linear
dtype: bfloat16
parameters:
  normalize: true
  int8_mask: true
tokenizer_source: base
base_model: Cydonia-22B-v1.3
models:
    - model: Karasik03
      parameters:
        density: 0.55
        weight: 1
    - model: Pantheon-RP-Pure-1.6.2-22b-Small
      parameters:
        density: 0.55
        weight: 1
    - model: ChatWaifu_v2.0_22B
      parameters:
        density: 0.55
        weight: 1
    - model: MS-Meadowlark-Alt-22B
      parameters:
        density: 0.55
        weight: 1
    - model: SorcererLM-22B
      parameters:
        density: 0.55
        weight: 1
SchisandraVA3yaml
merge_method: della_linear
dtype: bfloat16
parameters:
  normalize: true
  int8_mask: true
tokenizer_source: base
base_model: Cydonia-22B-v1.3
models:
    - model: Karasik03
      parameters:
        density: 0.55
        weight: 1
    - model: Pantheon-RP-Pure-1.6.2-22b-Small
      parameters:
        density: 0.55
        weight: 1
    - model: ChatWaifu_v2.0_22B
      parameters:
        density: 0.55
        weight: 1
    - model: MS-Meadowlark-Alt-22B
      parameters:
        density: 0.55
        weight: 1
    - model: SorcererLM-22B
      parameters:
        density: 0.55
        weight: 1
SchisandraVA3yaml
merge_method: della_linear
dtype: bfloat16
parameters:
  normalize: true
  int8_mask: true
tokenizer_source: base
base_model: Cydonia-22B-v1.3
models:
    - model: Karasik03
      parameters:
        density: 0.55
        weight: 1
    - model: Pantheon-RP-Pure-1.6.2-22b-Small
      parameters:
        density: 0.55
        weight: 1
    - model: ChatWaifu_v2.0_22B
      parameters:
        density: 0.55
        weight: 1
    - model: MS-Meadowlark-Alt-22B
      parameters:
        density: 0.55
        weight: 1
    - model: SorcererLM-22B
      parameters:
        density: 0.55
        weight: 1
SchisandraVA3yaml
merge_method: della_linear
dtype: bfloat16
parameters:
  normalize: true
  int8_mask: true
tokenizer_source: base
base_model: Cydonia-22B-v1.3
models:
    - model: Karasik03
      parameters:
        density: 0.55
        weight: 1
    - model: Pantheon-RP-Pure-1.6.2-22b-Small
      parameters:
        density: 0.55
        weight: 1
    - model: ChatWaifu_v2.0_22B
      parameters:
        density: 0.55
        weight: 1
    - model: MS-Meadowlark-Alt-22B
      parameters:
        density: 0.55
        weight: 1
    - model: SorcererLM-22B
      parameters:
        density: 0.55
        weight: 1
SchisandraVA3yaml
merge_method: della_linear
dtype: bfloat16
parameters:
  normalize: true
  int8_mask: true
tokenizer_source: base
base_model: Cydonia-22B-v1.3
models:
    - model: Karasik03
      parameters:
        density: 0.55
        weight: 1
    - model: Pantheon-RP-Pure-1.6.2-22b-Small
      parameters:
        density: 0.55
        weight: 1
    - model: ChatWaifu_v2.0_22B
      parameters:
        density: 0.55
        weight: 1
    - model: MS-Meadowlark-Alt-22B
      parameters:
        density: 0.55
        weight: 1
    - model: SorcererLM-22B
      parameters:
        density: 0.55
        weight: 1
SchisandraVA3yaml
merge_method: della_linear
dtype: bfloat16
parameters:
  normalize: true
  int8_mask: true
tokenizer_source: base
base_model: Cydonia-22B-v1.3
models:
    - model: Karasik03
      parameters:
        density: 0.55
        weight: 1
    - model: Pantheon-RP-Pure-1.6.2-22b-Small
      parameters:
        density: 0.55
        weight: 1
    - model: ChatWaifu_v2.0_22B
      parameters:
        density: 0.55
        weight: 1
    - model: MS-Meadowlark-Alt-22B
      parameters:
        density: 0.55
        weight: 1
    - model: SorcererLM-22B
      parameters:
        density: 0.55
        weight: 1
Schisandra-v0.3 (YAML)
# mergekit configuration — final della_linear merge blending the stock
# instruct model back into the intermediate SchisandraVA3 merge, gated per
# projection type. The 11-element 0/1 vectors look like per-layer-slice
# gates; note the two models' vectors are exact complements for every filter
# (and the defaults are 0 vs 1), so each slice/projection is sourced from
# exactly one of the two models — verify against mergekit's filtered-weight
# (gradient) syntax docs.
dtype: bfloat16
tokenizer_source: base
merge_method: della_linear
parameters:
  density: 0.5  # global density for delta pruning
base_model: SchisandraVA3
models:
  - model: unsloth/Mistral-Small-Instruct-2409
    parameters:
      weight:
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        # default for tensors not matched by any filter above
        - value: 0
  - model: SchisandraVA3
    parameters:
      weight:
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        # default — unmatched tensors come entirely from SchisandraVA3
        - value: 1
Schisandra-v0.3yaml
dtype: bfloat16
tokenizer_source: base
merge_method: della_linear
parameters:
  density: 0.5
base_model: SchisandraVA3
models:
  - model: unsloth/Mistral-Small-Instruct-2409
    parameters:
      weight:
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - value: 0
  - model: SchisandraVA3
    parameters:
      weight:
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - value: 1
Schisandra-v0.3yaml
dtype: bfloat16
tokenizer_source: base
merge_method: della_linear
parameters:
  density: 0.5
base_model: SchisandraVA3
models:
  - model: unsloth/Mistral-Small-Instruct-2409
    parameters:
      weight:
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - value: 0
  - model: SchisandraVA3
    parameters:
      weight:
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - value: 1
Schisandra-v0.3yaml
dtype: bfloat16
tokenizer_source: base
merge_method: della_linear
parameters:
  density: 0.5
base_model: SchisandraVA3
models:
  - model: unsloth/Mistral-Small-Instruct-2409
    parameters:
      weight:
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - value: 0
  - model: SchisandraVA3
    parameters:
      weight:
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - value: 1
Schisandra-v0.3yaml
dtype: bfloat16
tokenizer_source: base
merge_method: della_linear
parameters:
  density: 0.5
base_model: SchisandraVA3
models:
  - model: unsloth/Mistral-Small-Instruct-2409
    parameters:
      weight:
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - value: 0
  - model: SchisandraVA3
    parameters:
      weight:
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - value: 1
Schisandra-v0.3yaml
dtype: bfloat16
tokenizer_source: base
merge_method: della_linear
parameters:
  density: 0.5
base_model: SchisandraVA3
models:
  - model: unsloth/Mistral-Small-Instruct-2409
    parameters:
      weight:
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - value: 0
  - model: SchisandraVA3
    parameters:
      weight:
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - value: 1
Schisandra-v0.3yaml
dtype: bfloat16
tokenizer_source: base
merge_method: della_linear
parameters:
  density: 0.5
base_model: SchisandraVA3
models:
  - model: unsloth/Mistral-Small-Instruct-2409
    parameters:
      weight:
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - value: 0
  - model: SchisandraVA3
    parameters:
      weight:
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - value: 1
Schisandra-v0.3yaml
dtype: bfloat16
tokenizer_source: base
merge_method: della_linear
parameters:
  density: 0.5
base_model: SchisandraVA3
models:
  - model: unsloth/Mistral-Small-Instruct-2409
    parameters:
      weight:
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - value: 0
  - model: SchisandraVA3
    parameters:
      weight:
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - value: 1
Schisandra-v0.3yaml
dtype: bfloat16
tokenizer_source: base
merge_method: della_linear
parameters:
  density: 0.5
base_model: SchisandraVA3
models:
  - model: unsloth/Mistral-Small-Instruct-2409
    parameters:
      weight:
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - value: 0
  - model: SchisandraVA3
    parameters:
      weight:
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - value: 1
Schisandra-v0.3yaml
# mergekit della_linear merge: blends SchisandraVA3 (the base) with the
# official Mistral-Small instruct weights using 0/1 weight masks per
# projection type, so each tensor kind is taken from exactly one parent
# at any given depth slot.
dtype: bfloat16  # precision of the merged output weights
tokenizer_source: base  # keep the tokenizer of base_model (SchisandraVA3)
merge_method: della_linear
parameters:
  density: 0.5  # DELLA pruning density applied to delta parameters
base_model: SchisandraVA3
models:
  - model: unsloth/Mistral-Small-Instruct-2409
    parameters:
      weight:
        # Each 11-element list is presumably a per-layer gradient that
        # mergekit interpolates across layer depth — TODO confirm against
        # mergekit docs. Values act as binary masks: 1 = take this tensor
        # type from this model at that depth, 0 = contribute nothing.
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        # Catch-all for tensors matched by no filter above (presumably
        # q_proj, k_proj, embeddings, norms): weight 0, i.e. this model
        # contributes nothing to them.
        - value: 0
  - model: SchisandraVA3
    parameters:
      weight:
        # These masks are the elementwise complement of the instruct
        # model's masks above, so every tensor/depth slot is sourced from
        # exactly one of the two parents.
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        # Default weight 1: all unfiltered tensors come from SchisandraVA3.
        - value: 1
Schisandra-v0.3 (YAML)
# mergekit della_linear merge: blends SchisandraVA3 (the base) with the
# official Mistral-Small instruct weights using 0/1 weight masks per
# projection type, so each tensor kind is taken from exactly one parent
# at any given depth slot.
dtype: bfloat16  # precision of the merged output weights
tokenizer_source: base  # keep the tokenizer of base_model (SchisandraVA3)
merge_method: della_linear
parameters:
  density: 0.5  # DELLA pruning density applied to delta parameters
base_model: SchisandraVA3
models:
  - model: unsloth/Mistral-Small-Instruct-2409
    parameters:
      weight:
        # Each 11-element list is presumably a per-layer gradient that
        # mergekit interpolates across layer depth — TODO confirm against
        # mergekit docs. Values act as binary masks: 1 = take this tensor
        # type from this model at that depth, 0 = contribute nothing.
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        # Catch-all for tensors matched by no filter above (presumably
        # q_proj, k_proj, embeddings, norms): weight 0, i.e. this model
        # contributes nothing to them.
        - value: 0
  - model: SchisandraVA3
    parameters:
      weight:
        # These masks are the elementwise complement of the instruct
        # model's masks above, so every tensor/depth slot is sourced from
        # exactly one of the two parents.
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        # Default weight 1: all unfiltered tensors come from SchisandraVA3.
        - value: 1
Schisandra-v0.3 (YAML)
# mergekit della_linear merge: blends SchisandraVA3 (the base) with the
# official Mistral-Small instruct weights using 0/1 weight masks per
# projection type, so each tensor kind is taken from exactly one parent
# at any given depth slot.
dtype: bfloat16  # precision of the merged output weights
tokenizer_source: base  # keep the tokenizer of base_model (SchisandraVA3)
merge_method: della_linear
parameters:
  density: 0.5  # DELLA pruning density applied to delta parameters
base_model: SchisandraVA3
models:
  - model: unsloth/Mistral-Small-Instruct-2409
    parameters:
      weight:
        # Each 11-element list is presumably a per-layer gradient that
        # mergekit interpolates across layer depth — TODO confirm against
        # mergekit docs. Values act as binary masks: 1 = take this tensor
        # type from this model at that depth, 0 = contribute nothing.
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        # Catch-all for tensors matched by no filter above (presumably
        # q_proj, k_proj, embeddings, norms): weight 0, i.e. this model
        # contributes nothing to them.
        - value: 0
  - model: SchisandraVA3
    parameters:
      weight:
        # These masks are the elementwise complement of the instruct
        # model's masks above, so every tensor/depth slot is sourced from
        # exactly one of the two parents.
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        # Default weight 1: all unfiltered tensors come from SchisandraVA3.
        - value: 1
Schisandra-v0.3 (YAML)
# mergekit della_linear merge: blends SchisandraVA3 (the base) with the
# official Mistral-Small instruct weights using 0/1 weight masks per
# projection type, so each tensor kind is taken from exactly one parent
# at any given depth slot.
dtype: bfloat16  # precision of the merged output weights
tokenizer_source: base  # keep the tokenizer of base_model (SchisandraVA3)
merge_method: della_linear
parameters:
  density: 0.5  # DELLA pruning density applied to delta parameters
base_model: SchisandraVA3
models:
  - model: unsloth/Mistral-Small-Instruct-2409
    parameters:
      weight:
        # Each 11-element list is presumably a per-layer gradient that
        # mergekit interpolates across layer depth — TODO confirm against
        # mergekit docs. Values act as binary masks: 1 = take this tensor
        # type from this model at that depth, 0 = contribute nothing.
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        # Catch-all for tensors matched by no filter above (presumably
        # q_proj, k_proj, embeddings, norms): weight 0, i.e. this model
        # contributes nothing to them.
        - value: 0
  - model: SchisandraVA3
    parameters:
      weight:
        # These masks are the elementwise complement of the instruct
        # model's masks above, so every tensor/depth slot is sourced from
        # exactly one of the two parents.
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        # Default weight 1: all unfiltered tensors come from SchisandraVA3.
        - value: 1
Schisandra-v0.3 (YAML)
# mergekit della_linear merge: blends SchisandraVA3 (the base) with the
# official Mistral-Small instruct weights using 0/1 weight masks per
# projection type, so each tensor kind is taken from exactly one parent
# at any given depth slot.
dtype: bfloat16  # precision of the merged output weights
tokenizer_source: base  # keep the tokenizer of base_model (SchisandraVA3)
merge_method: della_linear
parameters:
  density: 0.5  # DELLA pruning density applied to delta parameters
base_model: SchisandraVA3
models:
  - model: unsloth/Mistral-Small-Instruct-2409
    parameters:
      weight:
        # Each 11-element list is presumably a per-layer gradient that
        # mergekit interpolates across layer depth — TODO confirm against
        # mergekit docs. Values act as binary masks: 1 = take this tensor
        # type from this model at that depth, 0 = contribute nothing.
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        # Catch-all for tensors matched by no filter above (presumably
        # q_proj, k_proj, embeddings, norms): weight 0, i.e. this model
        # contributes nothing to them.
        - value: 0
  - model: SchisandraVA3
    parameters:
      weight:
        # These masks are the elementwise complement of the instruct
        # model's masks above, so every tensor/depth slot is sourced from
        # exactly one of the two parents.
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        # Default weight 1: all unfiltered tensors come from SchisandraVA3.
        - value: 1
Schisandra-v0.3 (YAML)
# mergekit della_linear merge: blends SchisandraVA3 (the base) with the
# official Mistral-Small instruct weights using 0/1 weight masks per
# projection type, so each tensor kind is taken from exactly one parent
# at any given depth slot.
dtype: bfloat16  # precision of the merged output weights
tokenizer_source: base  # keep the tokenizer of base_model (SchisandraVA3)
merge_method: della_linear
parameters:
  density: 0.5  # DELLA pruning density applied to delta parameters
base_model: SchisandraVA3
models:
  - model: unsloth/Mistral-Small-Instruct-2409
    parameters:
      weight:
        # Each 11-element list is presumably a per-layer gradient that
        # mergekit interpolates across layer depth — TODO confirm against
        # mergekit docs. Values act as binary masks: 1 = take this tensor
        # type from this model at that depth, 0 = contribute nothing.
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        # Catch-all for tensors matched by no filter above (presumably
        # q_proj, k_proj, embeddings, norms): weight 0, i.e. this model
        # contributes nothing to them.
        - value: 0
  - model: SchisandraVA3
    parameters:
      weight:
        # These masks are the elementwise complement of the instruct
        # model's masks above, so every tensor/depth slot is sourced from
        # exactly one of the two parents.
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        # Default weight 1: all unfiltered tensors come from SchisandraVA3.
        - value: 1

Deploy This Model

Production-ready deployment in minutes

Together.ai

Instant API access to this model

Fastest API

Production-ready inference API. Start free, scale to millions.

Try Free API

Replicate

One-click model deployment

Easiest Setup

Run models in the cloud with simple API. No DevOps required.

Deploy Now

Disclosure: We may earn a commission from these partners. This helps keep LLMYourWay free.