feat(mistral): add Mistral model implementation and configs

- implement Mistral model in llm/models/mistral/mistral.py with GroupedQueryAttention, SwiGLU, RoPE, and sliding-window attention (see the attention sketch after this list)
- add __init__.py for module export
- add config files for mistral training and generation
- update universal experiment runner to support Mistral model
- add notebook for Mistral experiments
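For reference, a minimal sketch of how grouped-query attention can be combined with a sliding-window causal mask, the two mechanisms named in the first bullet. All names, shapes, and the window size here are illustrative assumptions, not the actual interfaces in llm/models/mistral/mistral.py.

import torch
import torch.nn.functional as F

def sliding_window_causal_mask(seq_len: int, window: int) -> torch.Tensor:
    # Position i may attend to positions j with i - window < j <= i,
    # i.e. a causal mask truncated to the last `window` positions.
    i = torch.arange(seq_len).unsqueeze(1)
    j = torch.arange(seq_len).unsqueeze(0)
    return (j <= i) & (j > i - window)

def grouped_query_attention(q, k, v, window: int):
    # q: (batch, n_q_heads, seq, head_dim); k, v: (batch, n_kv_heads, seq, head_dim),
    # with n_q_heads a multiple of n_kv_heads.
    n_q, n_kv = q.shape[1], k.shape[1]
    # Each K/V head serves a group of n_q_heads // n_kv_heads query heads.
    k = k.repeat_interleave(n_q // n_kv, dim=1)
    v = v.repeat_interleave(n_q // n_kv, dim=1)
    scores = q @ k.transpose(-2, -1) / q.shape[-1] ** 0.5
    mask = sliding_window_causal_mask(q.shape[2], window).to(q.device)
    scores = scores.masked_fill(~mask, float("-inf"))
    return F.softmax(scores, dim=-1) @ v

# Toy shapes: 8 query heads sharing 2 K/V heads, attention window of 4.
q = torch.randn(1, 8, 16, 32)
k = torch.randn(1, 2, 16, 32)
v = torch.randn(1, 2, 16, 32)
print(grouped_query_attention(q, k, v, window=4).shape)  # (1, 8, 16, 32)

Sharing K/V heads across query-head groups shrinks the KV cache by the group factor, which is the main motivation for GQA over standard multi-head attention.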
Author: Sergey Penkovsky
Date:   2025-10-14 14:53:45 +03:00
Parent: e5706a690d
Commit: ec0d2bd8d0

6 changed files with 3904 additions and 0 deletions


@@ -0,0 +1,19 @@
{
  "bpe_tokenizer": "checkpoints/bpe_tokenizer.json",
  "test_prompts": [
    "Open weights",
    "The Llama model is",
    "Efficient transformers"
  ],
  "model_config_path": "checkpoints/mistral-bpe/config.json",
  "model_weights": "checkpoints/mistral-bpe/model.pt",
  "generation": {
    "max_new_tokens": 40,
    "temperature": 0.8,
    "do_sample": true,
    "top_k": null,
    "top_p": null
  },
  "log_path": "checkpoints/mistral_only_generation_logs.json"
}
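For context, a sketch of how the "generation" block above could drive sampling. With top_k and top_p both null and do_sample true, decoding reduces to plain temperature sampling at temperature 0.8 over the full vocabulary. The config file path and the standalone sampler below are assumptions for illustration; the repo's universal experiment runner presumably implements the equivalent.

import json
import torch

def sample_next_token(logits, temperature, do_sample=True, top_k=None, top_p=None):
    # Greedy decoding when sampling is disabled.
    if not do_sample:
        return int(torch.argmax(logits))
    logits = logits / temperature
    if top_k is not None:
        # Mask everything below the k-th largest logit.
        kth = torch.topk(logits, top_k).values[-1]
        logits = logits.masked_fill(logits < kth, float("-inf"))
    if top_p is not None:
        # Nucleus filtering: drop a token once the cumulative probability mass
        # before it exceeds top_p; the top token is always kept.
        sorted_logits, idx = torch.sort(logits, descending=True)
        probs = torch.softmax(sorted_logits, dim=-1)
        drop = torch.cumsum(probs, dim=-1) - probs > top_p
        sorted_logits = sorted_logits.masked_fill(drop, float("-inf"))
        logits = torch.full_like(logits, float("-inf")).scatter(0, idx, sorted_logits)
    return int(torch.multinomial(torch.softmax(logits, dim=-1), 1))

# Usage with the config above (the file path is an assumption):
cfg = json.load(open("configs/mistral_generation.json"))
gen = cfg["generation"]
logits = torch.randn(32000)  # dummy next-token logits for illustration
next_id = sample_next_token(logits, gen["temperature"], gen["do_sample"],
                            gen["top_k"], gen["top_p"])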