| File | Download |
| --- | --- |
| .gitattributes | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct .gitattributes --local-dir meta-llama` |
| LICENSE | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct LICENSE --local-dir meta-llama` |
| README.md | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct README.md --local-dir meta-llama` |
| USE_POLICY.md | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct USE_POLICY.md --local-dir meta-llama` |
| chat_template.jinja | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct chat_template.jinja --local-dir meta-llama` |
| config.json | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct config.json --local-dir meta-llama` |
| generation_config.json | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct generation_config.json --local-dir meta-llama` |
| model-00001-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00001-of-00050.safetensors --local-dir meta-llama` |
| model-00002-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00002-of-00050.safetensors --local-dir meta-llama` |
| model-00003-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00003-of-00050.safetensors --local-dir meta-llama` |
| model-00004-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00004-of-00050.safetensors --local-dir meta-llama` |
| model-00005-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00005-of-00050.safetensors --local-dir meta-llama` |
| model-00006-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00006-of-00050.safetensors --local-dir meta-llama` |
| model-00007-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00007-of-00050.safetensors --local-dir meta-llama` |
| model-00008-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00008-of-00050.safetensors --local-dir meta-llama` |
| model-00009-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00009-of-00050.safetensors --local-dir meta-llama` |
| model-00010-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00010-of-00050.safetensors --local-dir meta-llama` |
| model-00011-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00011-of-00050.safetensors --local-dir meta-llama` |
| model-00012-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00012-of-00050.safetensors --local-dir meta-llama` |
| model-00013-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00013-of-00050.safetensors --local-dir meta-llama` |
| model-00014-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00014-of-00050.safetensors --local-dir meta-llama` |
| model-00015-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00015-of-00050.safetensors --local-dir meta-llama` |
| model-00016-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00016-of-00050.safetensors --local-dir meta-llama` |
| model-00017-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00017-of-00050.safetensors --local-dir meta-llama` |
| model-00018-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00018-of-00050.safetensors --local-dir meta-llama` |
| model-00019-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00019-of-00050.safetensors --local-dir meta-llama` |
| model-00020-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00020-of-00050.safetensors --local-dir meta-llama` |
| model-00021-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00021-of-00050.safetensors --local-dir meta-llama` |
| model-00022-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00022-of-00050.safetensors --local-dir meta-llama` |
| model-00023-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00023-of-00050.safetensors --local-dir meta-llama` |
| model-00024-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00024-of-00050.safetensors --local-dir meta-llama` |
| model-00025-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00025-of-00050.safetensors --local-dir meta-llama` |
| model-00026-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00026-of-00050.safetensors --local-dir meta-llama` |
| model-00027-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00027-of-00050.safetensors --local-dir meta-llama` |
| model-00028-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00028-of-00050.safetensors --local-dir meta-llama` |
| model-00029-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00029-of-00050.safetensors --local-dir meta-llama` |
| model-00030-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00030-of-00050.safetensors --local-dir meta-llama` |
| model-00031-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00031-of-00050.safetensors --local-dir meta-llama` |
| model-00032-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00032-of-00050.safetensors --local-dir meta-llama` |
| model-00033-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00033-of-00050.safetensors --local-dir meta-llama` |
| model-00034-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00034-of-00050.safetensors --local-dir meta-llama` |
| model-00035-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00035-of-00050.safetensors --local-dir meta-llama` |
| model-00036-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00036-of-00050.safetensors --local-dir meta-llama` |
| model-00037-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00037-of-00050.safetensors --local-dir meta-llama` |
| model-00038-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00038-of-00050.safetensors --local-dir meta-llama` |
| model-00039-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00039-of-00050.safetensors --local-dir meta-llama` |
| model-00040-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00040-of-00050.safetensors --local-dir meta-llama` |
| model-00041-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00041-of-00050.safetensors --local-dir meta-llama` |
| model-00042-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00042-of-00050.safetensors --local-dir meta-llama` |
| model-00043-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00043-of-00050.safetensors --local-dir meta-llama` |
| model-00044-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00044-of-00050.safetensors --local-dir meta-llama` |
| model-00045-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00045-of-00050.safetensors --local-dir meta-llama` |
| model-00046-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00046-of-00050.safetensors --local-dir meta-llama` |
| model-00047-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00047-of-00050.safetensors --local-dir meta-llama` |
| model-00048-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00048-of-00050.safetensors --local-dir meta-llama` |
| model-00049-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00049-of-00050.safetensors --local-dir meta-llama` |
| model-00050-of-00050.safetensors | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model-00050-of-00050.safetensors --local-dir meta-llama` |
| model.safetensors.index.json | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct model.safetensors.index.json --local-dir meta-llama` |
| preprocessor_config.json | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct preprocessor_config.json --local-dir meta-llama` |
| processor_config.json | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct processor_config.json --local-dir meta-llama` |
| special_tokens_map.json | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct special_tokens_map.json --local-dir meta-llama` |
| tokenizer.json | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct tokenizer.json --local-dir meta-llama` |
| tokenizer.model | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct tokenizer.model --local-dir meta-llama` |
| tokenizer_config.json | `export HF_ENDPOINT=https://hf.aihoster.ru`<br>`huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct tokenizer_config.json --local-dir meta-llama` |
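
Downloading file by file is not required: `huggingface-cli download` can fetch the whole repository, or a glob-selected subset of it, in a single call while still honoring the `HF_ENDPOINT` mirror. A minimal sketch, assuming the hf.aihoster.ru mirror exposes the standard Hub API and a recent `huggingface_hub` release that supports the `--include` filter:

```bash
# Point the CLI at the mirror instead of huggingface.co
export HF_ENDPOINT=https://hf.aihoster.ru

# Download the entire repository into ./meta-llama
# (already-downloaded files are skipped on rerun)
huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct --local-dir meta-llama

# Or fetch only the weight shards and their index via glob patterns
huggingface-cli download meta-llama/Llama-4-Scout-17B-16E-Instruct \
  --include "model-*.safetensors" "model.safetensors.index.json" \
  --local-dir meta-llama
```

If the repository is gated upstream, an access token may still be needed (via `huggingface-cli login` or the `--token` flag); whether the mirror enforces gating is an assumption worth checking before a large download.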