@@ -48,7 +48,21 @@
             "path": "gemma",
             "model_card": "https://www.kaggle.com/models/google/gemma",
         },
-        "kaggle_handle": "kaggle://keras/gemma/keras/gemma_1.1_instruct_2b_en/1",
+        "kaggle_handle": "kaggle://keras/gemma/keras/gemma_1.1_instruct_2b_en/3",
+    },
+    "code_gemma_2b_en": {
+        "metadata": {
+            "description": (
+                "2 billion parameter, 18-layer, CodeGemma model. This model "
+                "has been trained on a fill-in-the-middle (FIM) task for code "
+                "completion."
+            ),
+            "params": 2506172416,
+            "official_name": "Gemma",
+            "path": "gemma",
+            "model_card": "https://www.kaggle.com/models/google/gemma",
+        },
+        "kaggle_handle": "kaggle://keras/codegemma/keras/code_gemma_2b_en/1",
     },
     "gemma_7b_en": {
         "metadata": {
@@ -83,6 +97,34 @@
             "path": "gemma",
             "model_card": "https://www.kaggle.com/models/google/gemma",
         },
-        "kaggle_handle": "kaggle://keras/gemma/keras/gemma_1.1_instruct_7b_en/1",
+        "kaggle_handle": "kaggle://keras/gemma/keras/gemma_1.1_instruct_7b_en/3",
+    },
+    "code_gemma_7b_en": {
+        "metadata": {
+            "description": (
+                "7 billion parameter, 28-layer, CodeGemma model. This model "
+                "has been trained on a fill-in-the-middle (FIM) task for code "
+                "completion."
+            ),
+            "params": 8537680896,
+            "official_name": "Gemma",
+            "path": "gemma",
+            "model_card": "https://www.kaggle.com/models/google/gemma",
+        },
+        "kaggle_handle": "kaggle://keras/codegemma/keras/code_gemma_7b_en/1",
+    },
+    "code_gemma_instruct_7b_en": {
+        "metadata": {
+            "description": (
+                "7 billion parameter, 28-layer, instruction tuned CodeGemma "
+                "model. This model has been trained for chat use cases related "
+                "to code."
+            ),
+            "params": 8537680896,
+            "official_name": "Gemma",
+            "path": "gemma",
+            "model_card": "https://www.kaggle.com/models/google/gemma",
+        },
+        "kaggle_handle": "kaggle://keras/codegemma/keras/code_gemma_instruct_7b_en/1",
     },
 }
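For reference, a minimal usage sketch of the new presets. It assumes the entries above resolve through the existing preset machinery (keras_nlp.models.GemmaCausalLM.from_preset) and that the fill-in-the-middle control tokens follow the published CodeGemma prompt format; neither detail is part of this diff.

import keras_nlp

# Assumption: the new CodeGemma presets are served by the same Gemma task
# classes that read this registry.
lm = keras_nlp.models.GemmaCausalLM.from_preset("code_gemma_2b_en")

# FIM-style completion: the control tokens below follow the published
# CodeGemma prompt format and are not defined in this diff.
prompt = (
    "<|fim_prefix|>def mean(xs):\n    return <|fim_suffix|>\n<|fim_middle|>"
)
print(lm.generate(prompt, max_length=64))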
|