updating configuration

#1
by soldni - opened
README.md ADDED
@@ -0,0 +1,12 @@
+ ---
+ license: apache-2.0
+ base_model: allenai/Olmo-3-32B-Think-DPO
+ language:
+ - en
+ datasets:
+ - allenai/Dolci-Think-RL
+ library_name: transformers
+ ---
+
+ # Model Details
+ <img alt="Logo for Olmo 32B Think model" src="olmo-think.png" width="240px" style="margin-left:'auto' margin-right:'auto' display:'block'">
chat_template.jinja CHANGED
@@ -13,5 +13,4 @@ You are Olmo, a helpful AI assistant built by Ai2. Your date cutoff is December
 ' }}{% else %}{{ eos_token }}{% endif %}{% elif message['role'] == 'environment' %}{{ '<|im_start|>environment
 ' + message['content'] + '<|im_end|>
 ' }}{% endif %}{% if loop.last and add_generation_prompt %}{{ '<|im_start|>assistant
- <think>' }}{% endif %}{% endfor %}
-
+ <think>' }}{% endif %}{% endfor %}
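The functional effect of this hunk is unchanged: with `add_generation_prompt=True` the rendered prompt still ends by opening a `<think>` block for the assistant; only the stray trailing blank line is dropped. A minimal sketch of rendering the template through `transformers` (the repo id below is an assumption, not stated in this diff):

```python
# Sketch: render the chat template and confirm the generation prompt
# still ends with an opened <think> block.
from transformers import AutoTokenizer

repo_id = "allenai/Olmo-3-32B-Think"  # assumed repo id; substitute the actual repo

tokenizer = AutoTokenizer.from_pretrained(repo_id)
messages = [{"role": "user", "content": "What is 17 * 23?"}]

prompt = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,
)

# The rendered prompt ends with "<|im_start|>assistant\n<think>",
# so decoding begins inside the model's reasoning block.
print(prompt)
```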
 
generation_config.json CHANGED
@@ -2,5 +2,8 @@
 "_from_model_config": true,
 "eos_token_id": 100257,
 "pad_token_id": 100277,
- "transformers_version": "4.57.1"
- }
+ "transformers_version": "4.57.1",
+ "temperature": 0.6,
+ "top_p": 0.95,
+ "max_new_tokens": 32768
+ }
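These additions make temperature 0.6, top-p 0.95, and a 32,768-token generation budget the repo-level defaults that `generate()` falls back to when the caller does not override them. A minimal sketch under the same assumed repo id; note that `temperature`/`top_p` only take effect when sampling is enabled:

```python
# Sketch: the keys added to generation_config.json become the defaults
# that model.generate() uses unless the caller overrides them.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "allenai/Olmo-3-32B-Think"  # assumed repo id; substitute the actual repo

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id, device_map="auto", torch_dtype="auto")

print(model.generation_config.temperature)     # 0.6
print(model.generation_config.top_p)           # 0.95
print(model.generation_config.max_new_tokens)  # 32768

prompt = tokenizer.apply_chat_template(
    [{"role": "user", "content": "Briefly explain top-p sampling."}],
    tokenize=False,
    add_generation_prompt=True,
)
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

# do_sample=True is passed explicitly so the temperature/top_p defaults apply;
# max_new_tokens is picked up from generation_config.json.
outputs = model.generate(**inputs, do_sample=True)
print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True))
```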
model.safetensors.index.json CHANGED
@@ -1,7 +1,7 @@
 {
 "metadata": {
- "total_parameters": 1053696,
- "total_size": 64467044352
+ "total_parameters": 32233522176,
+ "total_size": 128934088704
 },
 "weight_map": {
 "lm_head.weight": "model-00014-of-00014.safetensors",
tokenizer_config.json CHANGED
@@ -185,5 +185,6 @@
 "model_max_length": 65536,
 "pad_token": "<|pad|>",
 "tokenizer_class": "GPT2Tokenizer",
- "unk_token": "<|endoftext|>"
- }
+ "unk_token": "<|endoftext|>",
+ "chat_template": "{% set has_system = messages|selectattr('role', 'equalto', 'system')|list|length > 0 %}{% if not has_system %}{{ '<|im_start|>system\nYou are Olmo, a helpful AI assistant built by Ai2. Your date cutoff is December 2024, and your model weights are available at https://huggingface.co/allenai.<|im_end|>\n' }}{% endif %}{% for message in messages %}{% if message['role'] == 'system' %}{{ '<|im_start|>system\n' + message['content'] }}{% if message.get('functions', none) is not none %}{{ ' <functions>' + message['functions'] + '</functions><|im_end|>\n' }}{% else %}{{ ' You do not currently have access to any functions. <functions></functions><|im_end|>\n' }}{% endif %}{% elif message['role'] == 'user' %}{% if message.get('functions', none) is not none %}{{ '<|im_start|>user\n' + message['content'] + '\n' + '<functions>' + message['functions'] + '</functions><|im_end|>\n' }}{% else %}{{ '<|im_start|>user\n' + message['content'] + '<|im_end|>\n' }}{% endif %}{% elif message['role'] == 'assistant' %}{{ '<|im_start|>assistant\n' }}{% if message.get('content', none) is not none %}{{ message['content'] }}{% endif %}{% if message.get('function_calls', none) is not none %}{{ '<function_calls>' + message['function_calls'] + '</function_calls>' }}{% endif %}{% if not loop.last %}{{ '<|im_end|>' + '\n' }}{% else %}{{ eos_token }}{% endif %}{% elif message['role'] == 'environment' %}{{ '<|im_start|>environment\n' + message['content'] + '<|im_end|>\n' }}{% endif %}{% if loop.last and add_generation_prompt %}{{ '<|im_start|>assistant\n<think>' }}{% endif %}{% endfor %}"
+ }
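Embedding the template in `tokenizer_config.json` duplicates `chat_template.jinja`, so loaders that read `chat_template` from the config pick up the same behavior, including the optional `functions` / `function_calls` / `environment` plumbing. A minimal sketch of a tool-style conversation; the repo id and the payload strings are illustrative assumptions, not defined by this diff:

```python
# Sketch: exercising the function-calling fields that the embedded template reads.
# The "functions" / "function_calls" values are opaque strings to the template;
# their schema here is made up for illustration.
from transformers import AutoTokenizer

repo_id = "allenai/Olmo-3-32B-Think"  # assumed repo id; substitute the actual repo
tokenizer = AutoTokenizer.from_pretrained(repo_id)

messages = [
    {
        "role": "system",
        "content": "You are Olmo, a helpful AI assistant built by Ai2.",
        "functions": '[{"name": "get_weather", "parameters": {"city": "string"}}]',
    },
    {"role": "user", "content": "What's the weather in Seattle?"},
    {
        "role": "assistant",
        "content": "",
        "function_calls": '[{"name": "get_weather", "arguments": {"city": "Seattle"}}]',
    },
    {"role": "environment", "content": '{"temperature_f": 54, "condition": "rain"}'},
]

# With add_generation_prompt=True the output again ends with
# "<|im_start|>assistant\n<think>", ready for the model's next turn.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
```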