{
  "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
  "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
  "width": 65536,
  "model_name": "google/gemma-3-1b-it",
  "architecture": "jump_relu",
  "l0": 150,
  "affine_connection": false,
  "type": "transcoder"
}