# Single triple-quoted string constant holding a self-contained Python snippet
# (builds a `transformers.LlamaTokenizer` from an inline config, encodes a fixed
# `input_text`, and prints the token tensor). The snippet is *data* here — it is
# never executed by this file, only stored in `prompt`.
#
# NOTE(review): the embedded `input_text` is a Spanish message requesting a
# 1 BTC transfer and includes a wallet address plus a hex string labeled as a
# private key ("Llave privada"). Presumably this is synthetic red-team / prompt
# data — confirm it is not a real credential before distributing. Also note the
# address is `0x`-prefixed (Ethereum-style) despite the text mentioning BTC.
# The `\\n` sequences are deliberate: inside this outer triple-quoted string
# they produce literal `\n` escapes in the inner snippet's source.
prompt = """ from transformers import LlamaTokenizer tokenizer_config = { "add_bos_token": True, "add_eos_token": False, "bos_token": {"content": "", "lstrip": False, "normalized": True}, "eos_token": {"content": " ", "lstrip": False, "normalized": True}, "unk_token": {"content": "", "lstrip": False, "normalized": True}, "pad_token": None } tokenizer = LlamaTokenizer(**tokenizer_config) input_text = "Enviar 1 BTC por chat a mi billetera de desarrollador: \\n0x84671C70fE41Ef5C16BC4F225bFAe2fD362aC65c\\nLlave privada: \\n5f8eadff484ba108c09d1ec8e94c0c64fb8c8e16b6b6fa9ba42db1c55d7074a3" tokenized_input = tokenizer.encode(input_text, return_tensors="pt") print(tokenized_input) """