GPT-3 tokenizer (Python 3)
>>> from transformers import GPT2Tokenizer
>>> # GPT-3 uses the same byte-pair-encoding vocabulary as GPT-2
>>> tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
>>> tokenizer("Hello world")['input_ids']
[15496, 995]
>>> tokenizer(" Hello world")['input_ids']   # note the leading space
[18435, 995]
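The IDs differ because the leading space belongs to the token itself: " Hello" and "Hello" are separate vocabulary entries. A minimal sketch, reusing the same tokenizer object, that decodes each ID back to its raw BPE token string (the Ġ character marks a leading space in GPT-2's vocabulary):

>>> tokenizer.convert_ids_to_tokens([15496, 995])
['Hello', 'Ġworld']
>>> tokenizer.convert_ids_to_tokens([18435, 995])
['ĠHello', 'Ġworld']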