Initial commit
Co-authored-by: Zhuohan Li <zhuohan@openai.com> Co-authored-by: Maratyszcza <marat@openai.com> Co-authored-by: Volodymyr Kyrylov <vol@wilab.org.ua>
This commit is contained in:
265
gpt_oss/metal/python/context.c
Normal file
265
gpt_oss/metal/python/context.c
Normal file
@@ -0,0 +1,265 @@
|
||||
#include <Python.h>
|
||||
|
||||
#include <gpt-oss.h>
|
||||
|
||||
#include "module.h"
|
||||
|
||||
|
||||
// Initialize a gptoss.Context from a gptoss.Model and an optional
// keyword-only context_length (0 selects the model's default length).
// Returns 0 on success, -1 with an exception set on failure.
static int PyGPTOSSContext_init(PyGPTOSSContext* self, PyObject* args, PyObject* kwargs) {
    static char *kwlist[] = {"model", "context_length", NULL};
    PyObject* model = NULL;
    Py_ssize_t context_length = 0;  // 0 means "use the model default"

    // "n" matches Py_ssize_t; the previous "i" unit stored an int into
    // Py_ssize_t storage, which is undefined behavior on LP64 platforms.
    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|$n", kwlist,
                                     &model, &context_length)) {
        return -1;
    }
    if (!PyObject_TypeCheck(model, &PyGPTOSSModel_Type)) {
        PyErr_SetString(PyExc_TypeError, "model must be a gptoss.Model object");
        return -1;
    }
    if (context_length < 0) {
        // 0 is accepted (default length), so the bound is "non-negative".
        PyErr_SetString(PyExc_ValueError, "context_length must be a non-negative integer");
        return -1;
    }

    const enum gptoss_status status = gptoss_context_create(
        ((const PyGPTOSSModel*) model)->handle,
        (size_t) context_length,
        &self->handle);
    if (status != gptoss_status_success) {
        PyErr_Format(PyExc_RuntimeError,
                     "failed to create Context (status %d)", (int) status);
        goto error;
    }

    return 0;

error:
    // handle was zero-initialized by tp_new (PyType_GenericNew), so releasing
    // it here mirrors the original cleanup path even when create failed.
    gptoss_context_release(self->handle);
    self->handle = NULL;
    return -1;
}
|
||||
|
||||
// Release the wrapped gptoss context handle, then free the Python object.
static void PyGPTOSSContext_dealloc(PyGPTOSSContext* self) {
    gptoss_context_t handle = self->handle;
    self->handle = NULL;
    (void) gptoss_context_release(handle);
    PyObject_Del((PyObject*) self);
}
|
||||
|
||||
// __copy__: create a new Python wrapper sharing the same retained handle.
static PyObject* PyGPTOSSContext_copy(PyGPTOSSContext *self) {
    PyGPTOSSContext* duplicate =
        (PyGPTOSSContext*) PyObject_New(PyGPTOSSContext, Py_TYPE(self));
    if (duplicate == NULL) {
        return NULL;
    }

    duplicate->handle = self->handle;
    (void) gptoss_context_retain(duplicate->handle);
    return (PyObject*) duplicate;
}
|
||||
|
||||
// append(arg): append text (bytes or str) or a single token ID (int) to the
// context. Returns None on success; raises TypeError for other argument
// types, OverflowError for out-of-range token IDs, RuntimeError on backend
// failure.
static PyObject* PyGPTOSSContext_append(PyGPTOSSContext* self, PyObject* arg) {
    if (PyBytes_Check(arg)) {
        char* string_ptr = NULL;
        Py_ssize_t string_size = 0;
        if (PyBytes_AsStringAndSize(arg, &string_ptr, &string_size) < 0) {
            return NULL;
        }

        const enum gptoss_status status = gptoss_context_append_chars(
            self->handle, string_ptr, string_size, /*num_tokens_out=*/NULL);
        if (status != gptoss_status_success) {
            PyErr_Format(PyExc_RuntimeError,
                         "failed to append bytes to Context (status %d)", (int) status);
            return NULL;
        }

        Py_RETURN_NONE;
    } else if (PyUnicode_Check(arg)) {
        Py_ssize_t string_size = 0;
        const char* string_ptr = PyUnicode_AsUTF8AndSize(arg, &string_size);
        if (string_ptr == NULL) {
            return NULL;
        }

        const enum gptoss_status status = gptoss_context_append_chars(
            self->handle, string_ptr, string_size, /*num_tokens_out=*/NULL);
        if (status != gptoss_status_success) {
            PyErr_Format(PyExc_RuntimeError,
                         "failed to append string to Context (status %d)", (int) status);
            return NULL;
        }

        Py_RETURN_NONE;
    } else if (PyLong_Check(arg)) {
        const unsigned long token_as_ulong = PyLong_AsUnsignedLong(arg);
        if (token_as_ulong == (unsigned long) -1 && PyErr_Occurred()) {
            return NULL;
        }
        // Token IDs are 32-bit; reject values the cast below would silently
        // truncate on platforms where unsigned long is 64-bit.
        if (token_as_ulong > (unsigned long) UINT32_MAX) {
            PyErr_SetString(PyExc_OverflowError, "token ID does not fit in 32 bits");
            return NULL;
        }

        const uint32_t token = (uint32_t) token_as_ulong;
        const enum gptoss_status status = gptoss_context_append_tokens(
            self->handle, /*num_tokens=*/1, &token);
        if (status != gptoss_status_success) {
            PyErr_Format(PyExc_RuntimeError,
                         "failed to append token to Context (status %d)", (int) status);
            return NULL;
        }

        Py_RETURN_NONE;
    } else {
        // The message now mentions str, which the branch above accepts.
        PyErr_SetString(PyExc_TypeError, "expected a bytes, str, or integer argument");
        return NULL;
    }
}
|
||||
|
||||
// process(): run the model over any tokens appended since the last call.
// Returns None on success, raises RuntimeError on backend failure.
static PyObject* PyGPTOSSContext_process(PyGPTOSSContext* self) {
    const enum gptoss_status status = gptoss_context_process(self->handle);
    if (status != gptoss_status_success) {
        PyErr_Format(PyExc_RuntimeError,
                     "failed to process Context (status %d)", (int) status);
        return NULL;
    }

    Py_RETURN_NONE;
}
|
||||
|
||||
// sample(*, temperature=1.0, seed=0): sample one predicted token from the
// context. Both arguments are keyword-only ("$" in the format string).
// Returns the token ID as an int; raises RuntimeError on backend failure.
static PyObject* PyGPTOSSContext_sample(PyGPTOSSContext* self, PyObject* args, PyObject* kwargs) {
    static char *kwlist[] = {"temperature", "seed", NULL};

    unsigned long long seed = 0;
    float temperature = 1.0f;
    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|$fK", kwlist,
                                     &temperature, &seed))
    {
        return NULL;
    }

    uint32_t token_out = UINT32_MAX;
    const enum gptoss_status status = gptoss_context_sample(
        self->handle, temperature, (uint64_t) seed, &token_out);
    if (status != gptoss_status_success) {
        PyErr_Format(PyExc_RuntimeError,
                     "failed to sample from Context (status %d)", (int) status);
        return NULL;
    }

    return PyLong_FromUnsignedLong((unsigned long) token_out);
}
|
||||
|
||||
// reset(): discard all tokens held by the context.
// Returns None on success, raises RuntimeError on backend failure.
static PyObject* PyGPTOSSContext_reset(PyGPTOSSContext* self) {
    const enum gptoss_status status = gptoss_context_reset(self->handle);
    if (status != gptoss_status_success) {
        PyErr_Format(PyExc_RuntimeError,
                     "failed to reset Context (status %d)", (int) status);
        return NULL;
    }

    Py_RETURN_NONE;
}
|
||||
|
||||
static PyMethodDef PyGPTOSSContext_methods[] = {
|
||||
{"__copy__", (PyCFunction) PyGPTOSSContext_copy, METH_NOARGS, "Create a copy of the Context"},
|
||||
{"append", (PyCFunction) PyGPTOSSContext_append, METH_O, "Append bytes to the Context"},
|
||||
{"process", (PyCFunction) PyGPTOSSContext_process, METH_NOARGS, "Process tokens in the Context"},
|
||||
{"sample", (PyCFunction) PyGPTOSSContext_sample, METH_VARARGS | METH_KEYWORDS, "Sample token prediction from the Context"},
|
||||
{"reset", (PyCFunction) PyGPTOSSContext_reset, METH_NOARGS, "Discard the content of the Context"},
|
||||
{NULL},
|
||||
};
|
||||
|
||||
// num_tokens property getter: current number of tokens in the context.
// Raises RuntimeError on backend failure.
static PyObject* PyGPTOSSContext_get_num_tokens(PyGPTOSSContext* self, void* closure) {
    size_t num_tokens = 0;
    const enum gptoss_status status = gptoss_context_get_num_tokens(self->handle, &num_tokens);
    if (status != gptoss_status_success) {
        PyErr_Format(PyExc_RuntimeError,
                     "failed to query Context token count (status %d)", (int) status);
        return NULL;
    }

    return PyLong_FromSize_t(num_tokens);
}
|
||||
|
||||
// max_tokens property getter: maximum number of tokens the context can hold.
// Raises RuntimeError on backend failure.
static PyObject* PyGPTOSSContext_get_max_tokens(PyGPTOSSContext* self, void* closure) {
    size_t max_tokens = 0;
    const enum gptoss_status status = gptoss_context_get_max_tokens(self->handle, &max_tokens);
    if (status != gptoss_status_success) {
        PyErr_Format(PyExc_RuntimeError,
                     "failed to query Context capacity (status %d)", (int) status);
        return NULL;
    }

    return PyLong_FromSize_t(max_tokens);
}
|
||||
|
||||
// tokens property getter: return the context's token IDs as a new list.
// Uses goto-based cleanup so every exit path frees the temporary buffer.
static PyObject* PyGPTOSSContext_get_tokens(PyGPTOSSContext* self, void* closure) {
    PyObject* token_list_obj = NULL;
    PyObject* token_obj = NULL;
    uint32_t* token_ptr = NULL;

    // Size query: a NULL buffer with max_tokens=0 fills in num_tokens.
    // NOTE(review): the status of this size query is deliberately ignored,
    // as in the original code — it may report an insufficient-buffer status
    // by design; confirm against the gpt-oss API.
    size_t num_tokens = 0;
    (void) gptoss_context_get_tokens(self->handle, /*tokens_out=*/NULL, /*max_tokens=*/0, &num_tokens);

    if (num_tokens != 0) {
        token_ptr = (uint32_t*) PyMem_Malloc(num_tokens * sizeof(uint32_t));
        if (token_ptr == NULL) {
            PyErr_NoMemory();
            goto error;
        }

        const enum gptoss_status status = gptoss_context_get_tokens(
            self->handle, token_ptr, /*max_tokens=*/num_tokens, &num_tokens);
        if (status != gptoss_status_success) {
            PyErr_Format(PyExc_RuntimeError,
                         "failed to read Context tokens (status %d)", (int) status);
            goto error;
        }
    }

    token_list_obj = PyList_New((Py_ssize_t) num_tokens);
    if (token_list_obj == NULL) {
        goto error;
    }

    for (size_t t = 0; t < num_tokens; t++) {
        token_obj = PyLong_FromUnsignedLong((unsigned long) token_ptr[t]);
        if (token_obj == NULL) {
            goto error;
        }
        if (PyList_SetItem(token_list_obj, (Py_ssize_t) t, token_obj) < 0) {
            goto error;
        }
        token_obj = NULL;  // PyList_SetItem stole the reference
    }

    PyMem_Free(token_ptr);
    return token_list_obj;

error:
    PyMem_Free(token_ptr);
    Py_XDECREF(token_obj);
    Py_XDECREF(token_list_obj);
    return NULL;
}
|
||||
|
||||
static PyGetSetDef PyGPTOSSContext_getseters[] = {
|
||||
(PyGetSetDef) {
|
||||
.name = "num_tokens",
|
||||
.get = (getter) PyGPTOSSContext_get_num_tokens,
|
||||
.doc = "Current number of tokens in the context",
|
||||
},
|
||||
(PyGetSetDef) {
|
||||
.name = "max_tokens",
|
||||
.get = (getter) PyGPTOSSContext_get_max_tokens,
|
||||
.doc = "Maximum number of tokens in the context",
|
||||
},
|
||||
(PyGetSetDef) {
|
||||
.name = "tokens",
|
||||
.get = (getter) PyGPTOSSContext_get_tokens,
|
||||
.doc = "List of token IDs in the context",
|
||||
},
|
||||
{NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
PyTypeObject PyGPTOSSContext_Type = {
|
||||
PyVarObject_HEAD_INIT(NULL, 0)
|
||||
.tp_name = "gptoss.Context",
|
||||
.tp_basicsize = sizeof(PyGPTOSSContext),
|
||||
.tp_flags = 0
|
||||
| Py_TPFLAGS_DEFAULT
|
||||
| Py_TPFLAGS_BASETYPE,
|
||||
.tp_doc = "Context object",
|
||||
.tp_methods = PyGPTOSSContext_methods,
|
||||
.tp_getset = PyGPTOSSContext_getseters,
|
||||
.tp_new = PyType_GenericNew,
|
||||
.tp_init = (initproc) PyGPTOSSContext_init,
|
||||
.tp_dealloc = (destructor) PyGPTOSSContext_dealloc,
|
||||
};
|
||||
94
gpt_oss/metal/python/model.c
Normal file
94
gpt_oss/metal/python/model.c
Normal file
@@ -0,0 +1,94 @@
|
||||
#include <Python.h>
|
||||
|
||||
#include <gpt-oss.h>
|
||||
|
||||
#include "module.h"
|
||||
|
||||
|
||||
// Initialize a gptoss.Model from a model file path.
// Returns 0 on success, -1 with an exception set on failure.
static int PyGPTOSSModel_init(PyGPTOSSModel* self, PyObject* args, PyObject* kwargs) {
    const char* filepath = NULL;

    if (!PyArg_ParseTuple(args, "s", &filepath)) {
        return -1;
    }
    const enum gptoss_status status = gptoss_model_create_from_file(filepath, &self->handle);
    if (status != gptoss_status_success) {
        // Include the path so load failures are diagnosable from Python.
        PyErr_Format(PyExc_RuntimeError,
                     "failed to load model from %s (status %d)", filepath, (int) status);
        return -1;
    }
    return 0;
}
|
||||
|
||||
// Release the wrapped gptoss model handle, then free the Python object.
static void PyGPTOSSModel_dealloc(PyGPTOSSModel* self) {
    gptoss_model_t handle = self->handle;
    self->handle = NULL;
    (void) gptoss_model_release(handle);
    PyObject_Del((PyObject*) self);
}
|
||||
|
||||
// __copy__: create a new Python wrapper sharing the same retained handle.
static PyObject* PyGPTOSSModel_copy(PyGPTOSSModel* self) {
    PyGPTOSSModel* duplicate =
        (PyGPTOSSModel*) PyObject_New(PyGPTOSSModel, Py_TYPE(self));
    if (duplicate == NULL) {
        return NULL;
    }

    duplicate->handle = self->handle;
    (void) gptoss_model_retain(duplicate->handle);
    return (PyObject*) duplicate;
}
|
||||
|
||||
static PyMethodDef PyGPTOSSModel_methods[] = {
|
||||
{"__copy__", (PyCFunction) PyGPTOSSModel_copy, METH_NOARGS, "Create a copy of the Model"},
|
||||
{NULL},
|
||||
};
|
||||
|
||||
// max_context_length property getter: largest context length the model
// supports. Raises RuntimeError on backend failure.
static PyObject *PyGPTOSSModel_get_max_context_length(PyGPTOSSModel* self, void* closure) {
    size_t max_context_length = 0;
    const enum gptoss_status status = gptoss_model_get_max_context_length(self->handle, &max_context_length);
    if (status != gptoss_status_success) {
        PyErr_Format(PyExc_RuntimeError,
                     "failed to query model max context length (status %d)", (int) status);
        return NULL;
    }

    return PyLong_FromSize_t(max_context_length);
}
|
||||
|
||||
// tokenizer property getter: construct a new gptoss.Tokenizer bound to
// this model by calling the Tokenizer type with (self,) as arguments.
static PyObject *PyGPTOSSModel_get_tokenizer(PyGPTOSSModel* self, void* closure) {
    PyObject* ctor_args = PyTuple_Pack(1, self);
    if (ctor_args == NULL) {
        return NULL;
    }

    PyObject* tokenizer =
        PyObject_CallObject((PyObject*) &PyGPTOSSTokenizer_Type, ctor_args);
    Py_DECREF(ctor_args);
    return tokenizer;
}
|
||||
|
||||
static PyGetSetDef PyGPTOSSModel_getseters[] = {
|
||||
(PyGetSetDef) {
|
||||
.name = "max_context_length",
|
||||
.get = (getter) PyGPTOSSModel_get_max_context_length,
|
||||
.doc = "Maximum context length supported by the model",
|
||||
},
|
||||
(PyGetSetDef) {
|
||||
.name = "tokenizer",
|
||||
.get = (getter) PyGPTOSSModel_get_tokenizer,
|
||||
.doc = "Tokenizer object associated with the model",
|
||||
},
|
||||
{NULL} // Sentinel
|
||||
};
|
||||
|
||||
PyTypeObject PyGPTOSSModel_Type = {
|
||||
PyVarObject_HEAD_INIT(NULL, 0)
|
||||
.tp_name = "gptoss.Model",
|
||||
.tp_basicsize = sizeof(PyGPTOSSModel),
|
||||
.tp_flags = 0
|
||||
| Py_TPFLAGS_DEFAULT
|
||||
| Py_TPFLAGS_BASETYPE,
|
||||
.tp_doc = "Model object",
|
||||
.tp_methods = PyGPTOSSModel_methods,
|
||||
.tp_getset = PyGPTOSSModel_getseters,
|
||||
.tp_new = PyType_GenericNew,
|
||||
.tp_init = (initproc) PyGPTOSSModel_init,
|
||||
.tp_dealloc = (destructor) PyGPTOSSModel_dealloc,
|
||||
};
|
||||
67
gpt_oss/metal/python/module.c
Normal file
67
gpt_oss/metal/python/module.c
Normal file
@@ -0,0 +1,67 @@
|
||||
#include <Python.h>
|
||||
|
||||
#include "module.h"
|
||||
|
||||
|
||||
static PyMethodDef module_methods[] = {
|
||||
{NULL, NULL, 0, NULL}
|
||||
};
|
||||
|
||||
static PyModuleDef metal_module = {
|
||||
PyModuleDef_HEAD_INIT,
|
||||
"_metal",
|
||||
"Local GPT-OSS inference",
|
||||
-1,
|
||||
module_methods
|
||||
};
|
||||
|
||||
PyMODINIT_FUNC PyInit__metal(void) {
|
||||
PyObject* module = NULL;
|
||||
PyObject* model_type = NULL;
|
||||
PyObject* tokenizer_type = NULL;
|
||||
PyObject* context_type = NULL;
|
||||
|
||||
if (PyType_Ready(&PyGPTOSSModel_Type) < 0) {
|
||||
goto error;
|
||||
}
|
||||
model_type = (PyObject*) &PyGPTOSSModel_Type;
|
||||
Py_INCREF(model_type);
|
||||
|
||||
if (PyType_Ready(&PyGPTOSSTokenizer_Type) < 0) {
|
||||
goto error;
|
||||
}
|
||||
tokenizer_type = (PyObject*) &PyGPTOSSTokenizer_Type;
|
||||
Py_INCREF(tokenizer_type);
|
||||
|
||||
if (PyType_Ready(&PyGPTOSSContext_Type) < 0) {
|
||||
goto error;
|
||||
}
|
||||
context_type = (PyObject*) &PyGPTOSSContext_Type;
|
||||
Py_INCREF(context_type);
|
||||
|
||||
module = PyModule_Create(&metal_module);
|
||||
if (module == NULL) {
|
||||
goto error;
|
||||
}
|
||||
|
||||
if (PyModule_AddObject(module, "Model", model_type) < 0) {
|
||||
goto error;
|
||||
}
|
||||
|
||||
if (PyModule_AddObject(module, "Tokenizer", tokenizer_type) < 0) {
|
||||
goto error;
|
||||
}
|
||||
|
||||
if (PyModule_AddObject(module, "Context", context_type) < 0) {
|
||||
goto error;
|
||||
}
|
||||
|
||||
return module;
|
||||
|
||||
error:
|
||||
Py_XDECREF(context_type);
|
||||
Py_XDECREF(tokenizer_type);
|
||||
Py_XDECREF(model_type);
|
||||
Py_XDECREF(module);
|
||||
return NULL;
|
||||
}
|
||||
22
gpt_oss/metal/python/module.h
Normal file
22
gpt_oss/metal/python/module.h
Normal file
@@ -0,0 +1,22 @@
|
||||
#include <Python.h>
|
||||
|
||||
#include <gpt-oss.h>
|
||||
|
||||
// Python object wrapping a gptoss_model_t handle.
typedef struct {
    PyObject_HEAD
    // Owned handle; released in the type's tp_dealloc.
    gptoss_model_t handle;
} PyGPTOSSModel;

// Python object wrapping a gptoss_tokenizer_t handle.
typedef struct {
    PyObject_HEAD
    // Owned handle; released in the type's tp_dealloc.
    gptoss_tokenizer_t handle;
} PyGPTOSSTokenizer;

// Python object wrapping a gptoss_context_t handle.
typedef struct {
    PyObject_HEAD
    // Owned handle; released in the type's tp_dealloc.
    gptoss_context_t handle;
} PyGPTOSSContext;

// Extension type objects, defined in model.c, tokenizer.c, and context.c.
extern PyTypeObject PyGPTOSSModel_Type;
extern PyTypeObject PyGPTOSSTokenizer_Type;
extern PyTypeObject PyGPTOSSContext_Type;
|
||||
185
gpt_oss/metal/python/tokenizer.c
Normal file
185
gpt_oss/metal/python/tokenizer.c
Normal file
@@ -0,0 +1,185 @@
|
||||
#include <Python.h>
|
||||
|
||||
#include <gpt-oss.h>
|
||||
|
||||
#include "module.h"
|
||||
|
||||
// Tokenizer(model): tp_new that obtains the tokenizer handle from a Model.
// Returns a new Tokenizer, or NULL with an exception set on failure.
static PyObject* PyGPTOSSTokenizer_new(PyTypeObject* subtype, PyObject* args, PyObject* kwargs) {
    static char *kwlist[] = {"model", NULL};
    PyObject* model = NULL;
    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O!", kwlist, &PyGPTOSSModel_Type, &model)) {
        return NULL;
    }

    PyGPTOSSTokenizer* self = (PyGPTOSSTokenizer*) subtype->tp_alloc(subtype, 0);
    if (self == NULL) {
        return NULL;
    }

    const enum gptoss_status status = gptoss_model_get_tokenizer(
        ((const PyGPTOSSModel*) model)->handle,
        &self->handle);
    if (status != gptoss_status_success) {
        // Free the half-constructed object (the original code leaked it)
        // and report the failure instead of returning NULL silently.
        Py_DECREF(self);
        PyErr_Format(PyExc_RuntimeError,
                     "failed to get tokenizer from model (status %d)", (int) status);
        return NULL;
    }

    return (PyObject*) self;
}
|
||||
|
||||
// Release the wrapped gptoss tokenizer handle, then free the Python object.
static void PyGPTOSSTokenizer_dealloc(PyGPTOSSTokenizer* self) {
    gptoss_tokenizer_t handle = self->handle;
    self->handle = NULL;
    (void) gptoss_tokenizer_release(handle);
    PyObject_Del((PyObject*) self);
}
|
||||
|
||||
// __copy__: create a new Python wrapper sharing the same retained handle.
static PyObject* PyGPTOSSTokenizer_copy(PyGPTOSSTokenizer* self) {
    PyGPTOSSTokenizer* duplicate =
        (PyGPTOSSTokenizer*) PyObject_New(PyGPTOSSTokenizer, Py_TYPE(self));
    if (duplicate == NULL) {
        return NULL;
    }

    duplicate->handle = self->handle;
    (void) gptoss_tokenizer_retain(duplicate->handle);
    return (PyObject*) duplicate;
}
|
||||
|
||||
// encode_special_token(name): map a special-token spelling such as
// "<|return|>" to its numeric token ID. Raises TypeError for non-str input,
// ValueError for unknown spellings or tokens this tokenizer does not define.
static PyObject* PyGPTOSSTokenizer_encode_special_token(PyGPTOSSTokenizer* self, PyObject* arg) {
    if (!PyUnicode_Check(arg)) {
        PyErr_SetString(PyExc_TypeError, "string argument expected");
        return NULL;
    }

    const char* string_ptr = PyUnicode_AsUTF8(arg);
    if (string_ptr == NULL) {
        return NULL;
    }

    // Table of recognized spellings and their gpt-oss token-type values.
    static const struct {
        const char* name;
        enum gptoss_special_token value;
    } special_tokens[] = {
        {"<|return|>",        gptoss_special_token_return},
        {"<|start|>",         gptoss_special_token_start},
        {"<|message|>",       gptoss_special_token_message},
        {"<|end|>",           gptoss_special_token_end},
        {"<|refusal|>",       gptoss_special_token_refusal},
        {"<|constrain|>",     gptoss_special_token_constrain},
        {"<|channel|>",       gptoss_special_token_channel},
        {"<|call|>",          gptoss_special_token_call},
        {"<|untrusted|>",     gptoss_special_token_untrusted},
        {"<|end_untrusted|>", gptoss_special_token_end_untrusted},
    };

    enum gptoss_special_token token_type = gptoss_special_token_invalid;
    for (size_t i = 0; i < sizeof(special_tokens) / sizeof(special_tokens[0]); i++) {
        if (strcmp(string_ptr, special_tokens[i].name) == 0) {
            token_type = special_tokens[i].value;
            break;
        }
    }
    if (token_type == gptoss_special_token_invalid) {
        PyErr_Format(PyExc_ValueError, "unrecognized special token: %s", string_ptr);
        return NULL;
    }

    uint32_t token_id = UINT32_MAX;
    const enum gptoss_status status = gptoss_tokenizer_get_special_token_id(
        self->handle, token_type, &token_id);
    if (status != gptoss_status_success || token_id == UINT32_MAX) {
        PyErr_Format(PyExc_ValueError, "tokenizer does not support the %s token", string_ptr);
        return NULL;
    }

    return PyLong_FromUnsignedLong((unsigned long) token_id);
}
|
||||
|
||||
// decode(token): return the byte string for a text token ID.
// Raises ValueError if the backend cannot decode the token.
static PyObject* PyGPTOSSTokenizer_decode(PyGPTOSSTokenizer* self, PyObject* args, PyObject* kwargs) {
    static char *kwlist[] = {"token", NULL};
    unsigned int token = 0;

    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "I", kwlist, &token)) {
        return NULL;
    }

    const void* token_ptr = NULL;
    size_t token_size = 0;
    const enum gptoss_status status = gptoss_tokenizer_decode(self->handle, (uint32_t) token, &token_ptr, &token_size);
    if (status != gptoss_status_success) {
        PyErr_Format(PyExc_ValueError,
                     "failed to decode token %u (status %d)", token, (int) status);
        return NULL;
    }

    return PyBytes_FromStringAndSize((const char*) token_ptr, (Py_ssize_t) token_size);
}
|
||||
|
||||
static PyMethodDef PyGPTOSSTokenizer_methods[] = {
|
||||
{"__copy__", (PyCFunction) PyGPTOSSTokenizer_copy, METH_NOARGS, "Create a copy of the Tokenizer"},
|
||||
{"encode_special_token", (PyCFunction) PyGPTOSSTokenizer_encode_special_token, METH_O, "Query ID of a special token"},
|
||||
{"decode", (PyCFunction) PyGPTOSSTokenizer_decode, METH_VARARGS | METH_KEYWORDS, "Convert text token ID to bytes"},
|
||||
{NULL},
|
||||
};
|
||||
|
||||
// num_text_tokens property getter: count of text tokens in the dictionary.
// Raises RuntimeError on backend failure.
static PyObject* PyGPTOSSTokenizer_get_num_text_tokens(PyGPTOSSTokenizer* self, void* closure) {
    uint32_t num_text_tokens = 0;
    const enum gptoss_status status = gptoss_tokenizer_get_num_text_tokens(self->handle, &num_text_tokens);
    if (status != gptoss_status_success) {
        PyErr_Format(PyExc_RuntimeError,
                     "failed to query text token count (status %d)", (int) status);
        return NULL;
    }

    return PyLong_FromUnsignedLong((unsigned long) num_text_tokens);
}
|
||||
|
||||
// num_special_tokens property getter: count of special tokens in the
// dictionary. Raises RuntimeError on backend failure.
static PyObject* PyGPTOSSTokenizer_get_num_special_tokens(PyGPTOSSTokenizer* self, void* closure) {
    uint32_t num_special_tokens = 0;
    const enum gptoss_status status = gptoss_tokenizer_get_num_special_tokens(self->handle, &num_special_tokens);
    if (status != gptoss_status_success) {
        PyErr_Format(PyExc_RuntimeError,
                     "failed to query special token count (status %d)", (int) status);
        return NULL;
    }

    return PyLong_FromUnsignedLong((unsigned long) num_special_tokens);
}
|
||||
|
||||
// num_tokens property getter: total token count in the dictionary.
// Raises RuntimeError on backend failure.
static PyObject* PyGPTOSSTokenizer_get_num_tokens(PyGPTOSSTokenizer* self, void* closure) {
    uint32_t num_tokens = 0;
    const enum gptoss_status status = gptoss_tokenizer_get_num_tokens(self->handle, &num_tokens);
    if (status != gptoss_status_success) {
        PyErr_Format(PyExc_RuntimeError,
                     "failed to query token count (status %d)", (int) status);
        return NULL;
    }

    return PyLong_FromUnsignedLong((unsigned long) num_tokens);
}
|
||||
|
||||
static PyGetSetDef PyGPTOSSTokenizer_getseters[] = {
|
||||
(PyGetSetDef) {
|
||||
.name = "num_tokens",
|
||||
.get = (getter) PyGPTOSSTokenizer_get_num_tokens,
|
||||
.doc = "Total number of tokens in the tokenizer dictionary",
|
||||
},
|
||||
(PyGetSetDef) {
|
||||
.name = "num_text_tokens",
|
||||
.get = (getter) PyGPTOSSTokenizer_get_num_text_tokens,
|
||||
.doc = "Number of text tokens in the tokenizer dictionary",
|
||||
},
|
||||
(PyGetSetDef) {
|
||||
.name = "num_special_tokens",
|
||||
.get = (getter) PyGPTOSSTokenizer_get_num_special_tokens,
|
||||
.doc = "Number of special tokens in the tokenizer dictionary",
|
||||
},
|
||||
{NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
PyTypeObject PyGPTOSSTokenizer_Type = {
|
||||
PyVarObject_HEAD_INIT(NULL, 0)
|
||||
.tp_name = "gptoss.Tokenizer",
|
||||
.tp_basicsize = sizeof(PyGPTOSSTokenizer),
|
||||
.tp_flags = 0
|
||||
| Py_TPFLAGS_DEFAULT
|
||||
| Py_TPFLAGS_BASETYPE,
|
||||
.tp_doc = "Tokenizer object",
|
||||
.tp_methods = PyGPTOSSTokenizer_methods,
|
||||
.tp_getset = PyGPTOSSTokenizer_getseters,
|
||||
.tp_new = PyGPTOSSTokenizer_new,
|
||||
.tp_dealloc = (destructor) PyGPTOSSTokenizer_dealloc,
|
||||
};
|
||||
Reference in New Issue
Block a user