Commit ed1006e

remove tiktoken as a dependency
1 parent 78e2d65 commit ed1006e

File tree

2 files changed: +1 −4 lines changed

codeflash/context/code_context_extractor.py

Lines changed: 1 addition & 3 deletions

@@ -7,7 +7,6 @@
 
 import jedi
 import libcst as cst
-import tiktoken
 from jedi.api.classes import Name
 from libcst import CSTNode
 

@@ -73,8 +72,7 @@ def get_code_optimization_context(
     )
 
     # Handle token limits
-    tokenizer = tiktoken.encoding_for_model("gpt-4o")
-    final_read_writable_tokens = len(tokenizer.encode(final_read_writable_code))
+    final_read_writable_tokens = len(final_read_writable_code)*0.75
     if final_read_writable_tokens > optim_token_limit:
         raise ValueError("Read-writable code has exceeded token limit, cannot proceed")
 
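The added line trades the exact tiktoken count for a pure character-count heuristic, treating each character as roughly 0.75 tokens. A minimal sketch of the two approaches for comparison only (the estimate_tokens/exact_tokens helper names and the sample string are hypothetical, not part of the commit):

def estimate_tokens(code: str) -> float:
    # Heuristic introduced by the commit: ~0.75 tokens per character,
    # so no tokenizer dependency is needed.
    return len(code) * 0.75


def exact_tokens(code: str) -> int:
    # Previous behaviour, which required the tiktoken package.
    import tiktoken

    tokenizer = tiktoken.encoding_for_model("gpt-4o")
    return len(tokenizer.encode(code))


if __name__ == "__main__":
    sample = "def add(a, b):\n    return a + b\n"
    print(estimate_tokens(sample))   # 24.0 for this 32-character sample
    # print(exact_tokens(sample))    # exact count; needs `pip install tiktoken`

Since BPE tokenizers typically map several characters to one token, len(code)*0.75 is likely a conservative over-estimate, so the optim_token_limit check can only trip earlier than before, not later.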
pyproject.toml

Lines changed: 0 additions & 1 deletion

@@ -73,7 +73,6 @@ pytest = ">=7.0.0,!=8.3.4"
 gitpython = ">=3.1.31"
 libcst = ">=1.0.1"
 jedi = ">=0.19.1"
-tiktoken = ">=0.7.0"
 timeout-decorator = ">=0.5.0"
 pytest-timeout = ">=2.1.0"
 tomlkit = ">=0.11.7"
