@@ -13,6 +13,6 @@ class SplitTextIntoSentences:
     @staticmethod
     def lang_chain_tokenize(text: str) -> List[str]:
-        splitter = RecursiveCharacterTextSplitter(chunk_size=100, chunk_overlap=10)
+        splitter = RecursiveCharacterTextSplitter(chunk_size=64, chunk_overlap=16)
         docs = splitter.split_text(text)
         return docs
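
For context, a minimal sketch of how the method reads after this change. The import path is an assumption (recent LangChain releases expose the splitter as langchain_text_splitters, older ones as langchain.text_splitter), and the rest of SplitTextIntoSentences is omitted because only these lines appear in the hunk:

from typing import List

# Assumed import path; older versions use: from langchain.text_splitter import ...
from langchain_text_splitters import RecursiveCharacterTextSplitter


class SplitTextIntoSentences:
    @staticmethod
    def lang_chain_tokenize(text: str) -> List[str]:
        # Recursively split the input into ~64-character chunks that overlap
        # by 16 characters (previously 100 / 10).
        splitter = RecursiveCharacterTextSplitter(chunk_size=64, chunk_overlap=16)
        docs = splitter.split_text(text)
        return docs

The net effect of the new parameters is shorter chunks that share more trailing text with their neighbours, e.g. SplitTextIntoSentences.lang_chain_tokenize(long_text) now returns more, smaller strings with greater overlap between adjacent ones.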