@@ -42,7 +42,7 @@
"metadata": {},
"outputs": [],
"source": [
- "# use ServiceContext to configure the LLM used and the custom embeddings \n",
+ "# use ServiceContext to configure the LLM used and the custom embeddings\n",
"from llama_index import ServiceContext\n",
"\n",
"# VectorStoreIndex is used to index custom data \n",
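For context, the notebook cell this whitespace-only hunk touches builds on the two comments visible above: ServiceContext configures the LLM and embeddings, and VectorStoreIndex indexes the custom data. A minimal sketch of how those pieces typically fit together with the legacy llama_index API follows; the specific LLM, embedding choice, and data directory are assumptions for illustration, not part of this diff.

```python
# Minimal sketch (assumes the legacy, pre-0.10 llama_index import paths).
from llama_index import ServiceContext, VectorStoreIndex, SimpleDirectoryReader
from llama_index.llms import OpenAI  # assumed LLM; any supported model works

# use ServiceContext to configure the LLM used and the custom embeddings
service_context = ServiceContext.from_defaults(
    llm=OpenAI(model="gpt-3.5-turbo"),  # assumed model choice
    embed_model="local",                # or pass a custom embedding object
)

# VectorStoreIndex is used to index custom data
documents = SimpleDirectoryReader("./data").load_data()  # assumed data folder
index = VectorStoreIndex.from_documents(documents, service_context=service_context)

# query the indexed data through the configured LLM
query_engine = index.as_query_engine()
print(query_engine.query("What is this data about?"))
```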