enhancing_rag_with_graph.ipynb
{ | |
"cells": [ | |
{ | |
"cell_type": "markdown", | |
"metadata": { | |
"id": "view-in-github", | |
"colab_type": "text" | |
}, | |
"source": [ | |
"<a href=\"https://colab.research.google.com/gist/AkashC-goML/dd856e4a60cbce4aa7621e7cfae9460c/enhancing_rag_with_graph.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>" | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "5x3LkpUztHNU", | |
"outputId": "0ee6c75b-67ff-4573-fbed-b74d755a2885", | |
"colab": { | |
"base_uri": "https://localhost:8080/" | |
} | |
}, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"name": "stdout", | |
"text": [ | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m973.7/973.7 kB\u001b[0m \u001b[31m5.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.1/2.1 MB\u001b[0m \u001b[31m12.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m199.5/199.5 kB\u001b[0m \u001b[31m10.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m203.0/203.0 kB\u001b[0m \u001b[31m15.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[?25h Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n", | |
" Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n", | |
" Installing backend dependencies ... \u001b[?25l\u001b[?25hdone\n", | |
" Preparing metadata (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n", | |
" Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.1/1.1 MB\u001b[0m \u001b[31m16.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m15.3/15.3 MB\u001b[0m \u001b[31m39.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m307.9/307.9 kB\u001b[0m \u001b[31m29.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m121.4/121.4 kB\u001b[0m \u001b[31m12.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m320.6/320.6 kB\u001b[0m \u001b[31m28.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m49.3/49.3 kB\u001b[0m \u001b[31m5.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m53.0/53.0 kB\u001b[0m \u001b[31m6.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m142.5/142.5 kB\u001b[0m \u001b[31m14.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m75.6/75.6 kB\u001b[0m \u001b[31m9.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m77.9/77.9 kB\u001b[0m \u001b[31m8.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.3/58.3 kB\u001b[0m \u001b[31m6.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.6/1.6 MB\u001b[0m \u001b[31m49.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[?25h Building wheel for neo4j (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n", | |
" Building wheel for wikipedia (setup.py) ... \u001b[?25l\u001b[?25hdone\n" | |
] | |
} | |
], | |
"source": [ | |
"%pip install --upgrade --quiet langchain langchain-community langchain-openai langchain-experimental neo4j wikipedia tiktoken yfiles_jupyter_graphs" | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "jPIRSGz4tHNV" | |
}, | |
"outputs": [], | |
"source": [ | |
"from langchain_core.runnables import (\n", | |
" RunnableBranch,\n", | |
" RunnableLambda,\n", | |
" RunnableParallel,\n", | |
" RunnablePassthrough,\n", | |
")\n", | |
"from langchain_core.prompts import ChatPromptTemplate\n", | |
"from langchain_core.prompts.prompt import PromptTemplate\n", | |
"from langchain_core.pydantic_v1 import BaseModel, Field\n", | |
"from typing import Tuple, List, Optional\n", | |
"from langchain_core.messages import AIMessage, HumanMessage\n", | |
"from langchain_core.output_parsers import StrOutputParser\n", | |
"import os\n", | |
"from langchain_community.graphs import Neo4jGraph\n", | |
"from langchain.document_loaders import WikipediaLoader\n", | |
"from langchain.text_splitter import TokenTextSplitter\n", | |
"from langchain_openai import ChatOpenAI\n", | |
"from langchain_experimental.graph_transformers import LLMGraphTransformer\n", | |
"from neo4j import GraphDatabase\n", | |
"from yfiles_jupyter_graphs import GraphWidget\n", | |
"from langchain_community.vectorstores import Neo4jVector\n", | |
"from langchain_openai import OpenAIEmbeddings\n", | |
"from langchain_community.vectorstores.neo4j_vector import remove_lucene_chars\n", | |
"from langchain_core.runnables import ConfigurableField, RunnableParallel, RunnablePassthrough\n", | |
"\n", | |
"try:\n", | |
" import google.colab\n", | |
" from google.colab import output\n", | |
" output.enable_custom_widget_manager()\n", | |
"except:\n", | |
" pass" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"source": [ | |
"# Enhancing RAG-Based Applications with Knowledge Graphs\n", | |
"\n", | |
"Graph Retrieval Augmented Generation (Graph RAG) is increasingly recognized for its ability to enhance the accuracy and contextuality of information retrieval in natural language processing applications. This method harnesses the structured nature of graph databases, organizing data as interconnected nodes and relationships, to enrich the depth and relevance of retrieved information.\n", | |
"\n", | |
"Graph databases excel in representing and organizing heterogeneous and interconnected data, effortlessly capturing intricate relationships and attributes across various data types. On the other hand, traditional vector databases struggle with such structured information, primarily designed for handling unstructured data through high-dimensional vectors. Integrating structured graph data with vector search through unstructured text in RAG applications enables the amalgamation of structured and unstructured data, leveraging the strengths of both approaches.\n", | |
"\n", | |
"However, constructing a knowledge graph, a fundamental step in leveraging graph-based data representation, can be challenging. It involves gathering and structuring data, demanding a profound understanding of domain-specific knowledge and graph modeling principles. To streamline this process, we have been exploring the capabilities of Large Language Models (LLMs). These models, with their extensive language understanding, can automate aspects of knowledge graph creation by analyzing text data, identifying entities, discerning relationships, and suggesting optimal representations within a graph structure. As a result, we have integrated the initial version of the graph construction module into LangChain, our framework, which we'll illustrate in this post.\n", | |
"the Neo4j Desktop application and configuring a local database instance.\n", | |
"## Neo4j Environment Setup\n", | |
"\n", | |
"You need to set up a Neo4j instance follow along with the examples in this blog post. The easiest way is to start a free instance on [Neo4j Aura](https://neo4j.com/cloud/platform/aura-graph-database/), which offers cloud instances of Neo4j database. Alternatively, you can also set up a local instance of the Neo4j database by downloading the Neo4j Desktop application and creating a local database instance." | |
], | |
"metadata": { | |
"id": "-KeAfaMQ0VKh" | |
} | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "L0nXP1aYtHNW" | |
}, | |
"outputs": [], | |
"source": [ | |
"os.environ[\"OPENAI_API_KEY\"] = \"sk- your openai api key\"\n", | |
"os.environ[\"NEO4J_URI\"] = \"Your url\"\n", | |
"os.environ[\"NEO4J_USERNAME\"] = \"your user name\"\n", | |
"os.environ[\"NEO4J_PASSWORD\"] = \"your password\"\n", | |
"\n", | |
"\n", | |
"\n", | |
"graph = Neo4jGraph()" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"source": [ | |
"Additionally, you must provide an [OpenAI key](https://openai.com/), as we will use their models in this blog post.\n", | |
"## Data ingestion\n", | |
"For this demonstration, we will use Elizabeth I's Wikipedia page. We can utilize [LangChain loaders](https://python.langchain.com/docs/modules/data_connection/document_loaders/) to fetch and split the documents from Wikipedia seamlessly." | |
], | |
"metadata": { | |
"id": "GHKExwYntyD9" | |
} | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "sGhtLTAStHNW", | |
"outputId": "9580ed70-588f-4a96-a02b-ae8c392d5457", | |
"colab": { | |
"base_uri": "https://localhost:8080/" | |
} | |
}, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"name": "stderr", | |
"text": [ | |
"/usr/local/lib/python3.10/dist-packages/wikipedia/wikipedia.py:389: GuessedAtParserWarning: No parser was explicitly specified, so I'm using the best available HTML parser for this system (\"lxml\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n", | |
"\n", | |
"The code that caused this warning is on line 389 of the file /usr/local/lib/python3.10/dist-packages/wikipedia/wikipedia.py. To get rid of this warning, pass the additional argument 'features=\"lxml\"' to the BeautifulSoup constructor.\n", | |
"\n", | |
" lis = BeautifulSoup(html).find_all('li')\n" | |
] | |
} | |
], | |
"source": [ | |
"# Read the wikipedia article\n", | |
"raw_documents = WikipediaLoader(query=\"Elizabeth I\").load()\n", | |
"# Define chunking strategy\n", | |
"text_splitter = TokenTextSplitter(chunk_size=512, chunk_overlap=24)\n", | |
"documents = text_splitter.split_documents(raw_documents[:3])" | |
] | |
}, | |
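{ | |
"cell_type": "markdown", | |
"metadata": {}, | |
"source": [ | |
"As a quick, optional sanity check (a minimal sketch, not part of the original run), the next cell prints how many chunks were produced and previews the first one." | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
"# Optional sanity check (illustrative; not executed in the original run)\n", | |
"print(f\"Loaded {len(documents)} chunks\")\n", | |
"print(documents[0].page_content[:200])" | |
] | |
}, | |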
{ | |
"cell_type": "code", | |
"source": [ | |
"!pip install PyPDF\n" | |
], | |
"metadata": { | |
"id": "TF0rJe_Gz-Qn", | |
"outputId": "b504de99-a29b-4a0c-ecef-279888617d1b", | |
"colab": { | |
"base_uri": "https://localhost:8080/" | |
} | |
}, | |
"execution_count": null, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"name": "stdout", | |
"text": [ | |
"Collecting PyPDF\n", | |
" Downloading pypdf-4.2.0-py3-none-any.whl (290 kB)\n", | |
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m290.4/290.4 kB\u001b[0m \u001b[31m5.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", | |
"\u001b[?25hRequirement already satisfied: typing_extensions>=4.0 in /usr/local/lib/python3.10/dist-packages (from PyPDF) (4.11.0)\n", | |
"Installing collected packages: PyPDF\n", | |
"Successfully installed PyPDF-4.2.0\n" | |
] | |
} | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"source": [ | |
"# import PyPDF2\n", | |
"pdf_path = \"pdf_path.pdf\"\n", | |
"\n", | |
"from langchain_community.document_loaders import PyPDFLoader\n", | |
"\n", | |
"loader = PyPDFLoader(pdf_path)\n", | |
"pages = loader.load_and_split()\n", | |
"\n" | |
], | |
"metadata": { | |
"id": "F4fuss6Dz5YU", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 356 | |
}, | |
"outputId": "dccea2ea-e7db-4e11-95f4-26a053f4f6b4" | |
}, | |
"execution_count": null, | |
"outputs": [ | |
{ | |
"output_type": "error", | |
"ename": "ValueError", | |
"evalue": "File path ['/content/SA_PrismPortrait_NikoD.pdf', '/content/SA_PrismPortrait_NikoD.pdf'] is not a valid file or url", | |
"traceback": [ | |
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", | |
"\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", | |
"\u001b[0;32m<ipython-input-4-8a64f39b3440>\u001b[0m in \u001b[0;36m<cell line: 6>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mlangchain_community\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdocument_loaders\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mPyPDFLoader\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 6\u001b[0;31m \u001b[0mloader\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mPyPDFLoader\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpdf_path\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 7\u001b[0m \u001b[0mpages\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mloader\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload_and_split\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 8\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", | |
"\u001b[0;32m/usr/local/lib/python3.10/dist-packages/langchain_community/document_loaders/pdf.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, file_path, password, headers, extract_images)\u001b[0m\n\u001b[1;32m 180\u001b[0m \u001b[0;34m\"pypdf package not found, please install it with \"\u001b[0m \u001b[0;34m\"`pip install pypdf`\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 181\u001b[0m )\n\u001b[0;32m--> 182\u001b[0;31m \u001b[0msuper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__init__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfile_path\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mheaders\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mheaders\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 183\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mparser\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mPyPDFParser\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpassword\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mpassword\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mextract_images\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mextract_images\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 184\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", | |
"\u001b[0;32m/usr/local/lib/python3.10/dist-packages/langchain_community/document_loaders/pdf.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, file_path, headers)\u001b[0m\n\u001b[1;32m 114\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfile_path\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mstr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtemp_pdf\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 115\u001b[0m \u001b[0;32melif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mos\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpath\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0misfile\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfile_path\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 116\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"File path %s is not a valid file or url\"\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfile_path\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 117\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 118\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__del__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", | |
"\u001b[0;31mValueError\u001b[0m: File path ['/content/SA_PrismPortrait_NikoD.pdf', '/content/SA_PrismPortrait_NikoD.pdf'] is not a valid file or url" | |
] | |
} | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"source": [ | |
"Now it's time to construct a graph based on the retrieved documents. For this purpose, we have implemented an `LLMGraphTransformermodule` that significantly simplifies constructing and storing a knowledge graph in a graph database." | |
], | |
"metadata": { | |
"id": "kphZMjjVuGAM" | |
} | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "pXf7OTGHtHNW" | |
}, | |
"outputs": [], | |
"source": [ | |
"llm=ChatOpenAI(temperature=0, model_name=\"gpt-3.5-turbo-0125\") # gpt-4-0125-preview occasionally has issues\n", | |
"llm_transformer = LLMGraphTransformer(llm=llm)\n", | |
"# raw_schema = self.chain.invoke({\"input\": text})\n", | |
"graph_documents = llm_transformer.convert_to_graph_documents(pages)\n", | |
"graph.add_graph_documents(\n", | |
" graph_documents,\n", | |
" baseEntityLabel=True,\n", | |
" include_source=True\n", | |
")" | |
] | |
}, | |
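{ | |
"cell_type": "markdown", | |
"metadata": {}, | |
"source": [ | |
"As an optional sanity check (a minimal sketch, not part of the original run), you can count the ingested entity nodes and relationships with two small Cypher queries." | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
"# Minimal sketch: count what was ingested (assumes the cell above completed)\n", | |
"print(graph.query(\"MATCH (n:__Entity__) RETURN count(n) AS entities\"))\n", | |
"print(graph.query(\"MATCH ()-[r]->() RETURN count(r) AS relationships\"))" | |
] | |
}, | |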
{ | |
"cell_type": "markdown", | |
"source": [ | |
"You can define which LLM you want the knowledge graph generation chain to use. At the moment, we support only function calling models from OpenAI and Mistral. However, we plan to expand the LLM selection in the future. In this example, we are using the latest GPT-4. Note that the quality of generated graph significantly depends on the model you are using. In theory, you always want to use the most capable one. The LLM graph transformers returns graph documents, which can be imported to Neo4j via the `add_graph_documents` method. The `baseEntityLabel` parameter assigns an additional `__Entity__` label to each node, enhancing indexing and query performance. The `include_source` parameter links nodes to their originating documents, facilitating data traceability and context understanding.\n", | |
"\n", | |
"You can inspect the generated graph with yfiles visualization." | |
], | |
"metadata": { | |
"id": "ll2asQiAugSW" | |
} | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "RMZlhtDmtHNW", | |
"outputId": "866a541a-5ee4-4cb2-c8b6-19c399875fa9", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 817, | |
"referenced_widgets": [ | |
"74cf5d8eec4741c3a42f6ded78c7b56e", | |
"f85c1fbbfed147019ba1e0c14990c55b" | |
] | |
} | |
}, | |
"outputs": [ | |
{ | |
"output_type": "display_data", | |
"data": { | |
"text/plain": [ | |
"GraphWidget(layout=Layout(height='800px', width='100%'))" | |
], | |
"application/vnd.jupyter.widget-view+json": { | |
"version_major": 2, | |
"version_minor": 0, | |
"model_id": "74cf5d8eec4741c3a42f6ded78c7b56e" | |
} | |
}, | |
"metadata": { | |
"application/vnd.jupyter.widget-view+json": { | |
"colab": { | |
"custom_widget_manager": { | |
"url": "https://ssl.gstatic.com/colaboratory-static/widgets/colab-cdn-widget-manager/2b70e893a8ba7c0f/manager.min.js" | |
} | |
} | |
} | |
} | |
} | |
], | |
"source": [ | |
"# directly show the graph resulting from the given Cypher query\n", | |
"default_cypher = \"MATCH (s)-[r:!MENTIONS]->(t) RETURN s,r,t LIMIT 50\"\n", | |
"\n", | |
"def showGraph(cypher: str = default_cypher):\n", | |
" # create a neo4j session to run queries\n", | |
" driver = GraphDatabase.driver(\n", | |
" uri = os.environ[\"NEO4J_URI\"],\n", | |
" auth = (os.environ[\"NEO4J_USERNAME\"],\n", | |
" os.environ[\"NEO4J_PASSWORD\"]))\n", | |
" session = driver.session()\n", | |
" widget = GraphWidget(graph = session.run(cypher).graph())\n", | |
" widget.node_label_mapping = 'id'\n", | |
" #display(widget)\n", | |
" return widget\n", | |
"\n", | |
"showGraph()" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"source": [ | |
"## Hybrid Retrieval for RAG\n", | |
"After the graph generation, we will use a hybrid retrieval approach that combines vector and keyword indexes with graph retrieval for RAG applications.\n", | |
"\n", | |
"\n", | |
"\n", | |
"The diagram illustrates a retrieval process beginning with a user posing a question, which is then directed to an RAG retriever. This retriever employs keyword and vector searches to search through unstructured text data and combines it with the information it collects from the knowledge graph. Since Neo4j features both keyword and vector indexes, you can implement all three retrieval options with a single database system. The collected data from these sources is fed into an LLM to generate and deliver the final answer.\n", | |
"## Unstructured data retriever\n", | |
"You can use the Neo4jVector.from_existing_graph method to add both keyword and vector retrieval to documents. This method configures keyword and vector search indexes for a hybrid search approach, targeting nodes labeled Document. Additionally, it calculates text embedding values if they are missing.\n", | |
"\n" | |
], | |
"metadata": { | |
"id": "1guHjU4uyEZK" | |
} | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "GHbJPMfDtHNW" | |
}, | |
"outputs": [], | |
"source": [ | |
"vector_index = Neo4jVector.from_existing_graph(\n", | |
" OpenAIEmbeddings(),\n", | |
" search_type=\"hybrid\",\n", | |
" node_label=\"Document\",\n", | |
" text_node_properties=[\"text\"],\n", | |
" embedding_node_property=\"embedding\"\n", | |
")" | |
] | |
}, | |
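{ | |
"cell_type": "markdown", | |
"metadata": {}, | |
"source": [ | |
"The hybrid index can be queried directly via `similarity_search`. The cell below is a minimal sketch; the question text is only an illustrative example and was not part of the original run." | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
"# Minimal sketch: query the hybrid (keyword + vector) index directly.\n", | |
"# The question text is illustrative.\n", | |
"for doc in vector_index.similarity_search(\"What is Niko's primary personality?\", k=2):\n", | |
"    print(doc.page_content[:200])" | |
] | |
}, | |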
{ | |
"cell_type": "markdown", | |
"source": [ | |
"The vector index can then be called with the similarity_search method.\n", | |
"## Graph retriever\n", | |
"On the other hand, configuring a graph retrieval is more involved but offers more freedom. In this example, we will use a full-text index to identify relevant nodes and then return their direct neighborhood.\n", | |
"\n", | |
"\n", | |
"\n", | |
"The graph retriever starts by identifying relevant entities in the input. For simplicity, we instruct the LLM to identify people, organizations, and locations. To achieve this, we will use LCEL with the newly added `with_structured_output` method to achieve this." | |
], | |
"metadata": { | |
"id": "2nzfPwvvy0Yz" | |
} | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "6yCMz_sRtHNW" | |
}, | |
"outputs": [], | |
"source": [ | |
"# Retriever\n", | |
"\n", | |
"graph.query(\n", | |
" \"CREATE FULLTEXT INDEX entity IF NOT EXISTS FOR (e:__Entity__) ON EACH [e.id]\")\n", | |
"\n", | |
"# Extract entities from text\n", | |
"class Entities(BaseModel):\n", | |
" \"\"\"Identifying information about entities.\"\"\"\n", | |
"\n", | |
" names: List[str] = Field(\n", | |
" ...,\n", | |
" description=\"All the person, organization, or business entities that \"\n", | |
" \"appear in the text\",\n", | |
" )\n", | |
"\n", | |
"prompt = ChatPromptTemplate.from_messages(\n", | |
" [\n", | |
" (\n", | |
" \"system\",\n", | |
" \"You are extracting organization and person entities from the text.\",\n", | |
" ),\n", | |
" (\n", | |
" \"human\",\n", | |
" \"Use the given format to extract information from the following \"\n", | |
" \"input: {question}\",\n", | |
" ),\n", | |
" ]\n", | |
")\n", | |
"\n", | |
"entity_chain = prompt | llm.with_structured_output(Entities)" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"source": [ | |
"Let's test it out:" | |
], | |
"metadata": { | |
"id": "n-Cs7RFAzdT3" | |
} | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "54H15KNAtHNX", | |
"outputId": "c8750bd1-84f8-453a-bd05-880252b23bc4", | |
"colab": { | |
"base_uri": "https://localhost:8080/" | |
} | |
}, | |
"outputs": [ | |
{ | |
"output_type": "execute_result", | |
"data": { | |
"text/plain": [ | |
"['Niko']" | |
] | |
}, | |
"metadata": {}, | |
"execution_count": 30 | |
} | |
], | |
"source": [ | |
"entity_chain.invoke({\"question\": \"Describe Niko's primary personality\"}).names" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"source": [ | |
"Great, now that we can detect entities in the question, let's use a full-text index to map them to the knowledge graph. First, we need to define a full-text index and a function that will generate full-text queries that allow a bit of misspelling, which we won't go into much detail here." | |
], | |
"metadata": { | |
"id": "e2S2aWq5zfQO" | |
} | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "dY8huoM8tHNX" | |
}, | |
"outputs": [], | |
"source": [ | |
"def generate_full_text_query(input: str) -> str:\n", | |
" \"\"\"\n", | |
" Generate a full-text search query for a given input string.\n", | |
"\n", | |
" This function constructs a query string suitable for a full-text search.\n", | |
" It processes the input string by splitting it into words and appending a\n", | |
" similarity threshold (~2 changed characters) to each word, then combines\n", | |
" them using the AND operator. Useful for mapping entities from user questions\n", | |
" to database values, and allows for some misspelings.\n", | |
" \"\"\"\n", | |
" full_text_query = \"\"\n", | |
" words = [el for el in remove_lucene_chars(input).split() if el]\n", | |
" for word in words[:-1]:\n", | |
" full_text_query += f\" {word}~2 AND\"\n", | |
" full_text_query += f\" {words[-1]}~2\"\n", | |
" return full_text_query.strip()\n", | |
"\n", | |
"# Fulltext index query\n", | |
"def structured_retriever(question: str) -> str:\n", | |
" \"\"\"\n", | |
" Collects the neighborhood of entities mentioned\n", | |
" in the question\n", | |
" \"\"\"\n", | |
" result = \"\"\n", | |
" entities = entity_chain.invoke({\"question\": question})\n", | |
" for entity in entities.names:\n", | |
" response = graph.query(\n", | |
" \"\"\"CALL db.index.fulltext.queryNodes('entity', $query, {limit:2})\n", | |
" YIELD node,score\n", | |
" CALL {\n", | |
" WITH node\n", | |
" MATCH (node)-[r:!MENTIONS]->(neighbor)\n", | |
" RETURN node.id + ' - ' + type(r) + ' -> ' + neighbor.id AS output\n", | |
" UNION ALL\n", | |
" WITH node\n", | |
" MATCH (node)<-[r:!MENTIONS]-(neighbor)\n", | |
" RETURN neighbor.id + ' - ' + type(r) + ' -> ' + node.id AS output\n", | |
" }\n", | |
" RETURN output LIMIT 50\n", | |
" \"\"\",\n", | |
" {\"query\": generate_full_text_query(entity)},\n", | |
" )\n", | |
" result += \"\\n\".join([el['output'] for el in response])\n", | |
" return result" | |
] | |
}, | |
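{ | |
"cell_type": "markdown", | |
"metadata": {}, | |
"source": [ | |
"To make the helper concrete, the illustrative cell below (not part of the original run) shows the Lucene query it produces for a two-word name." | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
"# Illustrative example: each word gets a fuzzy ~2 suffix and the words are AND-ed,\n", | |
"# e.g. \"Niko Drakoulis\" -> \"Niko~2 AND Drakoulis~2\"\n", | |
"print(generate_full_text_query(\"Niko Drakoulis\"))" | |
] | |
}, | |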
{ | |
"cell_type": "markdown", | |
"source": [ | |
"The `structured_retriever` function starts by detecting entities in the user question. Next, it iterates over the detected entities and uses a Cypher template to retrieve the neighborhood of relevant nodes. Let's test it out!" | |
], | |
"metadata": { | |
"id": "g-F9BjghzjdH" | |
} | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "_6fOJRPntHNX", | |
"outputId": "e171acc7-295c-4d20-bcc5-acc633472cc3", | |
"colab": { | |
"base_uri": "https://localhost:8080/" | |
} | |
}, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"name": "stdout", | |
"text": [ | |
"Niko Drakoulis - PRIMARY_PERSONALITY -> Instructor\n", | |
"Niko Drakoulis - PRIMARY_PERSONALITY -> Powerful\n", | |
"Niko Drakoulis - PRIMARY_PERSONALITY_TRAIT -> Powerful\n", | |
"Niko Drakoulis - PERSONALITY_UNDER_PRESSURE_TRAIT -> Versatile\n", | |
"Niko Drakoulis - PERSONALITY_TRAIT -> Big Picture Ideas\n", | |
"Niko Drakoulis - PERSONALITY_TRAIT -> Motives Identification\n", | |
"Niko Drakoulis - PERSONALITY_TRAIT -> Goal Direction\n", | |
"Niko Drakoulis - PERSONALITY_TRAIT -> Pressure Application\n", | |
"Niko Drakoulis - PERSONALITY_TRAIT -> Relationship Impact\n", | |
"Niko Drakoulis - PERSONALITY_TRAIT -> Inspiration\n", | |
"Niko Drakoulis - PERSONALITY_TRAIT -> Persuasion\n", | |
"Niko Drakoulis - PERSONALITY_TRAIT -> Recruitment\n", | |
"Niko Drakoulis - PERSONALITY_TRAIT -> Exhaustion\n", | |
"Niko Drakoulis - BLIND_SPOT -> Tenacity\n", | |
"Niko Drakoulis - BLIND_SPOT -> Over-Controlling\n", | |
"Niko Drakoulis - BLIND_SPOT -> Insensitivity\n", | |
"Niko Drakoulis - PRISM_PRACTICE_AREA -> Presenting Facts\n", | |
"Niko Drakoulis - PRISM_PRACTICE_AREA -> Managing Emotions\n", | |
"Niko Drakoulis - PRISM_PRACTICE_AREA -> Awareness Of Others' Needs\n", | |
"Niko Drakoulis - PRISM_PRACTICE_AREA -> Seeking Feedback\n", | |
"Niko Drakoulis - PRISM_PRACTICE_AREA -> Drive Action\n", | |
"Niko Drakoulis - PRISM_PRACTICE_AREA -> Giving & Receiving Feedback\n", | |
"Niko Drakoulis - PRISM_PRACTICE_AREA -> Appreciation & Recognition\n", | |
"Niko Drakoulis - PRISM_PRACTICE_AREA -> Releasing Control\n", | |
"Niko Drakoulis - PRISM_PRACTICE_AREA -> Active Listening\n", | |
"Niko Drakoulis - PRIMARY_PERSONALITY_&_PERSONALITY_UNDER_PRESSURE -> Powerful/Versatile\n", | |
"Niko Drakoulis - EMOTIONAL_POSTURE -> Powerful/Versatile\n", | |
"Niko Drakoulis - ASSESSMENT_OF_OTHERS -> Powerful/Versatile\n", | |
"Niko Drakoulis - INFLUENCE_ON_OTHERS -> Powerful/Versatile\n", | |
"Niko Drakoulis - BEST_ENVIRONMENT -> Challenges, Variety, Opportunity To Influence And Inspire Others\n", | |
"Niko Drakoulis - OPERATION -> People, Tasks, Goals\n", | |
"Niko Drakoulis - ACTIVITY_PREFERENCE -> Variety Of Activities\n", | |
"Niko Drakoulis - UNDER_PRESSURE_BEHAVIOR -> Argumentative, Passive-Aggressive\n", | |
"Niko Drakoulis - UNEASINESS -> Loss Of Power Or Influence, Fear Of Being Too Soft\n", | |
"Niko Drakoulis - POTENTIAL_ROLE_MATCH -> Independence, Challenges, Obstacles, Meeting Or Entertaining People, Active Opportunities, Building, Communicating, Creating, Directing, Entertaining, Influencing, Initiating Solutions, Leading Others\n", | |
"Niko Drakoulis - HAS_PERSONALITY_TRAIT -> Entrepreneurial Attitude\n", | |
"Niko Drakoulis - HAS_PERSONALITY_TRAIT -> Assertive\n", | |
"Niko Drakoulis - HAS_PERSONALITY_TRAIT -> Persuasive\n", | |
"Niko Drakoulis - HAS_PERSONALITY_TRAIT -> Independent\n", | |
"Niko Drakoulis - HAS_PERSONALITY_TRAIT -> Adventurous\n", | |
"Niko Drakoulis - HAS_PERSONALITY_TRAIT -> Stubborn Determination\n", | |
"Niko Drakoulis - HAS_PERSONALITY_TRAIT -> Flexible\n", | |
"Niko Drakoulis - HAS_PERSONALITY_TRAIT -> Self-Motivated\n", | |
"Niko Drakoulis - HAS_PERSONALITY_TRAIT -> High Tolerance For Pressure\n", | |
"Niko Drakoulis - HAS_SKILL -> Verbal Skills\n", | |
"Niko Drakoulis - HAS_SKILL -> High Energy\n", | |
"Niko Drakoulis - HAS_SKILL -> Mobilize People\n", | |
"Niko Drakoulis - HAS_COMMUNICATION_PREFERENCE -> Big Picture Focus\n", | |
"Niko Drakoulis - HAS_COMMUNICATION_PREFERENCE -> Short Message For Details\n", | |
"Niko Drakoulis - HAS_COMMUNICATION_PREFERENCE -> Persuaded By Information About Endorsements\n" | |
] | |
} | |
], | |
"source": [ | |
"print(structured_retriever(\"Who is Niko\"))" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"source": [ | |
"## Final retriever\n", | |
"As we mentioned at the start, we'll combine the unstructured and graph retriever to create the final context that will be passed to an LLM." | |
], | |
"metadata": { | |
"id": "xN9c_dEozyaO" | |
} | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "iCTMp3prtHNX" | |
}, | |
"outputs": [], | |
"source": [ | |
"def retriever(question: str):\n", | |
" print(f\"Search query: {question}\")\n", | |
" structured_data = structured_retriever(question)\n", | |
" unstructured_data = [el.page_content for el in vector_index.similarity_search(question)]\n", | |
" final_data = f\"\"\"Structured data:\n", | |
"{structured_data}\n", | |
"Unstructured data:\n", | |
"{\"#Document \". join(unstructured_data)}\n", | |
" \"\"\"\n", | |
" return final_data" | |
] | |
}, | |
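{ | |
"cell_type": "markdown", | |
"metadata": {}, | |
"source": [ | |
"Before wiring the retriever into a chain, you can preview the combined context it produces. This is a minimal sketch; the question text is illustrative and the cell was not part of the original run." | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
"# Minimal sketch: preview the combined structured + unstructured context\n", | |
"print(retriever(\"What is Niko's primary trait\")[:500])" | |
] | |
}, | |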
{ | |
"cell_type": "markdown", | |
"source": [ | |
"As we are dealing with Python, we can simply concatenate the outputs using the f-string.\n", | |
"## Defining the RAG chain\n", | |
"We have successfully implemented the retrieval component of the RAG. First, we will introduce the query rewriting part that allows conversational follow up questions.\n" | |
], | |
"metadata": { | |
"id": "NZG9Q8Ohz3Hn" | |
} | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "vu68Z79ttHNX" | |
}, | |
"outputs": [], | |
"source": [ | |
"# Condense a chat history and follow-up question into a standalone question\n", | |
"_template = \"\"\"Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question,\n", | |
"in its original language.\n", | |
"Chat History:\n", | |
"{chat_history}\n", | |
"Follow Up Input: {question}\n", | |
"Standalone question:\"\"\" # noqa: E501\n", | |
"CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)\n", | |
"\n", | |
"def _format_chat_history(chat_history: List[Tuple[str, str]]) -> List:\n", | |
" buffer = []\n", | |
" for human, ai in chat_history:\n", | |
" buffer.append(HumanMessage(content=human))\n", | |
" buffer.append(AIMessage(content=ai))\n", | |
" return buffer\n", | |
"\n", | |
"_search_query = RunnableBranch(\n", | |
" # If input includes chat_history, we condense it with the follow-up question\n", | |
" (\n", | |
" RunnableLambda(lambda x: bool(x.get(\"chat_history\"))).with_config(\n", | |
" run_name=\"HasChatHistoryCheck\"\n", | |
" ), # Condense follow-up question and chat into a standalone_question\n", | |
" RunnablePassthrough.assign(\n", | |
" chat_history=lambda x: _format_chat_history(x[\"chat_history\"])\n", | |
" )\n", | |
" | CONDENSE_QUESTION_PROMPT\n", | |
" | ChatOpenAI(temperature=0)\n", | |
" | StrOutputParser(),\n", | |
" ),\n", | |
" # Else, we have no chat history, so just pass through the question\n", | |
" RunnableLambda(lambda x : x[\"question\"]),\n", | |
")" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"source": [ | |
"Next, we introduce a prompt that leverages the context provided by the integrated hybrid retriever to produce the response, completing the implementation of the RAG chain." | |
], | |
"metadata": { | |
"id": "CsH90hbvz_aF" | |
} | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "Dzb2jcittHNY" | |
}, | |
"outputs": [], | |
"source": [ | |
"template = \"\"\"Answer the question based only on the following context:\n", | |
"{context}\n", | |
"\n", | |
"Question: {question}\n", | |
"Use natural language and be concise.\n", | |
"Answer:\"\"\"\n", | |
"prompt = ChatPromptTemplate.from_template(template)\n", | |
"\n", | |
"chain = (\n", | |
" RunnableParallel(\n", | |
" {\n", | |
" \"context\": _search_query | retriever,\n", | |
" \"question\": RunnablePassthrough(),\n", | |
" }\n", | |
" )\n", | |
" | prompt\n", | |
" | llm\n", | |
" | StrOutputParser()\n", | |
")" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"source": [ | |
"Finally, we can go ahead and test our hybrid RAG implementation." | |
], | |
"metadata": { | |
"id": "w3SeRw0L0Gy3" | |
} | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "dtU0iMNgtHNY", | |
"outputId": "8936bf8a-b6a3-4f3a-9268-2c1eb0975718", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 53 | |
} | |
}, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"name": "stdout", | |
"text": [ | |
"Search query: What is Niko's primary trait\n" | |
] | |
}, | |
{ | |
"output_type": "execute_result", | |
"data": { | |
"text/plain": [ | |
"\"Niko's primary trait is being powerful.\"" | |
], | |
"application/vnd.google.colaboratory.intrinsic+json": { | |
"type": "string" | |
} | |
}, | |
"metadata": {}, | |
"execution_count": 36 | |
} | |
], | |
"source": [ | |
"chain.invoke({\"question\": \"What is Niko's primary trait\"})" | |
] | |
}, | |
{ | |
"cell_type": "markdown", | |
"source": [ | |
"Let's test a follow up question!" | |
], | |
"metadata": { | |
"id": "aLjhppwj0Jz_" | |
} | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "ln_Hz2obtHNY", | |
"outputId": "3d93034b-5412-4a19-b692-96d2788ff221", | |
"colab": { | |
"base_uri": "https://localhost:8080/", | |
"height": 53 | |
} | |
}, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"name": "stdout", | |
"text": [ | |
"Search query: Niko's action point to win a arugement\n" | |
] | |
}, | |
{ | |
"output_type": "execute_result", | |
"data": { | |
"text/plain": [ | |
"\"Niko's action point to win an argument is to collaborate and seek a win-win outcome.\"" | |
], | |
"application/vnd.google.colaboratory.intrinsic+json": { | |
"type": "string" | |
} | |
}, | |
"metadata": {}, | |
"execution_count": 38 | |
} | |
], | |
"source": [ | |
"chain.invoke(\n", | |
" {\n", | |
" \"question\": \"Niko's action point to win a arugement\",\n", | |
"\n", | |
" }\n", | |
")" | |
] | |
}, | |
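{ | |
"cell_type": "markdown", | |
"metadata": {}, | |
"source": [ | |
"To exercise the condensing branch, a follow-up turn needs the chat history. The sketch below (not part of the original run) reuses the earlier question and answer as the history; the follow-up question itself is illustrative." | |
] | |
}, | |
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": {}, | |
"outputs": [], | |
"source": [ | |
"# Minimal sketch of a true follow-up turn: chat_history is a list of\n", | |
"# (human, ai) tuples, matching what _format_chat_history expects.\n", | |
"chain.invoke(\n", | |
"    {\n", | |
"        \"question\": \"How does that trait show up under pressure?\",\n", | |
"        \"chat_history\": [\n", | |
"            (\n", | |
"                \"What is Niko's primary trait\",\n", | |
"                \"Niko's primary trait is being powerful.\",\n", | |
"            )\n", | |
"        ],\n", | |
"    }\n", | |
")" | |
] | |
}, | |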
{ | |
"cell_type": "code", | |
"execution_count": null, | |
"metadata": { | |
"id": "CvaTiHtNtHNY", | |
"colab": { | |
"base_uri": "https://localhost:8080/" | |
}, | |
"outputId": "4ffd93c0-b1e7-4128-d1e6-8bb431e4ffdf" | |
}, | |
"outputs": [ | |
{ | |
"output_type": "stream", | |
"name": "stdout", | |
"text": [ | |
"{'id': 'chatcmpl-9RGv3wfJBEUew81j4AQsckXiIJ4uV', 'object': 'chat.completion', 'created': 1716286921, 'model': 'gpt-3.5-turbo-0125', 'choices': [{'index': 0, 'message': {'role': 'assistant', 'content': 'Hello, I am an AI assistant here to help you with any questions or tasks you may have. I am programmed to provide information and assistance in a variety of topics. Feel free to ask me anything, and I will do my best to help you.'}, 'logprobs': None, 'finish_reason': 'stop'}], 'usage': {'prompt_tokens': 10, 'completion_tokens': 51, 'total_tokens': 61}, 'system_fingerprint': None}\n" | |
] | |
} | |
], | |
"source": [ | |
"import requests\n", | |
"\n", | |
"# Replace 'your_openai_api_key' with your actual OpenAI API key\n", | |
"api_key = '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'\n", | |
"\n", | |
"# Define the endpoint URL\n", | |
"url = \"https://api.openai.com/v1/chat/completions\"\n", | |
"\n", | |
"# Define the headers\n", | |
"headers = {\n", | |
" \"Content-Type\": \"application/json\",\n", | |
" \"Authorization\": f\"Bearer {api_key}\"\n", | |
"}\n", | |
"\n", | |
"# Define the payload\n", | |
"payload = {\n", | |
" \"model\": \"gpt-3.5-turbo\",\n", | |
" \"messages\": [{\"role\": \"user\", \"content\": \"Give yourself intro\"}],\n", | |
" \"temperature\": 0.7\n", | |
"}\n", | |
"\n", | |
"# Make the POST request\n", | |
"response = requests.post(url, headers=headers, json=payload)\n", | |
"\n", | |
"# Check if the request was successful\n", | |
"if response.status_code == 200:\n", | |
" # Print the response from the API\n", | |
" print(response.json())\n", | |
"else:\n", | |
" # Print the error if the request was not successful\n", | |
" print(f\"Error: {response.status_code}\")\n", | |
" print(response.json())\n" | |
] | |
} | |
], | |
"metadata": { | |
"kernelspec": { | |
"display_name": "Python 3 (ipykernel)", | |
"language": "python", | |
"name": "python3" | |
}, | |
"language_info": { | |
"codemirror_mode": { | |
"name": "ipython", | |
"version": 3 | |
}, | |
"file_extension": ".py", | |
"mimetype": "text/x-python", | |
"name": "python", | |
"nbconvert_exporter": "python", | |
"pygments_lexer": "ipython3", | |
"version": "3.11.5" | |
}, | |
"colab": { | |
"provenance": [], | |
"include_colab_link": true | |
}, | |
"widgets": { | |
"application/vnd.jupyter.widget-state+json": { | |
"74cf5d8eec4741c3a42f6ded78c7b56e": { | |
"model_module": "yfiles-jupyter-graphs", | |
"model_name": "GraphModel", | |
"model_module_version": "^1.6.2", | |
"state": { | |
"_context_pane_mapping": [ | |
{ | |
"id": "Neighborhood", | |
"title": "Neighborhood" | |
}, | |
{ | |
"id": "Data", | |
"title": "Data" | |
}, | |
{ | |
"id": "Search", | |
"title": "Search" | |
}, | |
{ | |
"id": "About", | |
"title": "About" | |
} | |
], | |
"_data_importer": "neo4j", | |
"_directed": true, | |
"_dom_classes": [], | |
"_edges": [ | |
{ | |
"id": 1152922604118474800, | |
"start": 1, | |
"end": 2, | |
"properties": { | |
"label": "RULED" | |
}, | |
"label": "RULED", | |
"color": "#9C27B0", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1155174403932160000, | |
"start": 1, | |
"end": 3, | |
"properties": { | |
"label": "RULED" | |
}, | |
"label": "RULED", | |
"color": "#9C27B0", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152923703630102500, | |
"start": 1, | |
"end": 4, | |
"properties": { | |
"label": "BELONGED_TO" | |
}, | |
"label": "BELONGED_TO", | |
"color": "#2196F3", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152924803141730300, | |
"start": 1, | |
"end": 5, | |
"properties": { | |
"label": "PARENT" | |
}, | |
"label": "PARENT", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1155176602955415600, | |
"start": 1, | |
"end": 6, | |
"properties": { | |
"label": "PARENT" | |
}, | |
"label": "PARENT", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152925902653358000, | |
"start": 1, | |
"end": 7, | |
"properties": { | |
"label": "SIBLING" | |
}, | |
"label": "SIBLING", | |
"color": "#F44336", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1155177702467043300, | |
"start": 1, | |
"end": 9, | |
"properties": { | |
"label": "SIBLING" | |
}, | |
"label": "SIBLING", | |
"color": "#F44336", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152927002164985900, | |
"start": 1, | |
"end": 8, | |
"properties": { | |
"label": "IGNORED_SUCCESSION" | |
}, | |
"label": "IGNORED_SUCCESSION", | |
"color": "#607D8B", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152928101676613600, | |
"start": 1, | |
"end": 10, | |
"properties": { | |
"label": "DEPENDED_ON" | |
}, | |
"label": "DEPENDED_ON", | |
"color": "#673AB7", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1155179901490299000, | |
"start": 1, | |
"end": 16, | |
"properties": { | |
"label": "DEPENDED_ON" | |
}, | |
"label": "DEPENDED_ON", | |
"color": "#673AB7", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152929201188241400, | |
"start": 1, | |
"end": 11, | |
"properties": { | |
"label": "CREATED" | |
}, | |
"label": "CREATED", | |
"color": "#CDDC39", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152930300699869200, | |
"start": 1, | |
"end": 12, | |
"properties": { | |
"label": "SUCCEEDED_BY" | |
}, | |
"label": "SUCCEEDED_BY", | |
"color": "#9E9E9E", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152931400211497000, | |
"start": 1, | |
"end": 13, | |
"properties": { | |
"label": "RELATIVE" | |
}, | |
"label": "RELATIVE", | |
"color": "#9C27B0", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152932499723124700, | |
"start": 1, | |
"end": 14, | |
"properties": { | |
"label": "FOREIGN_AFFAIRS" | |
}, | |
"label": "FOREIGN_AFFAIRS", | |
"color": "#2196F3", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1155184299536810000, | |
"start": 1, | |
"end": 15, | |
"properties": { | |
"label": "FOREIGN_AFFAIRS" | |
}, | |
"label": "FOREIGN_AFFAIRS", | |
"color": "#2196F3", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 6917570809082937000, | |
"start": 1, | |
"end": 12, | |
"properties": { | |
"label": "SUCCEEDED_TO" | |
}, | |
"label": "SUCCEEDED_TO", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1157466885676073000, | |
"start": 5, | |
"end": 1, | |
"properties": { | |
"label": "SUCCEEDED_TO" | |
}, | |
"label": "SUCCEEDED_TO", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152963286048702500, | |
"start": 5, | |
"end": 7, | |
"properties": { | |
"label": "SUCCEEDED_TO" | |
}, | |
"label": "SUCCEEDED_TO", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1155215085862387700, | |
"start": 5, | |
"end": 54, | |
"properties": { | |
"label": "SUCCEEDED_TO" | |
}, | |
"label": "SUCCEEDED_TO", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1159718685489758200, | |
"start": 5, | |
"end": 55, | |
"properties": { | |
"label": "SUCCEEDED_TO" | |
}, | |
"label": "SUCCEEDED_TO", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1161970485303443500, | |
"start": 5, | |
"end": 56, | |
"properties": { | |
"label": "SUCCEEDED_TO" | |
}, | |
"label": "SUCCEEDED_TO", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1164222285117128700, | |
"start": 5, | |
"end": 57, | |
"properties": { | |
"label": "SUCCEEDED_TO" | |
}, | |
"label": "SUCCEEDED_TO", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1166474084930814000, | |
"start": 5, | |
"end": 58, | |
"properties": { | |
"label": "SUCCEEDED_TO" | |
}, | |
"label": "SUCCEEDED_TO", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152924803141730300, | |
"start": 13, | |
"end": 65, | |
"properties": { | |
"label": "PARENT" | |
}, | |
"label": "PARENT", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152967684095213600, | |
"start": 13, | |
"end": 18, | |
"properties": { | |
"label": "SENT_AMBASSADORS" | |
}, | |
"label": "SENT_AMBASSADORS", | |
"color": "#F44336", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152968783606841300, | |
"start": 13, | |
"end": 65, | |
"properties": { | |
"label": "PLAUSIBLE_SUCCESSOR" | |
}, | |
"label": "PLAUSIBLE_SUCCESSOR", | |
"color": "#607D8B", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152969883118469000, | |
"start": 13, | |
"end": 65, | |
"properties": { | |
"label": "ROMAN_CATHOLIC" | |
}, | |
"label": "ROMAN_CATHOLIC", | |
"color": "#673AB7", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152970982630097000, | |
"start": 13, | |
"end": 65, | |
"properties": { | |
"label": "POLITICAL_PROBLEM" | |
}, | |
"label": "POLITICAL_PROBLEM", | |
"color": "#CDDC39", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152972082141724700, | |
"start": 13, | |
"end": 65, | |
"properties": { | |
"label": "EXECUTED" | |
}, | |
"label": "EXECUTED", | |
"color": "#9E9E9E", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152933599234752500, | |
"start": 18, | |
"end": 15, | |
"properties": { | |
"label": "WAR" | |
}, | |
"label": "WAR", | |
"color": "#9C27B0", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1157438298373750800, | |
"start": 18, | |
"end": 3, | |
"properties": { | |
"label": "CAMPAIGN" | |
}, | |
"label": "CAMPAIGN", | |
"color": "#2196F3", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1155186498560065500, | |
"start": 18, | |
"end": 14, | |
"properties": { | |
"label": "CAMPAIGN" | |
}, | |
"label": "CAMPAIGN", | |
"color": "#2196F3", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152934698746380300, | |
"start": 18, | |
"end": 19, | |
"properties": { | |
"label": "CAMPAIGN" | |
}, | |
"label": "CAMPAIGN", | |
"color": "#2196F3", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152935798258008000, | |
"start": 18, | |
"end": 20, | |
"properties": { | |
"label": "ERA_LEAD" | |
}, | |
"label": "ERA_LEAD", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1155187598071693300, | |
"start": 18, | |
"end": 21, | |
"properties": { | |
"label": "ERA_LEAD" | |
}, | |
"label": "ERA_LEAD", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1157439397885378600, | |
"start": 18, | |
"end": 22, | |
"properties": { | |
"label": "ERA_LEAD" | |
}, | |
"label": "ERA_LEAD", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1159691197699063800, | |
"start": 18, | |
"end": 23, | |
"properties": { | |
"label": "ERA_LEAD" | |
}, | |
"label": "ERA_LEAD", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1161942997512749000, | |
"start": 18, | |
"end": 24, | |
"properties": { | |
"label": "ERA_LEAD" | |
}, | |
"label": "ERA_LEAD", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152950091909169200, | |
"start": 18, | |
"end": 46, | |
"properties": { | |
"label": "DAUGHTER" | |
}, | |
"label": "DAUGHTER", | |
"color": "#F44336", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1155201891722854400, | |
"start": 18, | |
"end": 47, | |
"properties": { | |
"label": "DAUGHTER" | |
}, | |
"label": "DAUGHTER", | |
"color": "#F44336", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152936897769635800, | |
"start": 26, | |
"end": 27, | |
"properties": { | |
"label": "HELD_TITLE" | |
}, | |
"label": "HELD_TITLE", | |
"color": "#607D8B", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152937997281263600, | |
"start": 26, | |
"end": 28, | |
"properties": { | |
"label": "DAUGHTER_OF" | |
}, | |
"label": "DAUGHTER_OF", | |
"color": "#673AB7", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1155189797094948900, | |
"start": 26, | |
"end": 29, | |
"properties": { | |
"label": "DAUGHTER_OF" | |
}, | |
"label": "DAUGHTER_OF", | |
"color": "#673AB7", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152939096792891400, | |
"start": 26, | |
"end": 30, | |
"properties": { | |
"label": "RELATED_TO" | |
}, | |
"label": "RELATED_TO", | |
"color": "#CDDC39", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1155190896606576600, | |
"start": 26, | |
"end": 31, | |
"properties": { | |
"label": "RELATED_TO" | |
}, | |
"label": "RELATED_TO", | |
"color": "#CDDC39", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1157442696420262000, | |
"start": 26, | |
"end": 32, | |
"properties": { | |
"label": "RELATED_TO" | |
}, | |
"label": "RELATED_TO", | |
"color": "#CDDC39", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152940196304519200, | |
"start": 26, | |
"end": 33, | |
"properties": { | |
"label": "DECLARED_AS_HEIR" | |
}, | |
"label": "DECLARED_AS_HEIR", | |
"color": "#9E9E9E", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152941295816147000, | |
"start": 26, | |
"end": 34, | |
"properties": { | |
"label": "SUPPORTED_FOUNDATION_OF_UNIVERSITY_OF_MOSCOW" | |
}, | |
"label": "SUPPORTED_FOUNDATION_OF_UNIVERSITY_OF_MOSCOW", | |
"color": "#9C27B0", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152942395327774700, | |
"start": 26, | |
"end": 35, | |
"properties": { | |
"label": "ENCOURAGED_FOUNDATION_OF_IMPERIAL_ACADEMY_OF_ARTS" | |
}, | |
"label": "ENCOURAGED_FOUNDATION_OF_IMPERIAL_ACADEMY_OF_ARTS", | |
"color": "#2196F3", | |
"thickness_factor": 1, | |
"directed": true | |
}, | |
{ | |
"id": 1152943494839402500, | |
"start": 26, | |
"end": 36, | |
"properties": { | |
"label": "SUPPORTED_ARCHITECTURAL_PROJECTS" | |
}, | |
"label": "SUPPORTED_ARCHITECTURAL_PROJECTS", | |
"color": "#4CAF50", | |
"thickness_factor": 1, | |
"directed": true | |
} | |
], | |
"_graph_layout": {}, | |
"_highlight": [], | |
"_license": {}, | |
"_model_module": "yfiles-jupyter-graphs", | |
"_model_module_version": "^1.6.2", | |
"_model_name": "GraphModel", | |
"_neighborhood": {}, | |
"_nodes": [ | |
{ | |
"id": 1, | |
"properties": { | |
"id": "Elizabeth I", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Elizabeth I", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 2, | |
"properties": { | |
"id": "England", | |
"label": "Country:__Entity__" | |
}, | |
"color": "#4CAF50", | |
"styles": {}, | |
"label": "England", | |
"scale_factor": 1, | |
"type": "#4CAF50", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 3, | |
"properties": { | |
"id": "Ireland", | |
"label": "Country:__Entity__" | |
}, | |
"color": "#4CAF50", | |
"styles": {}, | |
"label": "Ireland", | |
"scale_factor": 1, | |
"type": "#4CAF50", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 4, | |
"properties": { | |
"id": "House Of Tudor", | |
"label": "Royal house:__Entity__" | |
}, | |
"color": "#F44336", | |
"styles": {}, | |
"label": "House Of Tudor", | |
"scale_factor": 1, | |
"type": "#F44336", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 5, | |
"properties": { | |
"id": "Henry Viii", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Henry Viii", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 6, | |
"properties": { | |
"id": "Anne Boleyn", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Anne Boleyn", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 7, | |
"properties": { | |
"id": "Edward Vi", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Edward Vi", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 9, | |
"properties": { | |
"id": "Mary", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Mary", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 8, | |
"properties": { | |
"id": "Lady Jane Grey", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Lady Jane Grey", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 10, | |
"properties": { | |
"id": "William Cecil", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "William Cecil", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 16, | |
"properties": { | |
"id": "Sir Francis Walsingham", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Sir Francis Walsingham", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 11, | |
"properties": { | |
"id": "Baron Burghley", | |
"label": "Title:__Entity__" | |
}, | |
"color": "#607D8B", | |
"styles": {}, | |
"label": "Baron Burghley", | |
"scale_factor": 1, | |
"type": "#607D8B", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 12, | |
"properties": { | |
"id": "James Vi Of Scotland", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "James Vi Of Scotland", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 13, | |
"properties": { | |
"id": "Mary, Queen Of Scots", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Mary, Queen Of Scots", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 14, | |
"properties": { | |
"id": "France", | |
"label": "Country:__Entity__" | |
}, | |
"color": "#4CAF50", | |
"styles": {}, | |
"label": "France", | |
"scale_factor": 1, | |
"type": "#4CAF50", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 15, | |
"properties": { | |
"id": "Spain", | |
"label": "Country:__Entity__" | |
}, | |
"color": "#4CAF50", | |
"styles": {}, | |
"label": "Spain", | |
"scale_factor": 1, | |
"type": "#4CAF50", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 54, | |
"properties": { | |
"id": "Mary I", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Mary I", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 55, | |
"properties": { | |
"id": "Jane Grey", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Jane Grey", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 56, | |
"properties": { | |
"id": "Katherine Grey", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Katherine Grey", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 57, | |
"properties": { | |
"id": "Mary Grey", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Mary Grey", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 58, | |
"properties": { | |
"id": "Margaret Clifford", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Margaret Clifford", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 65, | |
"properties": { | |
"id": "James Vi", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "James Vi", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 18, | |
"properties": { | |
"id": "Elizabeth", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Elizabeth", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 19, | |
"properties": { | |
"id": "Netherlands", | |
"label": "Country:__Entity__" | |
}, | |
"color": "#4CAF50", | |
"styles": {}, | |
"label": "Netherlands", | |
"scale_factor": 1, | |
"type": "#4CAF50", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 20, | |
"properties": { | |
"id": "William Shakespeare", | |
"label": "Playwright:__Entity__" | |
}, | |
"color": "#673AB7", | |
"styles": {}, | |
"label": "William Shakespeare", | |
"scale_factor": 1, | |
"type": "#673AB7", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 21, | |
"properties": { | |
"id": "Christopher Marlowe", | |
"label": "Playwright:__Entity__" | |
}, | |
"color": "#673AB7", | |
"styles": {}, | |
"label": "Christopher Marlowe", | |
"scale_factor": 1, | |
"type": "#673AB7", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 22, | |
"properties": { | |
"id": "Francis Drake", | |
"label": "Maritime adventurer:__Entity__" | |
}, | |
"color": "#CDDC39", | |
"styles": {}, | |
"label": "Francis Drake", | |
"scale_factor": 1, | |
"type": "#CDDC39", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 23, | |
"properties": { | |
"id": "Walter Raleigh", | |
"label": "Maritime adventurer:__Entity__" | |
}, | |
"color": "#CDDC39", | |
"styles": {}, | |
"label": "Walter Raleigh", | |
"scale_factor": 1, | |
"type": "#CDDC39", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 24, | |
"properties": { | |
"id": "Spanish Armada", | |
"label": "__Entity__:Event" | |
}, | |
"color": "#9E9E9E", | |
"styles": {}, | |
"label": "Spanish Armada", | |
"scale_factor": 1, | |
"type": "#9E9E9E", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 46, | |
"properties": { | |
"id": "Peter The Great", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Peter The Great", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 47, | |
"properties": { | |
"id": "Catherine", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Catherine", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 26, | |
"properties": { | |
"id": "Elizabeth Petrovna", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Elizabeth Petrovna", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 27, | |
"properties": { | |
"id": "Empress Of Russia", | |
"label": "Title:__Entity__" | |
}, | |
"color": "#607D8B", | |
"styles": {}, | |
"label": "Empress Of Russia", | |
"scale_factor": 1, | |
"type": "#607D8B", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 28, | |
"properties": { | |
"id": "Tsar Peter The Great", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Tsar Peter The Great", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 29, | |
"properties": { | |
"id": "Catherine I Of Russia", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Catherine I Of Russia", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 30, | |
"properties": { | |
"id": "Peter Ii", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Peter Ii", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 31, | |
"properties": { | |
"id": "Anna", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Anna", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 32, | |
"properties": { | |
"id": "Ivan Vi", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Ivan Vi", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 33, | |
"properties": { | |
"id": "Peter Iii", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Peter Iii", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 34, | |
"properties": { | |
"id": "Mikhail Lomonosov", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Mikhail Lomonosov", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 35, | |
"properties": { | |
"id": "Ivan Shuvalov", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Ivan Shuvalov", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
}, | |
{ | |
"id": 36, | |
"properties": { | |
"id": "Bartolomeo Rastrelli", | |
"label": "Person:__Entity__" | |
}, | |
"color": "#2196F3", | |
"styles": {}, | |
"label": "Bartolomeo Rastrelli", | |
"scale_factor": 1, | |
"type": "#2196F3", | |
"size": [ | |
55, | |
55 | |
], | |
"position": [ | |
0, | |
0 | |
] | |
} | |
], | |
"_overview": { | |
"enabled": null, | |
"overview_set": false | |
}, | |
"_selected_graph": [ | |
[], | |
[] | |
], | |
"_sidebar": { | |
"enabled": true, | |
"start_with": "" | |
}, | |
"_view_count": null, | |
"_view_module": "yfiles-jupyter-graphs", | |
"_view_module_version": "^1.6.2", | |
"_view_name": "GraphView", | |
"layout": "IPY_MODEL_f85c1fbbfed147019ba1e0c14990c55b" | |
} | |
}, | |
"f85c1fbbfed147019ba1e0c14990c55b": { | |
"model_module": "@jupyter-widgets/base", | |
"model_name": "LayoutModel", | |
"model_module_version": "1.2.0", | |
"state": { | |
"_model_module": "@jupyter-widgets/base", | |
"_model_module_version": "1.2.0", | |
"_model_name": "LayoutModel", | |
"_view_count": null, | |
"_view_module": "@jupyter-widgets/base", | |
"_view_module_version": "1.2.0", | |
"_view_name": "LayoutView", | |
"align_content": null, | |
"align_items": null, | |
"align_self": null, | |
"border": null, | |
"bottom": null, | |
"display": null, | |
"flex": null, | |
"flex_flow": null, | |
"grid_area": null, | |
"grid_auto_columns": null, | |
"grid_auto_flow": null, | |
"grid_auto_rows": null, | |
"grid_column": null, | |
"grid_gap": null, | |
"grid_row": null, | |
"grid_template_areas": null, | |
"grid_template_columns": null, | |
"grid_template_rows": null, | |
"height": "800px", | |
"justify_content": null, | |
"justify_items": null, | |
"left": null, | |
"margin": null, | |
"max_height": null, | |
"max_width": null, | |
"min_height": null, | |
"min_width": null, | |
"object_fit": null, | |
"object_position": null, | |
"order": null, | |
"overflow": null, | |
"overflow_x": null, | |
"overflow_y": null, | |
"padding": null, | |
"right": null, | |
"top": null, | |
"visibility": null, | |
"width": "100%" | |
} | |
} | |
} | |
} | |
}, | |
"nbformat": 4, | |
"nbformat_minor": 0 | |
} |