from sklearn.decomposition import PCA
import matplotlib.pyplot as plt
from gensim.models import Word2Vec

# Sample text data for training the word2vec model.
# Each inner list is treated as one "sentence" of related words.
sentences = [
    ["king", "queen", "man", "woman", "child", "family"],
    ["dog", "cat", "animal", "pet", "wild", "tame"],
    ["car", "truck", "vehicle", "driver", "road", "highway"],
    ["apple", "orange", "fruit", "banana", "grape", "berry"],
]

# Train the Word2Vec model.
# seed + workers=1 make the embedding (and therefore the plot) reproducible:
# gensim only guarantees deterministic training with a fixed seed AND a
# single worker thread — with workers=2 the original produced a different
# layout on every run.
model = Word2Vec(
    sentences,
    vector_size=50,
    window=3,
    min_count=1,
    workers=1,
    seed=42,
)

# Extract the learned vectors for the words we want to visualize.
words = ["king", "queen", "man", "woman", "dog", "cat"]
vectors = [model.wv[word] for word in words]

# PCA projects the 50-dimensional embeddings down to 2D for plotting.
pca = PCA(n_components=2)
reduced_vectors = pca.fit_transform(vectors)

# Plot each word as a labelled point in the reduced 2D space.
plt.figure(figsize=(10, 7))
for word, (x, y) in zip(words, reduced_vectors):
    plt.scatter(x, y, label=word)
    # Small x-offset keeps the text label from overlapping its marker.
    plt.text(x + 0.02, y, word, fontsize=9)

plt.title("Word2Vec Distance Visualization")
plt.xlabel("PCA Dimension 1")
plt.ylabel("PCA Dimension 2")
plt.grid(True)
plt.legend()
plt.show()
