Spaces: Kaveh-Workstation (Sleeping)
Kaveh-Workstation committed on
Commit · d19fddf
1 Parent(s): 250bb13
init
This view is limited to 50 files because it contains too many changes. See raw diff.
- .gitattributes +0 -0
- app.py +105 -0
- captions.txt +0 -0
- image_embeddings_8_clip14_cxrbert.pkl +3 -0
- images/ROCO_00001.jpg +3 -0
- images/ROCO_00006.jpg +3 -0
- images/ROCO_00016.jpg +3 -0
- images/ROCO_00025.jpg +3 -0
- images/ROCO_00031.jpg +3 -0
- images/ROCO_00036.jpg +3 -0
- images/ROCO_00061.jpg +3 -0
- images/ROCO_00084.jpg +3 -0
- images/ROCO_00138.jpg +3 -0
- images/ROCO_00153.jpg +3 -0
- images/ROCO_00176.jpg +3 -0
- images/ROCO_00185.jpg +3 -0
- images/ROCO_00190.jpg +3 -0
- images/ROCO_00206.jpg +3 -0
- images/ROCO_00218.jpg +3 -0
- images/ROCO_00251.jpg +3 -0
- images/ROCO_00258.jpg +3 -0
- images/ROCO_00261.jpg +3 -0
- images/ROCO_00264.jpg +3 -0
- images/ROCO_00271.jpg +3 -0
- images/ROCO_00300.jpg +3 -0
- images/ROCO_00302.jpg +3 -0
- images/ROCO_00303.jpg +3 -0
- images/ROCO_00307.jpg +3 -0
- images/ROCO_00316.jpg +3 -0
- images/ROCO_00319.jpg +3 -0
- images/ROCO_00328.jpg +3 -0
- images/ROCO_00332.jpg +3 -0
- images/ROCO_00333.jpg +3 -0
- images/ROCO_00341.jpg +3 -0
- images/ROCO_00350.jpg +3 -0
- images/ROCO_00382.jpg +3 -0
- images/ROCO_00391.jpg +3 -0
- images/ROCO_00402.jpg +3 -0
- images/ROCO_00425.jpg +3 -0
- images/ROCO_00428.jpg +3 -0
- images/ROCO_00446.jpg +3 -0
- images/ROCO_00447.jpg +3 -0
- images/ROCO_00450.jpg +3 -0
- images/ROCO_00454.jpg +3 -0
- images/ROCO_00468.jpg +3 -0
- images/ROCO_00477.jpg +3 -0
- images/ROCO_00481.jpg +3 -0
- images/ROCO_00494.jpg +3 -0
- images/ROCO_00496.jpg +3 -0
- images/ROCO_00506.jpg +3 -0
.gitattributes
CHANGED
The diff for this file is too large to render. See raw diff.
app.py
ADDED
@@ -0,0 +1,105 @@
+import gradio as gr
+import torch
+import pickle
+import numpy as np
+import pandas as pd
+from transformers import CLIPProcessor, CLIPModel
+from transformers import VisionTextDualEncoderModel, VisionTextDualEncoderProcessor
+from sklearn.metrics.pairwise import cosine_similarity
+import csv
+from PIL import Image
+
+model_path = "kaveh/rclip"
+embeddings_file = './image_embeddings_8_clip14_cxrbert.pkl'
+csv_path = "./captions.csv"
+
+def load_image_ids(csv_file):
+    ids = []
+    captions = []
+    with open(csv_file, 'r') as f:
+        reader = csv.reader(f, delimiter='\t')
+        for row in reader:
+            ids.append(row[0])
+            captions.append(row[1])
+    return ids, captions
+
+def load_embeddings(embeddings_file):
+    with open(embeddings_file, 'rb') as f:
+        image_embeddings = pickle.load(f)
+    return image_embeddings
+
+
+def find_similar_images(query_embedding, image_embeddings, k=2):
+    similarities = cosine_similarity(query_embedding.reshape(1, -1), image_embeddings)
+    closest_indices = np.argsort(similarities[0])[::-1][:k]
+    scores = sorted(similarities[0])[::-1][:k]
+    return closest_indices, scores
+
+
+def main(query, k=2):
+    # Load RCLIP model
+    model = VisionTextDualEncoderModel.from_pretrained(model_path)
+    processor = VisionTextDualEncoderProcessor.from_pretrained(model_path)
+
+    # Load image embeddings
+    image_embeddings = load_embeddings(embeddings_file)
+
+    # Embed the query
+    inputs = processor(text=query, images=None, return_tensors="pt", padding=True)
+    with torch.no_grad():
+        query_embedding = model.get_text_features(**inputs)[0].numpy()
+
+    # Get image names
+    ids, captions = load_image_ids(csv_path)
+
+    # Find similar images
+    similar_image_indices, scores = find_similar_images(query_embedding, image_embeddings, k=int(k))
+
+    # Return the results
+    similar_image_names = [f"./images/{ids[index]}.jpg" for index in similar_image_indices]
+    similar_image_captions = [captions[index] for index in similar_image_indices]
+    similar_images = [Image.open(i) for i in similar_image_names]
+
+    return similar_images, pd.DataFrame([[t+1 for t in range(k)], similar_image_names, similar_image_captions, scores], index=["#", "path", "caption", "score"]).T
+
+
+# Define the Gradio interface
+examples = [
+    ["Chest X-ray photos",5],
+    ["Orthopantogram (OPG)",5],
+    ["Brain Scan",5],
+    ["tomography",5]
+]
+
+title = "RCLIP Image Retrieval"
+description = "CLIP model fine-tuned on the ROCO dataset"
+
+with gr.Blocks(title=title) as demo:
+    with gr.Row():
+        with gr.Column(scale=5):
+            gr.Markdown("# "+title)
+            gr.Markdown(description)
+        gr.HTML(value="<img src=\"https://newresults.co.uk/wp-content/uploads/2022/02/teesside-university-logo.png\" alt=\"teesside logo\" width=\"120\" height=\"70\">", show_label=False, scale=1)
+        #Image.open("./data/teesside university logo.png"), height=70, show_label=False, container=False)
+    with gr.Column(variant="compact"):
+        with gr.Row(variant="compact"):
+            query = gr.Textbox(label="Enter your query", show_label=False, placeholder="Enter your query", scale=5)
+            btn = gr.Button("Search query", variant="primary", scale=1)
+
+        n_s = gr.Slider(2, 10, label='Number of Top Results', value=5, step=1.0, show_label=True)
+
+
+    with gr.Column(variant="compact"):
+        gr.Markdown("## Results")
+        gallery = gr.Gallery(label="found images", show_label=True, elem_id="gallery", columns=[2], rows=[4], object_fit="contain", height="auto", preview=True)
+        gr.Markdown("Information of the found images")
+        df = gr.DataFrame()
+        btn.click(main, [query, n_s], [gallery, df])
+
+    with gr.Column(variant="compact"):
+        gr.Markdown("## Examples")
+        gr.Examples(examples, [query, n_s])
+
+
+demo.launch(debug='True')
+
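app.py embeds the text query with get_text_features and ranks the rows of the pickled image-embedding matrix by cosine similarity, so the matrix is expected to hold one embedding per image, in the same order as the ids in the captions file. The script that produced image_embeddings_8_clip14_cxrbert.pkl is not part of this commit; the following is only a rough sketch of how such a matrix could be precomputed with the same checkpoint (the file names, directory walk, and row ordering are assumptions):

# Hypothetical precompute script (not included in this commit).
import os
import pickle

import numpy as np
import torch
from PIL import Image
from transformers import VisionTextDualEncoderModel, VisionTextDualEncoderProcessor

model = VisionTextDualEncoderModel.from_pretrained("kaveh/rclip")
processor = VisionTextDualEncoderProcessor.from_pretrained("kaveh/rclip")

rows = []
# NOTE: the iteration order must match the row order of the captions file,
# because app.py indexes ids/captions by the row index of this matrix.
for name in sorted(os.listdir("./images")):
    image = Image.open(os.path.join("./images", name)).convert("RGB")
    inputs = processor(images=image, return_tensors="pt")
    with torch.no_grad():
        features = model.get_image_features(pixel_values=inputs["pixel_values"])[0]
    rows.append(features.numpy())

with open("./image_embeddings_8_clip14_cxrbert.pkl", "wb") as f:
    pickle.dump(np.stack(rows), f)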
captions.txt
ADDED
The diff for this file is too large to render. See raw diff.
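load_image_ids() in app.py parses the captions file with a tab delimiter and expects an image id in the first column and the caption in the second (note that csv_path in app.py points at ./captions.csv, while the file added in this commit is captions.txt). A tiny hypothetical round trip in that layout, with placeholder captions:

import csv

# Placeholder rows in the "<image id>\t<caption>" layout load_image_ids() expects.
rows = [
    ["ROCO_00001", "placeholder caption"],
    ["ROCO_00006", "placeholder caption"],
]

with open("captions_example.csv", "w", newline="") as f:
    csv.writer(f, delimiter="\t").writerows(rows)

with open("captions_example.csv", "r") as f:
    for image_id, caption in csv.reader(f, delimiter="\t"):
        print(image_id, caption)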
image_embeddings_8_clip14_cxrbert.pkl
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fc76673a76678efa04336a1badc9dd8e29bc45ac0b0f97284ca18d3ddb73c298
+size 16742563
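Only the Git LFS pointer (oid and size) appears in the diff; the embedding matrix itself is fetched via LFS. After pulling it, a quick sanity check of the shape app.py relies on (a hypothetical snippet, assuming the pickle holds a single 2-D array with one row per image):

import pickle

with open("./image_embeddings_8_clip14_cxrbert.pkl", "rb") as f:
    image_embeddings = pickle.load(f)

# app.py passes this object directly to sklearn's cosine_similarity,
# so it should behave like an (n_images, embedding_dim) array.
print(type(image_embeddings))
print(getattr(image_embeddings, "shape", len(image_embeddings)))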
images/ROCO_00001.jpg · ADDED · Git LFS Details
images/ROCO_00006.jpg · ADDED · Git LFS Details
images/ROCO_00016.jpg · ADDED · Git LFS Details
images/ROCO_00025.jpg · ADDED · Git LFS Details
images/ROCO_00031.jpg · ADDED · Git LFS Details
images/ROCO_00036.jpg · ADDED · Git LFS Details
images/ROCO_00061.jpg · ADDED · Git LFS Details
images/ROCO_00084.jpg · ADDED · Git LFS Details
images/ROCO_00138.jpg · ADDED · Git LFS Details
images/ROCO_00153.jpg · ADDED · Git LFS Details
images/ROCO_00176.jpg · ADDED · Git LFS Details
images/ROCO_00185.jpg · ADDED · Git LFS Details
images/ROCO_00190.jpg · ADDED · Git LFS Details
images/ROCO_00206.jpg · ADDED · Git LFS Details
images/ROCO_00218.jpg · ADDED · Git LFS Details
images/ROCO_00251.jpg · ADDED · Git LFS Details
images/ROCO_00258.jpg · ADDED · Git LFS Details
images/ROCO_00261.jpg · ADDED · Git LFS Details
images/ROCO_00264.jpg · ADDED · Git LFS Details
images/ROCO_00271.jpg · ADDED · Git LFS Details
images/ROCO_00300.jpg · ADDED · Git LFS Details
images/ROCO_00302.jpg · ADDED · Git LFS Details
images/ROCO_00303.jpg · ADDED · Git LFS Details
images/ROCO_00307.jpg · ADDED · Git LFS Details
images/ROCO_00316.jpg · ADDED · Git LFS Details
images/ROCO_00319.jpg · ADDED · Git LFS Details
images/ROCO_00328.jpg · ADDED · Git LFS Details
images/ROCO_00332.jpg · ADDED · Git LFS Details
images/ROCO_00333.jpg · ADDED · Git LFS Details
images/ROCO_00341.jpg · ADDED · Git LFS Details
images/ROCO_00350.jpg · ADDED · Git LFS Details
images/ROCO_00382.jpg · ADDED · Git LFS Details
images/ROCO_00391.jpg · ADDED · Git LFS Details
images/ROCO_00402.jpg · ADDED · Git LFS Details
images/ROCO_00425.jpg · ADDED · Git LFS Details
images/ROCO_00428.jpg · ADDED · Git LFS Details
images/ROCO_00446.jpg · ADDED · Git LFS Details
images/ROCO_00447.jpg · ADDED · Git LFS Details
images/ROCO_00450.jpg · ADDED · Git LFS Details
images/ROCO_00454.jpg · ADDED · Git LFS Details
images/ROCO_00468.jpg · ADDED · Git LFS Details
images/ROCO_00477.jpg · ADDED · Git LFS Details
images/ROCO_00481.jpg · ADDED · Git LFS Details
images/ROCO_00494.jpg · ADDED · Git LFS Details
images/ROCO_00496.jpg · ADDED · Git LFS Details
images/ROCO_00506.jpg · ADDED · Git LFS Details