ADITYA KUMAR committed on
Commit 1bbd364 · 1 Parent(s): e072e4e
Upload 3 files
- extract-colors.py +37 -0
- predict.py +17 -0
- tokenizer.py +2 -0
extract-colors.py
ADDED
@@ -0,0 +1,37 @@
from sklearn.cluster import KMeans
from collections import Counter
import numpy as np
import cv2

def get_image(pil_image):
    # Convert a PIL image to an RGB numpy array via OpenCV
    nimg = np.array(pil_image)
    image = cv2.cvtColor(nimg, cv2.COLOR_RGB2BGR)
    image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    return image

def get_labels(rimg):
    # Cluster the flattened pixels into 5 dominant colors
    clf = KMeans(n_clusters=5)
    labels = clf.fit_predict(rimg)
    return labels, clf

def get_closest_color(colors):
    # Return the color nearest to pure white by Euclidean distance
    white = (255, 255, 255)
    closest_color = min(colors, key=lambda c: np.linalg.norm(np.array(c) - white))
    return closest_color

def RGB2HEX(color):
    return "#{:02x}{:02x}{:02x}".format(int(color[0]), int(color[1]), int(color[2]))

def extract_colors_and_closest_to_white(image_path):
    img = cv2.imread(image_path)
    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    # Flatten the image to a list of pixels before clustering
    reshaped_img = img.reshape(img.shape[0] * img.shape[1], img.shape[2])
    labels, clf = get_labels(reshaped_img)
    counts = Counter(labels)
    center_colors = clf.cluster_centers_
    # Order cluster centers by the labels returned by Counter
    ordered_colors = [center_colors[i] for i in counts.keys()]
    hex_colors = [RGB2HEX(c) for c in ordered_colors]

    closest_color_to_white = get_closest_color(center_colors)
    hex_closest_color_to_white = RGB2HEX(closest_color_to_white)
    return hex_colors, hex_closest_color_to_white
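A minimal usage sketch (hypothetical, not part of this commit; sample.jpg is a placeholder path). Because the file name contains a hyphen, the module is loaded by file path rather than with a plain import:

import importlib.util

# Load extract-colors.py under an importable name
spec = importlib.util.spec_from_file_location("extract_colors", "extract-colors.py")
extract_colors = importlib.util.module_from_spec(spec)
spec.loader.exec_module(extract_colors)

# sample.jpg is a placeholder; any local image path works
hex_colors, closest = extract_colors.extract_colors_and_closest_to_white("sample.jpg")
print("palette:", hex_colors)
print("closest to white:", closest)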
predict.py
ADDED
@@ -0,0 +1,17 @@
from transformers import pipeline, AutoTokenizer, AutoModelForSequenceClassification
from PIL import Image
from io import BytesIO
import requests
import numpy as np

from tokenizer import tokenizer_function

# Load the model and tokenizer from local paths and wrap them in a pipeline
model = AutoModelForSequenceClassification.from_pretrained("./model/extract-colors.py")
tokenizer = AutoTokenizer.from_pretrained("./model/tokenizer", tokenizer_func=tokenizer_function)
model_class = pipeline('image-classification', model=model, tokenizer=tokenizer)

def get_colors_and_closest_to_white(image_url):
    # Download the image and open it with PIL
    response = requests.get(image_url)
    img = Image.open(BytesIO(response.content))
    img_array = np.array(img)

    # The image-classification pipeline accepts a PIL image directly
    result = model_class(img)
    return result
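A hypothetical call to predict.py (the URL is a placeholder; the local ./model paths referenced at import time must exist for the module to load):

from predict import get_colors_and_closest_to_white

# Placeholder URL; replace with a real image URL
result = get_colors_and_closest_to_white("https://example.com/sample.jpg")
print(result)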
tokenizer.py
ADDED
@@ -0,0 +1,2 @@
def tokenizer_function(text):
    # Split the input text into lines after trimming surrounding whitespace
    return text.strip().split('\n')
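A small illustration of the helper's behavior (the sample string is made up):

from tokenizer import tokenizer_function

sample = "first line\nsecond line\n"
print(tokenizer_function(sample))  # ['first line', 'second line']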