path: root/make_dataset.py
#!/usr/bin/env python
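"""Build an image-caption/translation dataset from the baseline JSONL outputs and push it to the Hugging Face Hub."""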

import json
import os

from datasets import Dataset, Image
from glob import glob
from huggingface_hub import login


def make_dataset(base="./baseline"):  # TODO: Make actual hf dataset
    prompt = (
        "You are a professional English-German translator and also a renowned photography critic.\n\n"
        "Write a detailed caption for this image in a single sentence. Translate the caption into German. "
        "The output needs to be JSON, the keys being 'English' and 'German' for the respective captions. "
        "Only output the JSON, nothing else."
        "<start_of_image>"
    )

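    # Parallel columns for the resulting dataset: user prompt, image path, and expected assistant reply.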
    user_prompts = []
    images = []
    assistant_replies = []

    for filename in glob(f"{base}/*.jsonl"):
        # Each baseline file holds a single JSON object with "English" and "Translation" captions.
        with open(filename, "r") as f:
            data = json.loads(f.read())
        # The matching image shares the file's basename.
        stem = os.path.basename(filename).removesuffix(".jsonl")
        image_path = f"../d/Images/{stem}.jpg"
        user_prompts.append(prompt)
        assistant_replies.append(json.dumps({
            "English": data["English"],
            "German": data["Translation"],
        }, ensure_ascii=False, indent=0))
        images.append(image_path)

    return Dataset.from_dict(
        {"image": images, "user": user_prompts, "assistant": assistant_replies}
    ).cast_column("image", Image())


def main():
    login()  # prompts for a Hugging Face token if none is cached
    dataset = make_dataset()
    dataset.push_to_hub("asdf2k/caption_translation")


if __name__ == "__main__":
    main()