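# Gradio demo for GAIR/Anole-7b-v0.1: text-to-image and interleaved image-text
# generation. See https://github.com/GAIR-NLP/anole for more information.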
import spaces
import subprocess
import gradio as gr
from PIL import Image
import json
import os

# Repository that hosts the Anole-7b-v0.1 checkpoint
repo_id = "GAIR/Anole-7b-v0.1"

# Clone the model weights with git-lfs if they are not already present locally
if not os.path.exists("./Anole-7b-v0.1"):
    os.system("git lfs install")
    os.system(f"git clone https://huggingface.co/{repo_id}")

# Run the environment setup script once and capture its output for debugging
result = subprocess.run(["/bin/bash", "install.sh"], capture_output=True, text=True)

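# spaces.GPU requests a ZeroGPU allocation on Hugging Face Spaces for each call;
# `duration` is the maximum expected runtime of the call in seconds.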
@spaces.GPU(duration=90)
def text_to_image(instruction):
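    # Shell out to the repo's text2image.py script; "-b 1" requests a single image,
    # which the script is assumed to write to outputs/text2image/1.png.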
    result = subprocess.run(["python", "text2image.py", "-i", instruction, "-b", "1"], capture_output=True, text=True)
    if result.returncode == 0:
        return gr.update(value="Image Generated. Check the display below.", visible=True), "outputs/text2image/1.png"
    else:
        return "Error: " + result.stderr, None

@spaces.GPU(duration=150)
def text_to_interleaved(instruction):
    result = subprocess.run(["python", "interleaved_generation.py", "-i", instruction], capture_output=True, text=True)
    if result.returncode == 0:
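        # Seven display slots, matching the UI below: text goes in the even
        # indices (0, 2, 4, 6) and images in the odd indices (1, 3, 5).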
        outputs = [None] * 7
        box_index = 0
        
        # Read the segments.jsonl file
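        # interleaved_generation.py is assumed to write one JSON object per line,
        # e.g. {"type": "text", "content": "..."} or
        # {"type": "image", "content": "path/to/image.png"}.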
        with open('./segments.jsonl', 'r') as file:
            for line in file:
                line_dict = json.loads(line.strip())
                if line_dict['type'] == 'text':
                    if box_index % 2 != 0:
                        box_index += 1
                    if box_index >= len(outputs):
                        break  # all display slots are filled
                    outputs[box_index] = line_dict['content']
                elif line_dict['type'] == 'image':
                    if box_index % 2 == 0:
                        box_index += 1
                    if box_index >= len(outputs):
                        break  # all display slots are filled
                    outputs[box_index] = Image.open(line_dict['content'])
                box_index += 1
    
        return tuple(outputs)
    else:
        # A plain string is not a valid value for the gr.Image outputs, so
        # surface the error in the text boxes and leave the image slots empty.
        error_message = "Error: " + result.stderr
        return error_message, None, error_message, None, error_message, None, error_message

# Use Blocks to organize the interfaces side by side
with gr.Blocks() as demo:
    # Create a row to place columns side by side
    with gr.Row():
        # First column for Text-to-Image Interface
        with gr.Column():
            gr.Interface(
                fn=text_to_image,  # Function that generates an image from a text instruction
                inputs=gr.Textbox(label="Enter Instruction for Image Generation"),  # Input textbox for user instructions
                outputs=[gr.Text(label="Status"), gr.Image(label="Generated Image")],  # Outputs: status message and generated image
                title="Anole: Text-to-Image",  # Title of the interface
                description="Generate images based on text instructions. Check https://github.com/GAIR-NLP/anole for more information. Model can be downloaded at: https://huggingface.co/GAIR/Anole-7b-v0.1."
            )
        # Second column for Text-to-Interleaved Image-Text Interface
        with gr.Column():
            gr.Interface(
                fn=text_to_interleaved,
                inputs=gr.Textbox(label="Enter Instruction for Interleaved Content"),
                outputs=[
                    gr.Text(label="Text Output 1"), gr.Image(label="Image Output 1"),
                    gr.Text(label="Text Output 2"), gr.Image(label="Image Output 2"),
                    gr.Text(label="Text Output 3"), gr.Image(label="Image Output 3"),
                    gr.Text(label="Text Output 4"),
                ],  # Alternating text/image slots filled by text_to_interleaved
                title="Anole: Text-to-Interleaved",  # Title of the interface
                description="Generate interleaved text and images based on text instructions. Check https://github.com/GAIR-NLP/anole for more information. Model can be downloaded at: https://huggingface.co/GAIR/Anole-7b-v0.1."
            )

# Launch the entire Blocks interface
demo.launch()