from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer, set_seed as hf_set_seed
import random
import gradio as gr
# Model options
model_options = {
"distilgpt2": "distilgpt2",
"GPT-Neo 125M": "EleutherAI/gpt-neo-125M",
}
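# Both entries are small causal language models so the demo stays usable on CPU; other
# Hugging Face Hub text-generation model IDs (e.g. "gpt2") could be added to this mapping.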
# Load default model
default_model_name = model_options["GPT-Neo 125M"]
tokenizer = AutoTokenizer.from_pretrained(default_model_name)
model = AutoModelForCausalLM.from_pretrained(default_model_name)
generator = pipeline("text-generation", model=model, tokenizer=tokenizer, device=-1) # Use CPU
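# device=-1 pins the pipeline to the CPU; device=0 would select the first CUDA GPU if one is available.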
# Predefined options for randomization
names = ["John Doe", "Jane Smith", "Ali Khan"]
locations = ["Pump House 1", "Main Valve Station", "Chemical Storage Area"]
work_types = ["Routine pump maintenance", "Valve inspection", "Chemical handling"]
durations = [30, 45, 60]
# Classification labels (not referenced directly yet; each observation tuple below carries its own label)
good_practices = ["Good Practice"]
deviations = ["Deviation"]
plant_observations = [
("Energy sources controlled", "Good Practice", "Lockout/tagout procedures were followed."),
("Leaks/spills contained", "Deviation", "Oil spill near a pump flagged for cleanup."),
("Housekeeping standard high", "Deviation", "Scattered tools were organized after reminder."),
]
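# Each tuple is (checklist item, classification, remark); generate_soc() below renders them
# into the prompt in a randomized order.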
# Seed Python's RNG (used for the randomized report fields) and, via transformers.set_seed,
# the model-side RNGs so that sampled generations are reproducible as well
def set_seed(seed_value):
    random.seed(seed_value)
    hf_set_seed(seed_value)
# AI-based SOC report generation
def generate_soc(model_choice, seed=None):
    # Set seed if provided (compare against None so a seed of 0 is not silently ignored)
    if seed is not None:
        set_seed(int(seed))
    # Reload the generator only when the selected model differs from the one already loaded
    global generator
    model_name = model_options[model_choice]
    if generator.tokenizer.name_or_path != model_name:
        tokenizer = AutoTokenizer.from_pretrained(model_name)
        model = AutoModelForCausalLM.from_pretrained(model_name)
        generator = pipeline("text-generation", model=model, tokenizer=tokenizer, device=-1)
    # Randomized fields
    observer_name = random.choice(names)
    location = random.choice(locations)
    work_type = random.choice(work_types)
    duration = random.choice(durations)
    # Generate random plant observations
    observations = "\n".join(
        f"{i+1}. {obs[0]}\n{obs[1]}\n{obs[2]}"
        for i, obs in enumerate(random.sample(plant_observations, len(plant_observations)))
    )
    # AI Prompt
    prompt = f"""
Write a detailed Safety Observation and Conversation (SOC) report for a water injection plant.
Key Safety Conclusions/Comments/Agreements Made:
Briefly summarize safety observations, key concerns, and corrective actions.
Observer's Name: {observer_name}
KOC ID No.: [Insert KOC ID here]
Type of Work Observed: {work_type}
Location: {location}
Duration (in mins): {duration}
--- Plant Observations:
{observations}
--- People Observations:
Include details on PPE compliance, hazard understanding, and good practices or deviations.
--- Process Observations:
Summarize job safety analysis, procedures followed, and improvements needed.
--- Performance Observations:
Evaluate the overall safety performance, including work pace and supervision.
"""
    # do_sample=True makes the seed control the sampled continuation; max_length counts prompt + completion tokens
    result = generator(prompt, max_length=512, num_return_sequences=1, do_sample=True,
                       pad_token_id=generator.tokenizer.eos_token_id)[0]["generated_text"]
    return result
# Gradio Interface
def app_interface(model_choice, seed):
    return generate_soc(model_choice, seed)
# Gradio Layout
with gr.Blocks() as app:
gr.Markdown("# AI-Generated Safety Observation and Conversation (SOC) Reports")
gr.Markdown(
"""
Generate detailed SOC reports for a water injection plant using AI assistance.
Customize your report with multiple models, randomization, and reproducibility through seeds.
"""
    )
    with gr.Row():
        model_choice = gr.Dropdown(
            label="Select Model",
            choices=list(model_options.keys()),
            value="GPT-Neo 125M",
        )
        seed = gr.Number(label="Seed (Optional)", value=None, precision=0)
    output_box = gr.Textbox(
        label="Generated SOC Report",
        placeholder="Your SOC report will appear here...",
        lines=30,
        show_copy_button=True,  # built-in copy icon on the textbox (available in recent Gradio versions)
    )
    with gr.Row():
        generate_btn = gr.Button("Generate SOC Report")
        copy_btn = gr.Button("Copy to Clipboard")
    generate_btn.click(app_interface, inputs=[model_choice, seed], outputs=output_box)
    # Clipboard access has to happen in the browser; the `js` kwarg requires Gradio 4+ (Gradio 3 uses `_js`)
    copy_btn.click(None, inputs=output_box, outputs=None,
                   js="(text) => { navigator.clipboard.writeText(text); return []; }")
# Launch the app
app.launch()
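# Note: app.launch(share=True) would additionally expose a temporary public Gradio link.
# For a quick check without the UI, generate_soc("distilgpt2", seed=42) can be called directly
# (assumption: before launch() blocks, or from a separate interpreter session).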