hmnshudhmn24 commited on
Commit
b8a2bdc
·
verified ·
1 Parent(s): c301534

Upload 15 files

Browse files
.gitattributes CHANGED
@@ -1,35 +1,2 @@
1
- *.7z filter=lfs diff=lfs merge=lfs -text
2
- *.arrow filter=lfs diff=lfs merge=lfs -text
3
- *.bin filter=lfs diff=lfs merge=lfs -text
4
- *.bz2 filter=lfs diff=lfs merge=lfs -text
5
- *.ckpt filter=lfs diff=lfs merge=lfs -text
6
- *.ftz filter=lfs diff=lfs merge=lfs -text
7
- *.gz filter=lfs diff=lfs merge=lfs -text
8
- *.h5 filter=lfs diff=lfs merge=lfs -text
9
- *.joblib filter=lfs diff=lfs merge=lfs -text
10
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
- *.model filter=lfs diff=lfs merge=lfs -text
13
- *.msgpack filter=lfs diff=lfs merge=lfs -text
14
- *.npy filter=lfs diff=lfs merge=lfs -text
15
- *.npz filter=lfs diff=lfs merge=lfs -text
16
- *.onnx filter=lfs diff=lfs merge=lfs -text
17
- *.ot filter=lfs diff=lfs merge=lfs -text
18
- *.parquet filter=lfs diff=lfs merge=lfs -text
19
- *.pb filter=lfs diff=lfs merge=lfs -text
20
- *.pickle filter=lfs diff=lfs merge=lfs -text
21
- *.pkl filter=lfs diff=lfs merge=lfs -text
22
- *.pt filter=lfs diff=lfs merge=lfs -text
23
- *.pth filter=lfs diff=lfs merge=lfs -text
24
- *.rar filter=lfs diff=lfs merge=lfs -text
25
- *.safetensors filter=lfs diff=lfs merge=lfs -text
26
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
- *.tar.* filter=lfs diff=lfs merge=lfs -text
28
- *.tar filter=lfs diff=lfs merge=lfs -text
29
- *.tflite filter=lfs diff=lfs merge=lfs -text
30
- *.tgz filter=lfs diff=lfs merge=lfs -text
31
- *.wasm filter=lfs diff=lfs merge=lfs -text
32
- *.xz filter=lfs diff=lfs merge=lfs -text
33
- *.zip filter=lfs diff=lfs merge=lfs -text
34
- *.zst filter=lfs diff=lfs merge=lfs -text
35
- *tfevents* filter=lfs diff=lfs merge=lfs -text
 
1
+ *.py linguist-language=Python
2
+ *.ipynb -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Himanshu Kumar
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
README.md CHANGED
@@ -1,3 +1,21 @@
1
- ---
2
- license: apache-2.0
3
- ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ language:
3
+ - en
4
+ license: mit
5
+ tags:
6
+ - text-generation
7
+ - worldbuilding
8
+ - creative-ai
9
+ - storytelling
10
+ pipeline_tag: text-generation
11
+ library_name: transformers
12
+ model_name: dreamforge
13
+ base_model: google/flan-t5-xl
14
+ ---
15
+
16
+ # 🌌 DREAMFORGE — Generative World Builder
17
+
18
+ DREAMFORGE converts short seed prompts into rich fictional universes (world names, geography, species, histories, factions, culture, and story hooks).
19
+ This repo contains a lightweight pipeline and a Streamlit UI to experiment locally.
20
+
21
+ See `examples/` for sample prompts and `notebooks/` for a demo notebook.
api.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from fastapi import FastAPI
from pydantic import BaseModel
from world_generator import generate_world
from utils import load_config

app = FastAPI(title='DREAMFORGE API')


class Request(BaseModel):
    # Seed prompt describing the world to generate.
    prompt: str


@app.post('/generate')
def generate(req: Request):
    """Generate a fictional world from the request's seed prompt.

    Returns a JSON object with a single 'world' key holding the
    generated markdown text.
    """
    # Fail fast with a clear FileNotFoundError if config.yaml is missing;
    # the returned config is not needed here because generate_world reads
    # the configuration itself (the old `cfg` binding was never used).
    load_config()
    text = generate_world(req.prompt)
    return {'world': text}
app.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import streamlit as st
from world_generator import generate_world
from lore_generator import generate_lore
from species_generator import generate_species
from utils import load_config

st.set_page_config(page_title='DREAMFORGE', layout='wide')
st.title('🌌 DREAMFORGE — Generative World Builder')

# Load config up front so a missing config.yaml fails before the user
# spends time typing a prompt.
cfg = load_config()

seed = st.text_area('Seed prompt', value='A mist-covered archipelago where floating cities harvest storms.')
extra = st.text_area('Extra notes (optional)', value='Low technology, focus on trade and storms.')

if st.button('Generate World'):
    # Fold the optional notes into the seed so they actually influence
    # generation — previously `extra` was collected but never used.
    full_seed = f"{seed}\n\nExtra notes: {extra}" if extra.strip() else seed

    with st.spinner('Generating world (model may be downloaded on first run)...'):
        world = generate_world(full_seed)
        lore = generate_lore(full_seed)
        species = generate_species(full_seed)

    st.subheader('World Overview')
    st.markdown(world)

    st.subheader('Lore & Timeline')
    st.markdown(lore)

    st.subheader('Species & Ecosystems')
    st.markdown(species)

    st.download_button('Download world (md)', world, file_name='dreamforge_world.md')
config.yaml ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ generation:
2
+ model_name: "google/flan-t5-xl"
3
+ max_new_tokens: 600
4
+ num_beams: 4
5
+ temperature: 0.85
6
+ top_p: 0.9
7
+ templates:
8
+ world_overview: |
9
+ Generate a detailed fictional world from the prompt below.
10
+ Provide sections: World Name, Summary (1 paragraph), Geography (regions, climate), Major Species (brief descriptions),
11
+ Key Locations (3-6 with short descriptions), History (timeline: 5 events), Factions & Politics (major groups),
12
+ Culture & Technology (short notes), Notable Figures (3 with short bios), Plot Hooks (5 story seeds).
13
+ Output in clear markdown sections and keep language creative.
examples/prompts.txt ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ a mist-covered archipelago where floating cities harvest storms
2
+ a vast desert where ruins sing at night, inhabited by sand-people
3
+ a planet of bioluminescent jungles and slow-moving sky-whales
4
+ an iceball world with subterranean magma gardens and glass-harvesters
5
+ an industrial renaissance island ruled by guilds of tinkers and wind-sailors
examples/sample_output.md ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ # Example: a mist-covered archipelago where floating cities harvest storms
2
+
3
+ ## World Name
4
+ The Stormforges
5
+
6
+ ## Summary
7
+ A chain of islands shrouded in perpetual mist, each crowned with floating cities that harvest and channel storms for energy...
lore_generator.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ from transformers import pipeline
3
+ from utils import load_config
4
+
5
def generate_lore(seed_prompt):
    """Generate a deep history and mythology for a world seed.

    Parameters
    ----------
    seed_prompt : str
        Short description of the world to elaborate on.

    Returns
    -------
    str
        Markdown text containing a timeline and myth summaries.
    """
    cfg = load_config()
    model_name = cfg['generation']['model_name']
    # Cache pipelines on the function itself: constructing one loads the
    # full model from disk, which is far too expensive to repeat per call.
    cache = getattr(generate_lore, '_pipelines', None)
    if cache is None:
        cache = generate_lore._pipelines = {}
    if model_name not in cache:
        # GPU 0 when CUDA is available, otherwise CPU (-1).
        device = 0 if torch.cuda.is_available() else -1
        cache[model_name] = pipeline('text2text-generation', model=model_name, device=device)
    pipe = cache[model_name]
    prompt = f"Provide a deep history and mythology for this world seed:\n{seed_prompt}\n\nRespond in markdown with timeline and myth summaries."
    out = pipe(prompt, max_new_tokens=400, do_sample=True, temperature=cfg['generation']['temperature'])
    return out[0].get('generated_text') if isinstance(out, list) else str(out)
notebooks/demo_dreamforge.ipynb ADDED
@@ -0,0 +1 @@
 
 
1
+ {"cells": [{"cell_type": "markdown", "metadata": {}, "source": ["# DREAMFORGE demo", "Run world_generator.py from the notebook or CLI."]}, {"cell_type": "code", "metadata": {}, "source": ["!python world_generator.py --prompt \"a frozen desert ruled by glass cities\" --out demo_world.md\n", "!sed -n '1,200p' demo_world.md\n"]}], "metadata": {"kernelspec": {"display_name": "Python 3", "language": "python", "name": "python3"}, "language_info": {"name": "python"}}, "nbformat": 4, "nbformat_minor": 5}
prompt_builder.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from utils import load_config
2
+ import textwrap
3
+
4
def build_world_prompt(user_prompt: str, extra_notes: str = "") -> str:
    """Assemble the full generation prompt for the world-overview task.

    Parameters
    ----------
    user_prompt : str
        The user's seed description of the world.
    extra_notes : str, optional
        Additional constraints or flavour notes to append.

    Returns
    -------
    str
        Prompt text combining the seed, optional notes, and the
        instruction template from config.yaml.
    """
    cfg = load_config()
    # config.yaml appears to nest `templates` under `generation`
    # (TODO confirm); accept either a top-level or nested key so the
    # template is found in both layouts instead of silently falling
    # back to an empty string.
    templates = cfg.get('templates') or cfg.get('generation', {}).get('templates', {})
    template = templates.get('world_overview', '').strip()
    parts = ["User seed:", user_prompt.strip()]
    if extra_notes:
        parts += ["Extra notes:", extra_notes.strip()]
    parts += ["Instructions:", template]
    return "\n\n".join(parts)
requirements.txt ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ transformers>=4.30.0
2
+ torch>=1.12.0
3
+ accelerate
4
+ sentencepiece
5
+ pyyaml
6
+ streamlit
7
+ fastapi
8
+ uvicorn
9
+ pydantic
species_generator.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ from transformers import pipeline
3
+ from utils import load_config
4
+
5
def generate_species(seed_prompt):
    """Invent 4-6 major species for a world seed.

    Parameters
    ----------
    seed_prompt : str
        Short description of the world the species inhabit.

    Returns
    -------
    str
        Text with short species bios and their ecological roles.
    """
    cfg = load_config()
    model_name = cfg['generation']['model_name']
    # Cache pipelines on the function itself: constructing one loads the
    # full model from disk, which is far too expensive to repeat per call.
    cache = getattr(generate_species, '_pipelines', None)
    if cache is None:
        cache = generate_species._pipelines = {}
    if model_name not in cache:
        # GPU 0 when CUDA is available, otherwise CPU (-1).
        device = 0 if torch.cuda.is_available() else -1
        cache[model_name] = pipeline('text2text-generation', model=model_name, device=device)
    pipe = cache[model_name]
    prompt = f"Invent 4-6 major species for a world described as:\n{seed_prompt}\n\nProvide short bios and ecological roles."
    out = pipe(prompt, max_new_tokens=300, do_sample=True, temperature=cfg['generation']['temperature'])
    return out[0].get('generated_text') if isinstance(out, list) else str(out)
utils.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pathlib import Path
2
+ import yaml
3
+
4
def load_config(path='config.yaml'):
    """Load and parse the YAML configuration file.

    Parameters
    ----------
    path : str, optional
        Location of the config file (defaults to ./config.yaml,
        resolved relative to the current working directory).

    Returns
    -------
    dict
        The parsed configuration.

    Raises
    ------
    FileNotFoundError
        If the file does not exist.
    """
    p = Path(path)
    if not p.exists():
        raise FileNotFoundError(f"Missing config file: {path}")
    # Read explicitly as UTF-8 so parsing does not depend on the
    # platform's locale encoding.
    return yaml.safe_load(p.read_text(encoding='utf-8'))
9
+
10
def save_text(path, text):
    """Write *text* to *path* as UTF-8, overwriting any existing file."""
    target = Path(path)
    target.write_text(text, encoding='utf-8')
world_generator.py ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ from transformers import pipeline
3
+ from prompt_builder import build_world_prompt
4
+ from utils import load_config, save_text
5
+
6
def get_pipeline(model_name):
    """Return a text2text-generation pipeline for *model_name*.

    Pipelines are cached on the function itself: constructing one loads
    the full model from disk, which is far too expensive to repeat on
    every generate_world call.
    """
    cache = getattr(get_pipeline, '_cache', None)
    if cache is None:
        cache = get_pipeline._cache = {}
    if model_name not in cache:
        # GPU 0 when CUDA is available, otherwise CPU (-1).
        device = 0 if torch.cuda.is_available() else -1
        # Use text2text for the Flan-T5 family.
        cache[model_name] = pipeline('text2text-generation', model=model_name, device=device, truncation=True)
    return cache[model_name]
10
+
11
def generate_world(prompt_seed, out_path=None, extra_notes=""):
    """Generate a markdown world description from a seed prompt.

    Parameters
    ----------
    prompt_seed : str
        Short description of the world to generate.
    out_path : str, optional
        If given, the generated markdown is also written to this path.
    extra_notes : str, optional
        Additional constraints forwarded to the prompt template.
        Previously hard-coded to ""; exposed so callers (UI/API) can
        pass the user's optional notes through. Defaults preserve the
        old behavior.

    Returns
    -------
    str
        The generated world description in markdown.
    """
    cfg = load_config()
    gen = cfg['generation']
    model_name = gen['model_name']
    max_new_tokens = gen.get('max_new_tokens', 512)
    num_beams = gen.get('num_beams', 4)
    temperature = gen.get('temperature', 0.9)
    top_p = gen.get('top_p', 0.95)

    prompt = build_world_prompt(prompt_seed, extra_notes=extra_notes)
    pipe = get_pipeline(model_name)
    result = pipe(prompt, max_new_tokens=max_new_tokens, num_beams=num_beams, do_sample=True, temperature=temperature, top_p=top_p)
    text = result[0].get('generated_text') if isinstance(result, list) else str(result)
    if out_path:
        save_text(out_path, text)
    return text
26
+
27
if __name__ == '__main__':
    import argparse

    # Minimal CLI: a required seed prompt and an output markdown path.
    parser = argparse.ArgumentParser(description='DREAMFORGE world generator CLI.')
    parser.add_argument('--prompt', required=True)
    parser.add_argument('--out', default='dreamforge_output.md')
    cli_args = parser.parse_args()
    generate_world(cli_args.prompt, out_path=cli_args.out)
    print('Saved to', cli_args.out)