#!/usr/bin/env bash
# pathfinder-2-sqlite-MIRROR/gendb.sh
#
# Rebuild pf2.db from scratch: apply every schema file, then load every
# data file, then (optionally) generate spell data via a helper script.
#
# Requires: sqlite3, python3 (for the optional spell-data step).
set -euo pipefail

readonly db=pf2.db

# Load order is significant and preserved from the original script:
# tables referenced by others (sources, damagetypes, ...) come first.
readonly schema_tables=(
  sources damagetypes conditions backgrounds abilityscores bulks sizes
  langs traits actions spells feats senses ancestries armor gear
)
readonly data_tables=(
  sources damagetypes conditions backgrounds abilityscores bulks senses
  sizes langs traits actions spells feats ancestries armor gear heritages
)

# -f: don't fail on a fresh checkout where the db doesn't exist yet.
rm -f -- "$db"

echo 'loading schema'
for t in "${schema_tables[@]}"; do
  sqlite3 "$db" < "schema/${t}.sql"
done

echo 'loading data'
for t in "${data_tables[@]}"; do
  sqlite3 "$db" < "data/${t}.sql"
done

# Comment out the following block if you don't want to generate the spell data.
# Subshell keeps the cd scoped; set -e aborts if the directory is missing,
# so spells.py can never run from the wrong working directory.
(
  cd data/third_party_json
  python3 spells.py
)
# TODO Eventually we will stop relying on the spells.py script and I will have
# the actual .sql files for the spell data; I am waiting to see if the
# third-party source improves the data in the next few weeks. If not, we'll
# "divorce" from that data, dump to .sql, and manually manipulate going
# forward.