Merge branch 'UI_test' into damage_calc

This commit is contained in:
hppeng 2022-06-26 00:08:58 -07:00
commit 472c35e425
87 changed files with 2005 additions and 2075 deletions

1439
builder/doc.html Normal file

File diff suppressed because it is too large Load diff

BIN
dev/builder_colorcode.png Executable file

Binary file not shown.

After

Width:  |  Height:  |  Size: 178 KiB

12
dev/compute_graph.svg Executable file

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 63 KiB

View file

@ -892,12 +892,73 @@
Last updated: 30 May 2022
</p>
</div>
<div class="row section" title="Wynnbuilder Internals (compute graph)">
<p>
This section is about how Wynnbuilder's main builder page processes user input and calculates results.
Might be useful if you want to script wynnbuilder or extend it! Or for wynnbuilder developers (internal docs).
</p>
<div class="row section" title="Why?">
<p>
Modeling wynnbuilder's internal computations as a directed graph has a few advantages:
</p>
<ul class = "indent">
<li>Each compute "node" is small(er); easier to debug.</li>
<li>Information flow is specified explicitly (easier to debug).</li>
<li>Easy to build caching for arbitrary computations (only calculate what you need)</li>
<li>Stateless builder! Abstract the entire builder as a chain of function calls</li>
<li>Makes for pretty pictures</li>
</ul>
</div>
<div class="row section" title="TODO ComputeNode details">
TODO
</div>
<p>
An overview of wynnbuilder's internal structure can be seen <a href = "./compute_graph.svg" target = "_blank">here</a>. Arrows indicate flow of information.
Colors correspond roughly as follows:
</p>
<img src="./builder_colorcode.png"/>
<p>
The overall logic flow is as follows:
<ul class = "indent">
<li>Item and Powder inputs are parsed. Powders are applied to items.</li>
<li>Items and level information are combined to make a build.</li>
<li>Information from input fields for skill points and edit IDs is collected into an ID bonuses table.</li>
<li>Information about active powder specials, strength boosts, etc. are collected into their own ID tables.</li>
<li>All of the above tables are merged with the build's stats table to produce the "Final" ID bonus table.</li>
<li>Which spell variant (think: major id) to use for each of the 4 spells is computed based on the build.</li>
<li>Spell damage is calculated, using the merged stat table, spell info, and weapon info.</li>
</ul>
</p>
<p>
Outputs are computed as follows:
<ul class = "indent">
<li>Input box highlights are computed from the items produced by item input box nodes.</li>
<li>Item display is computed from item input boxes.</li>
<li>Build hash/URL is computed from the build, and skillpoint assignment.</li>
<li>Spell damage is displayed based on calculated spell damage results.</li>
<li>Build stats are displayed by builder-stats-display (this same node also displays a bunch of stuff at the bottom of the screen...)</li>
</ul>
</p>
<div class="row section" title="Gotchas">
<p>
The build sets default skillpoints and edited IDs automatically, whenever a build item/level is updated.
This is done using "soft links" by two nodes shown in red (builder-skillpoint-setter and builder-id-setter).
</p>
<p>
A soft link is where something goes and manually marks nodes dirty and calls their update methods.
This is useful for these cases because the skillpoints and editable ID fields usually take their value from
user input, but in some cases we want to programmatically set them.
</p>
<p>
For example another soft link (not shown) is used to implement the reset button.
</p>
</div>
</div>
<!-- <div class="row section" title="Test Section">
</div> -->
</div>
<script type="text/javascript" src="../js/dev.js"></script>
<script type="text/javascript" src="../js/sq2icons.js"></script>
</body>
</html>
</html>

View file

@ -1,3 +1,4 @@
let all_nodes = [];
class ComputeNode {
/**
* Make a generic compute node.
@ -16,6 +17,7 @@ class ComputeNode {
this.dirty = true;
this.inputs_dirty = new Map();
this.inputs_dirty_count = 0;
all_nodes.push(this);
}
/**
@ -91,9 +93,9 @@ class ComputeNode {
this.inputs.push(parent_node)
link_name = (link_name !== undefined) ? link_name : parent_node.name;
this.input_translation.set(parent_node.name, link_name);
this.inputs_dirty.set(parent_node.name, parent_node.dirty);
if (parent_node.dirty) {
if (parent_node.dirty || (parent_node.value === null && !this.fail_cb)) {
this.inputs_dirty_count += 1;
this.inputs_dirty.set(parent_node.name, true);
}
parent_node.children.push(this);
return this;

30
js/d3_export.js vendored Normal file
View file

@ -0,0 +1,30 @@
// http://bl.ocks.org/rokotyan/0556f8facbaf344507cdc45dc3622177
// Set-up the export button
function set_export_button(svg, button_id, output_id) {
d3.select('#'+button_id).on('click', function(){
//get svg source.
var serializer = new XMLSerializer();
var source = serializer.serializeToString(svg.node());
console.log(source);
source = source.replace(/^<g/, '<svg');
source = source.replace(/<\/g>$/, '</svg>');
//add name spaces.
if(!source.match(/^<svg[^>]+xmlns="http\:\/\/www\.w3\.org\/2000\/svg"/)){
source = source.replace(/^<svg/, '<svg xmlns="http://www.w3.org/2000/svg"');
}
if(!source.match(/^<svg[^>]+"http\:\/\/www\.w3\.org\/1999\/xlink"/)){
source = source.replace(/^<svg/, '<svg xmlns:xlink="http://www.w3.org/1999/xlink"');
}
//add xml declaration
source = '<?xml version="1.0" standalone="no"?>\r\n' + source;
//convert svg source to URI data scheme.
var url = "data:image/svg+xml;charset=utf-8,"+encodeURIComponent(source);
//set url value to a element's href attribute.
document.getElementById(output_id).href = url;
});
}

View file

@ -64,4 +64,4 @@ function toggleSection(section) {
}
init_dev();
init_dev();

176
js/render_compute_graph.js Normal file
View file

@ -0,0 +1,176 @@
// Build a responsive SVG container inside #graph_body for the compute-graph view.
d3.select("#graph_body")
.append("div")
.attr("style", "width: 100%; height: 100%; min-height: 0px; flex-grow: 1")
.append("svg")
.attr("preserveAspectRatio", "xMinYMin meet")
.classed("svg-content-responsive", true);
// Top-level selections shared by the rest of this script.
let graph = d3.select("svg");
let svg = graph.append('g');
// NOTE(review): margin appears unused in this file — confirm before removing.
let margin = {top: 20, right: 20, bottom: 35, left: 40};
// Bounding box of the svg's parent element; used to size the viewport on resize.
function bbox() {
let ret = graph.node().parentNode.getBoundingClientRect();
return ret;
}
let _bbox = bbox();
// Node color palette; each node stores an index into this array,
// cycled via ctrl-click (see create_svg's click handler).
const colors = ['aqua', 'yellow', 'fuchsia', 'white', 'teal', 'olive', 'purple', 'gray', 'blue', 'lime', 'red', 'silver', 'navy', 'green', 'maroon'];
const n_colors = colors.length;
// Background rect that receives zoom/pan events (see view.call(zoom) below).
const view = svg.append("rect")
.attr("class", "view")
.attr("x", 0)
.attr("y", 0);
/**
 * Convert a flat list of ComputeNode-like objects into a d3-friendly
 * {nodes, links} graph description.
 *
 * Each raw node must have: .name (string), .inputs (array of parent objects
 * from the same list), and .input_translation (Map from parent name to the
 * link's local name).
 *
 * @param {Array} nodes_raw - the raw node objects (e.g. the global all_nodes).
 * @returns {{nodes: Array, links: Array}} nodes carry {id, color, data};
 *     links carry {source, target, name}, with ids as string indices.
 */
function convert_data(nodes_raw) {
    const edges = [];
    // Map from node object to its (string) index, for edge endpoint lookup.
    const node_id = new Map();
    // Bug fix: `nodes` was assigned without a declaration, leaking a global.
    const nodes = [];
    for (const i in nodes_raw) {
        node_id.set(nodes_raw[i], i);
        nodes.push({id: i, color: 0, data: nodes_raw[i]});
    }
    for (const node of nodes_raw) {
        const to = node_id.get(node);
        for (const input of node.inputs) {
            const from = node_id.get(input);
            // Edge label: the link name under which `node` consumes `input`.
            const link_name = node.input_translation.get(input.name);
            edges.push({
                source: from,
                target: to,
                name: link_name
            });
        }
    }
    return {
        nodes: nodes,
        links: edges
    };
}
/**
 * Build the force-directed rendering of the compute graph inside the global `svg`.
 * data: {nodes, links} as produced by convert_data.
 * redraw_func: callback invoked after zoom events to resize the viewport.
 * Interactions: drag pins a node; plain click unpins it; ctrl-click cycles its
 * color; scroll/drag on the background rect zooms and pans.
 */
function create_svg(data, redraw_func) {
// Initialize the links
var link = svg
.selectAll("line")
.data(data.links)
.enter()
.append("line")
.style("stroke", "#aaa")
// Initialize the nodes
let node = svg
.selectAll("g")
.data(data.nodes);
let node_enter = node.enter()
.append('g')
let circles = node_enter.append("circle")
.attr("r", 20)
.style("fill", ({id, color, data}) => colors[color])
// Node label: the compute node's name, drawn above-left of the circle.
node_enter.append('text')
.attr("dx", -20)
.attr("dy", -22)
.style('fill', 'white')
.text(({id, color, data}) => data.name);
// Let's list the force we wanna apply on the network
var simulation = d3.forceSimulation(data.nodes) // Force algorithm is applied to data.nodes
.force("link", d3.forceLink().strength(0.1) // This force provides links between nodes
.id(function(d) { return d.id; }) // This provide the id of a node
.links(data.links) // and this the list of links
)
.force("charge", d3.forceManyBody().strength(-400)) // This adds repulsion between nodes. Play with the -400 for the repulsion strength
//.force("center", d3.forceCenter(_bbox.width / 2, _bbox.height / 2).strength(0.1)) // This force attracts nodes to the center of the svg area
.on("tick", ticked);
// This function is run at each iteration of the force algorithm, updating the nodes position.
// Last zoom transform; applied to node groups manually so circle + label move together.
let scale_transform = {k: 1, x: 0, y: 0}
function ticked() {
link
.attr("x1", function(d) { return d.source.x; })
.attr("y1", function(d) { return d.source.y; })
.attr("x2", function(d) { return d.target.x; })
.attr("y2", function(d) { return d.target.y; });
node_enter.attr("transform", function (d) { return 'translate('+scale_transform.x+','+scale_transform.y+') scale('+scale_transform.k+') translate('+d.x+','+d.y+')' })
}
// Dragging pins a node at the pointer position (marks it .fixed via fx/fy).
const drag = d3.drag()
.on("start", dragstart)
.on("drag", dragged);
node_enter.call(drag).on('click', click);
function click(event, d) {
if (event.ctrlKey) {
// Color cycle.
d.color = (d.color + 1) % n_colors;
d3.select(this).selectAll('circle').style("fill", ({id, color, data}) => colors[color])
}
else {
// Plain click: unpin the node and let the simulation reposition it.
delete d.fx;
delete d.fy;
d3.select(this).classed("fixed", false);
simulation.alpha(0.5).restart();
}
}
function dragstart() {
d3.select(this).classed("fixed", true);
}
function dragged(event, d) {
d.fx = event.x;
d.fy = event.y;
simulation.alpha(0.5).restart();
}
const zoom = d3.zoom()
.scaleExtent([0.01, 10])
.translateExtent([[-10000, -10000], [10000, 10000]])
.filter(filter)
.on("zoom", zoomed);
view.call(zoom);
function zoomed({ transform }) {
link.attr('transform', transform);
scale_transform = transform;
node_enter.attr("transform", function (d) { return 'translate('+scale_transform.x+','+scale_transform.y+') scale('+scale_transform.k+') translate('+d.x+','+d.y+')' })
redraw_func();
}
// prevent scrolling then apply the default filter
function filter(event) {
event.preventDefault();
return (!event.ctrlKey || event.type === 'wheel') && !event.button;
}
}
// Hook up the "save as SVG" export button for the rendered graph.
set_export_button(svg, 'saveButton', 'saveLink');
// Render once the builder page's compute graph exists.
(async function() {
// JANKY
// Poll until the builder script has defined edit_id_output — presumably one of
// the last globals it creates, so the compute nodes exist by then. TODO confirm.
while (edit_id_output === undefined) {
await sleep(500);
}
// Resize the svg viewport to match the container's current bounding box.
function redraw() {
_bbox = bbox();
graph.attr("viewBox", [0, 0, _bbox.width, _bbox.height]);
view.attr("width", _bbox.width - 1)
.attr("height", _bbox.height - 1);
}
d3.select(window)
.on("resize", function() {
redraw();
});
redraw();
// all_nodes is populated by the ComputeNode constructor (computation graph code).
const data = convert_data(all_nodes);
create_svg(data, redraw);
console.log("render");
})();

View file

@ -1,8 +1,6 @@
Process for getting new data:
1. run `python3 dump.py`. This will overwrite `dump.json` and `../ingreds.json`
2. Copy `../old clean.json` or `../compress.json` into `updated.json`
3. Run `python3 transform_merge.py`
4. Run `python3 ing_transform_combine.py`
5. Check validity (json differ or whatever)
6. Copy `clean.json` and `compress.json` into toplevel for usage
1. Get new data from API with `get.py`
2. Clean the data (may have to do manually) with the `process` related py files
3. Check validity (json differ or whatever)
4. Create clean and compress versions and copy them into toplevel for usage (can use `clean_json.py` and `compress_json.py` for this).

View file

@ -1,3 +1,5 @@
#parses all CI and creates a json file with all of them
import os
import re

View file

@ -1,3 +1,5 @@
#looks like something that hpp does with curl
import os
with open("ci.txt.2") as infile:

19
py_script/clean_json.py Normal file
View file

@ -0,0 +1,19 @@
'''
A generic file used for turning a json into a "clean" version of itself (human-friendly whitespace).
Clean files are useful for human reading and dev debugging.
Usage: python clean_json.py [infile rel path] [outfile rel path]
'''
import json


def clean_json(infile, outfile):
    """Read JSON from `infile` and rewrite it to `outfile` with 2-space indentation.

    Args:
        infile: path of the JSON file to read.
        outfile: path to write the pretty-printed JSON to (overwritten).
    """
    # Use context managers so both handles are closed promptly
    # (the original left open() results to be collected by the GC).
    with open(infile) as in_file:
        data = json.load(in_file)
    with open(outfile, "w") as out_file:
        json.dump(data, out_file, indent=2)


if __name__ == "__main__":
    import argparse
    # Fixed description: it was copy-pasted from get.py ("Pull data from wynn API.").
    parser = argparse.ArgumentParser(description="Pretty-print a JSON file with human-friendly whitespace.")
    parser.add_argument('infile', help='input file to read data from')
    parser.add_argument('outfile', help='output file to dump clean data into')
    args = parser.parse_args()
    clean_json(args.infile, args.outfile)

View file

@ -1,8 +1,18 @@
import sys
import json
infile = sys.argv[1]
outfile = sys.argv[2]
if len(sys.argv) > 3 and sys.argv[3] == "decompress":
json.dump(json.load(open(infile)), open(outfile, "w"), indent=4)
else:
'''
A generic file used for turning a json into a compressed version of itself (minimal whitespaces).
Compressed files are useful for lowering the amount of data sent.
Usage: python compress_json.py [infile rel path] [outfile rel path]
'''
import json


def compress_json(infile, outfile):
    """Read JSON from `infile` and rewrite it to `outfile` without indentation.

    Args:
        infile: path of the JSON file to read.
        outfile: path to write the compact JSON to (overwritten).
    """
    # Use context managers so both handles are closed promptly
    # (the original left open() results to be collected by the GC).
    with open(infile) as in_file:
        data = json.load(in_file)
    with open(outfile, "w") as out_file:
        json.dump(data, out_file)


if __name__ == "__main__":
    import argparse
    # Fixed description: it was copy-pasted from get.py ("Pull data from wynn API.").
    parser = argparse.ArgumentParser(description="Compress a JSON file by removing human-friendly whitespace.")
    parser.add_argument('infile', help='input file to read data from')
    parser.add_argument('outfile', help='output file to dump compressed data into')
    args = parser.parse_args()
    compress_json(args.infile, args.outfile)

View file

@ -1,22 +0,0 @@
import requests
import json
import numpy as np
response = requests.get("https://api.wynncraft.com/public_api.php?action=itemDB&category=all")
with open("dump.json", "w") as outfile:
outfile.write(json.dumps(response.json()))
arr = np.array([])
for i in range(4):
response = requests.get("https://api.wynncraft.com/v2/ingredient/search/tier/" + str(i))
arr = np.append(arr, np.array(response.json()['data']))
with open("../ingreds.json", "w") as outfile:
outfile.write(json.dumps(list(arr)))
with open("../ingreds_compress.json", "w") as outfile:
outfile.write(json.dumps(list(arr)))
with open("../ingreds_clean.json", "w") as outfile:
json.dump(list(arr), outfile, indent = 2) #needs further cleaning

65
py_script/get.py Normal file
View file

@ -0,0 +1,65 @@
"""
Used to GET data from the Wynncraft API. Has shorthand options and allows
for requesting from a specific url.
Usage: python get.py [url or command] [outfile rel path]
Relevant page: https://docs.wynncraft.com/
"""
import argparse
import json
import numpy as np
import requests
parser = argparse.ArgumentParser(description="Pull data from wynn API.")
parser.add_argument('target', help='an API page, or preset [items, ings, recipes, terrs, maploc]')
parser.add_argument('outfile', help='output file to dump results into')
args = parser.parse_args()
req, outfile = args.target, args.outfile
CURR_WYNN_VERS = 2.0
#default to empty file output
response = {}
if req.lower() == "items":
response = requests.get("https://api.wynncraft.com/public_api.php?action=itemDB&category=all")
elif req.lower() == "ings":
response = {"ings":[]}
for i in range(4):
response['ings'].extend(requests.get("https://api.wynncraft.com/v2/ingredient/search/tier/" + str(i)).json()['data'])
elif req.lower() == "recipes":
temp = requests.get("https://api.wynncraft.com/v2/recipe/list")
response = {"recipes":[]}
for i in range(len(temp['data'])):
response["recipes"].extend(requests.get("https://api.wynncraft.com/v2/recipe/get/" + temp['data'][i]).json()['data'])
print("" + str(i) + " / " + str(len(temp['data'])))
elif req.lower() == "terrs":
response = requests.get("https://api.wynncraft.com/public_api.php?action=territoryList").json()['territories']
delkeys = ["territory","acquired","attacker"]
for t in response:
for key in delkeys:
del response[t][key]
response[t]["neighbors"] = []
#Dependency on a third-party manually-collected data source. May not update in sync with API.
terr_data = requests.get("https://gist.githubusercontent.com/kristofbolyai/87ae828ecc740424c0f4b3749b2287ed/raw/0735f2e8bb2d2177ba0e7e96ade421621070a236/territories.json").json()
for t in data:
response[t]["neighbors"] = data[t]["Routes"]
response[t]["resources"] = data[t]["Resources"]
response[t]["storage"] = data[t]["Storage"]
response[t]["emeralds"] = data[t]["Emeralds"]
response[t]["doubleemeralds"] = data[t]["DoubleEmerald"]
response[t]["doubleresource"] = data[t]["DoubleResource"]
elif req.lower() == "maploc":
response = requests.get('https://api.wynncraft.com/public_api.php?action=mapLocations')
else:
response = requests.get(req)
response['version'] = CURR_WYNN_VERS
json.dump(response, open(outfile, "w+"))

View file

@ -3646,5 +3646,11 @@
"Narcissist": 3648,
"Mask of the Spirits": 3649,
"Inhibitor": 3650,
"Spear of Testiness": 3651
}
"Spear of Testiness": 3651,
"Blue Wynnter Sweater": 3648,
"Green Wynnter Sweater": 3649,
"Purple Wynnter Sweater": 3650,
"Red Wynnter Sweater": 3651,
"Snowtread Boots": 3652,
"White Wynnter Sweater": 3653
}

View file

@ -1,3 +1,7 @@
"""
Used for grabbing image files at some point. Not used recently.
"""
import os
import json

View file

@ -1,4 +1,8 @@
"""Json diff checker for manual testing."""
"""
Json diff checker for manual testing - mainly debug
"""
import argparse
import json

View file

@ -1,3 +1,9 @@
"""
Used to parse a changelog at some point in the past. Could be used in the future.
Not a typically used file
"""
import json
import difflib

View file

@ -1,3 +1,9 @@
"""
Parses a set from a single file.
Usage: python parse_set_individual.py [infile]
"""
import sys
set_infile = sys.argv[1]

View file

@ -1,4 +0,0 @@
with open("sets.txt", "r") as setsFile:
sets_split = (x.split("'", 2)[1][2:] for x in setsFile.read().split("a href=")[1:])
with open("sets_list.txt", "w") as outFile:
outFile.write("\n".join(sets_split))

View file

@ -1,3 +1,7 @@
"""
Generates data for dps_vis
"""
import matplotlib.pyplot as plt
import json
import numpy as np

View file

@ -1,15 +1,30 @@
"""
Used to process the raw data about ingredients pulled from the API.
Usage:
- python process_ings.py [infile] [outfile]
OR
- python process_ings.py [infile and outfile]
"""
import json
with open("../ingreds.json", "r") as infile:
ing_data = json.loads(infile.read())
ings = ing_data
#this data does not have request :)
import sys
import os
if os.path.exists("../ing_map.json"):
with open("../ing_map.json","r") as ing_mapfile:
import base64
import argparse
parser = argparse.ArgumentParser(description="Process raw pulled ingredient data.")
parser.add_argument('infile', help='input file to read data from')
parser.add_argument('outfile', help='output file to dump clean data into')
args = parser.parse_args()
infile, outfile = args.infile, args.outfile
with open(infile, "r") as in_file:
ing_data = json.loads(in_file.read())
ings = ing_data['ings']
if os.path.exists("ing_map.json"):
with open("ing_map.json","r") as ing_mapfile:
ing_map = json.load(ing_mapfile)
else:
ing_map = {ing["name"]: i for i, ing in enumerate(ings)}
@ -146,8 +161,6 @@ ing_delete_keys = [
"skin"
]
print("loaded all files.")
for ing in ings:
for key in ing_delete_keys:
if key in ing:
@ -202,13 +215,10 @@ for ing in ings:
print(f'New Ingred: {ing["name"]}')
ing["id"] = ing_map[ing["name"]]
with open("../ingreds_clean.json", "w") as outfile:
json.dump(ing_data, outfile, indent = 2)
with open("../ingreds_compress.json", "w") as outfile:
json.dump(ing_data, outfile)
with open("../ing_map.json", "w") as ing_mapfile:
#save ing ids
with open("ing_map.json", "w+") as ing_mapfile:
json.dump(ing_map, ing_mapfile, indent = 2)
print('All ing jsons updated.')
#save ings
with open(outfile, "w+") as out_file:
json.dump(ing_data, out_file)

View file

@ -1,44 +1,35 @@
"""
Used to process the raw item data pulled from the API.
NOTE!!!!!!!
Usage:
- python process_items.py [infile] [outfile]
OR
- python process_items.py [infile and outfile]
DEMON TIDE 1.20 IS HARD CODED!
AMBIVALENCE IS REMOVED!
NOTE: id_map.json is due for change. Should be updated manually when Wynn2.0/corresponding WB version drops.
"""
import json
import sys
import os
import base64
import argparse
with open("dump.json", "r") as infile:
data = json.load(infile)
parser = argparse.ArgumentParser(description="Process raw pulled item data.")
parser.add_argument('infile', help='input file to read data from')
parser.add_argument('outfile', help='output file to dump clean data into')
args = parser.parse_args()
infile, outfile = args.infile, args.outfile
with open(infile, "r") as in_file:
data = json.loads(in_file.read())
with open("updated.json", "r") as oldfile:
old_data = json.load(oldfile)
items = data["items"]
old_items = old_data["items"]
if "request" in data:
del data["request"]
# import os
# sets = dict()
# for filename in os.listdir('sets'):
# if "json" not in filename:
# continue
# set_name = filename[1:].split(".")[0].replace("+", " ").replace("%27", "'")
# with open("sets/"+filename) as set_info:
# set_obj = json.load(set_info)
# for item in set_obj["items"]:
# item_set_map[item] = set_name
# sets[set_name] = set_obj
#
# data["sets"] = sets
data["sets"] = old_data["sets"]
item_set_map = dict()
for set_name, set_data in data["sets"].items():
for item_name in set_data["items"]:
item_set_map[item_name] = set_name
translate_mappings = {
#"name": "name",
@ -141,7 +132,12 @@ delete_keys = [
#"material"
]
with open("../clean.json", "r") as oldfile:
old_data = json.load(oldfile)
old_items = old_data['items']
id_map = {item["name"]: item["id"] for item in old_items}
with open("id_map.json", "r") as idmap_file:
id_map = json.load(idmap_file)
used_ids = set([v for k, v in id_map.items()])
max_id = 0
@ -150,8 +146,8 @@ known_item_names = set()
for item in items:
known_item_names.add(item["name"])
old_items_map = dict()
remap_items = []
old_items_map = dict()
for item in old_items:
if "remapID" in item:
remap_items.append(item)
@ -186,16 +182,18 @@ for item in items:
item_name = item["displayName"]
else:
item_name = item["name"]
if item_name in item_set_map:
item["set"] = item_set_map[item_name]
if item["name"] in old_items_map:
old_item = old_items_map[item["name"]]
if "hideSet" in old_item:
item["hideSet"] = old_item["hideSet"]
items.extend(remap_items)
with open("clean.json", "w") as outfile:
json.dump(data, outfile, indent=2)
with open("compress.json", "w") as outfile:
json.dump(data, outfile)
#write items back into data
data["items"] = items
#save id map
with open("id_map.json","w") as id_mapfile:
json.dump(id_map, id_mapfile, indent=2)
#write the data back to the outfile
with open(outfile, "w+") as out_file:
json.dump(data, out_file)

View file

@ -0,0 +1,59 @@
"""
Used to process the raw data about crafting recipes pulled from the API.
Usage:
- python process_recipes.py [infile] [outfile]
OR
- python process_recipes.py [infile and outfile]
"""
import json
import sys
import os
import base64
import argparse
parser = argparse.ArgumentParser(description="Process raw pulled recipe data.")
parser.add_argument('infile', help='input file to read data from')
parser.add_argument('outfile', help='output file to dump clean data into')
args = parser.parse_args()
infile, outfile = args.infile, args.outfile
with open(infile, "r") as in_file:
recipe_data = json.loads(in_file.read())
recipes = recipe_data["recipes"]
if os.path.exists("recipe_map.json"):
with open("recipe_map.json","r") as recipe_mapfile:
recipe_map = json.load(recipe_mapfile)
else:
recipe_map = {recipe["name"]: i for i, recipe in enumerate(recipes)}
recipe_translate_mappings = {
"level" : "lvl",
"id" : "name",
}
recipe_delete_keys = [ #lol
]
for recipe in recipes:
for key in recipe_delete_keys:
if key in recipe:
del recipe[key]
for k, v in recipe_translate_mappings.items():
if k in recipe:
recipe[v] = recipe[k]
del recipe[k]
if not (recipe["name"] in recipe_map):
recipe_map[recipe["name"]] = len(recipe_map)
print(f'New Recipe: {recipe["name"]}')
recipe["id"] = recipe_map[recipe["name"]]
#save recipe id map
with open("recipe_map.json", "w") as recipe_mapfile:
json.dump(recipe_map, recipe_mapfile, indent = 2)
#save recipe data
with open(outfile, "w+") as out_file:
json.dump(recipe_data, out_file)

View file

@ -1,46 +0,0 @@
import os
with open("../recipes_compress.json", "r") as infile:
recipe_data = json.loads(infile.read())
recipes = recipe_data["recipes"]
if os.path.exists("recipe_map.json"):
with open("recipe_map.json","r") as recipe_mapfile:
recipe_map = json.load(recipe_mapfile)
else:
recipe_map = {recipe["name"]: i for i, recipe in enumerate(recipes)}
recipe_translate_mappings = {
"level" : "lvl",
"id" : "name",
}
recipe_delete_keys = [ #lol
]
print("loaded all files.")
for recipe in recipes:
for key in recipe_delete_keys:
if key in recipe:
del recipe[key]
for k, v in recipe_translate_mappings.items():
if k in recipe:
recipe[v] = recipe[k]
del recipe[k]
if not (recipe["name"] in recipe_map):
recipe_map[recipe["name"]] = len(recipe_map)
print(f'New Recipe: {recipe["name"]}')
recipe["id"] = recipe_map[recipe["name"]]
with open("../recipes_clean.json", "w") as outfile:
json.dump(recipe_data, outfile, indent = 2)
with open("../recipes_compress.json", "w") as outfile:
json.dump(recipe_data, outfile)
with open("../recipe_map.json", "w") as recipe_mapfile:
json.dump(recipe_map,recipe_mapfile,indent = 2)
print('All ing jsons updated.')

File diff suppressed because one or more lines are too long

View file

@ -1,9 +0,0 @@
import requests
import json
response = requests.get("https://api.wynncraft.com/public_api.php?action=itemDB&search=atlas").json()
atlas = response['items'][0]
with open('test.json',"w") as outfile:
json.dump(atlas, outfile, indent = 2)
print(atlas)

View file

@ -1,39 +0,0 @@
{
"items": [
"Adventurer's Cap",
"Adventurer's Boots",
"Adventurer's Pants",
"Adventurer's Tunic"
],
"bonuses": [
{},
{
"sdPct": 4,
"mdPct": 4,
"xpb": 10,
"lb": 5,
"spd": 2,
"hpBonus": 15,
"spRegen": 5
},
{
"sdPct": 12,
"mdPct": 12,
"xpb": 20,
"lb": 10,
"spd": 5,
"hpBonus": 40,
"spRegen": 15
},
{
"mr": 2,
"sdPct": 25,
"mdPct": 25,
"xpb": 50,
"lb": 30,
"spd": 15,
"hpBonus": 175,
"spRegen": 50
}
]
}

View file

@ -1,30 +0,0 @@
{
"items": [
"Air Relic Helmet",
"Air Relic Boots",
"Air Relic Leggings",
"Air Relic Chestplate"
],
"bonuses": [
{},
{
"xpb": 5,
"lb": 10,
"spd": 10,
"hpBonus": 60
},
{
"xpb": 10,
"lb": 25,
"spd": 20,
"hpBonus": 190
},
{
"xpb": 25,
"lb": 50,
"agi": 20,
"spd": 60,
"hpBonus": 400
}
]
}

View file

@ -1,26 +0,0 @@
{
"items": [
"Bandit's Locket",
"Bandit's Bangle",
"Bandit's Knuckle",
"Bandit's Ring"
],
"bonuses": [
{},
{
"xpb": 3,
"lb": 4,
"eSteal": 1
},
{
"xpb": 7,
"lb": 9,
"eSteal": 3
},
{
"xpb": 12,
"lb": 15,
"eSteal": 6
}
]
}

View file

@ -1,14 +0,0 @@
{
"items": [
"Beachside Headwrap",
"Beachside Conch"
],
"bonuses": [
{},
{
"lb": 20,
"wDamPct": 35,
"wDefPct": 25
}
]
}

View file

@ -1,15 +0,0 @@
{
"items": [
"Bear Mask",
"Bear Head",
"Bear Body"
],
"bonuses": [
{},
{
"mdPct": 14,
"hpBonus": 30,
"mdRaw": 20
}
]
}

View file

@ -1,19 +0,0 @@
{
"items": [
"Black Catalyst"
],
"bonuses": [
{
"xpb": -5
},
{
"mr": 1,
"sdPct": 10,
"xpb": 30,
"expd": 10,
"hpBonus": 325,
"spRegen": 10,
"sdRaw": 90
}
]
}

View file

@ -1,29 +0,0 @@
{
"items": [
"Black Cap",
"Black Boots",
"Black Pants",
"Black Tunic"
],
"bonuses": [
{},
{
"ms": 1,
"dex": 2,
"sdRaw": 15,
"mdRaw": 5
},
{
"ms": 1,
"dex": 6,
"sdRaw": 35,
"mdRaw": 10
},
{
"ms": 3,
"dex": 20,
"sdRaw": 65,
"mdRaw": 70
}
]
}

View file

@ -1,14 +0,0 @@
{
"items": [
"Blue Team Boots",
"Blue Team Leggings",
"Blue Team Chestplate",
"Blue Team Helmet"
],
"bonuses": [
{},
{},
{},
{}
]
}

View file

@ -1,14 +0,0 @@
{
"items": [
"Bony Circlet",
"Bony Bow"
],
"bonuses": [
{},
{
"agi": 8,
"mdRaw": 45,
"aDamPct": 15
}
]
}

View file

@ -1,20 +0,0 @@
{
"items": [
"Builder's Helmet",
"Builder's Boots",
"Builder's Trousers",
"Builder's Breastplate"
],
"bonuses": [
{},
{
"xpb": 5
},
{
"xpb": 10
},
{
"xpb": 15
}
]
}

View file

@ -1,21 +0,0 @@
{
"items": [
"Champion Helmet",
"Champion Boots",
"Champion Leggings",
"Champion Chestplate"
],
"bonuses": [
{},
{},
{},
{
"mr": 5,
"sdPct": 75,
"mdPct": 75,
"ms": 5,
"ls": 400,
"hprRaw": 600
}
]
}

View file

@ -1,58 +0,0 @@
{
"items": [
"Clock Helm",
"Clock Amulet",
"Watch Bracelet",
"Clockwork Ring",
"Time Ring",
"Clock Boots",
"Clock Leggings",
"Clock Mail"
],
"bonuses": [
{},
{
"fDamPct": 15,
"wDamPct": 6,
"aDamPct": 5,
"tDamPct": 18,
"eDamPct": 8
},
{
"fDamPct": 14,
"wDamPct": 12,
"aDamPct": 13
},
{
"fDamPct": 13,
"wDamPct": 18,
"aDamPct": 20,
"tDamPct": 18,
"eDamPct": 14
},
{
"fDamPct": 12,
"wDamPct": 24,
"aDamPct": 28
},
{
"fDamPct": 11,
"wDamPct": 24,
"aDamPct": 24,
"tDamPct": 18,
"eDamPct": 22
},
{
"fDamPct": 10,
"wDamPct": 24,
"aDamPct": 19
},
{
"fDamPct": 9,
"wDamPct": 24,
"aDamPct": 14,
"tDamPct": 18,
"eDamPct": 34
}
]
}

View file

@ -1,24 +0,0 @@
{
"items": [
"Corrupted Nii Mukluk",
"Corrupted Nii Plate",
"Corrupted Nii Shako"
],
"bonuses": [
{},
{
"int": 3,
"def": 3,
"hprRaw": 60
},
{
"mr": 4,
"int": 15,
"def": 15,
"hpBonus": 1500,
"hprRaw": 270,
"fDefPct": 60,
"wDefPct": 60
}
]
}

View file

@ -1,24 +0,0 @@
{
"items": [
"Corrupted Uth Sandals",
"Corrupted Uth Belt",
"Corrupted Uth Plume"
],
"bonuses": [
{},
{
"ls": 125,
"agi": 3,
"def": 3
},
{
"ls": 375,
"ref": 70,
"agi": 15,
"def": 15,
"thorns": 70,
"fDefPct": 75,
"aDefPct": 75
}
]
}

View file

@ -1,44 +0,0 @@
{
"items": [
"Cosmic Visor",
"Cosmic Walkers",
"Cosmic Ward",
"Cosmic Vest"
],
"bonuses": [
{},
{
"xpb": 15,
"lb": 15,
"ref": 5,
"spRegen": 15,
"fDefPct": 10,
"wDefPct": 10,
"aDefPct": 10,
"tDefPct": 10,
"eDefPct": 10
},
{
"xpb": 35,
"lb": 35,
"ref": 15,
"spRegen": 35,
"fDefPct": 20,
"wDefPct": 20,
"aDefPct": 20,
"tDefPct": 20,
"eDefPct": 20
},
{
"xpb": 50,
"lb": 50,
"ref": 30,
"spRegen": 50,
"fDefPct": 30,
"wDefPct": 30,
"aDefPct": 30,
"tDefPct": 30,
"eDefPct": 30
}
]
}

View file

@ -1,30 +0,0 @@
{
"items": [
"Earth Relic Helmet",
"Earth Relic Boots",
"Earth Relic Leggings",
"Earth Relic Chestplate"
],
"bonuses": [
{},
{
"mdPct": 10,
"xpb": 5,
"lb": 10,
"hpBonus": 65
},
{
"mdPct": 20,
"xpb": 10,
"lb": 25,
"hpBonus": 200
},
{
"mdPct": 45,
"xpb": 25,
"lb": 50,
"str": 20,
"hpBonus": 425
}
]
}

View file

@ -1,33 +0,0 @@
{
"items": [
"Elf Cap",
"Elf Shoes",
"Elf Pants",
"Elf Robe"
],
"bonuses": [
{},
{
"hprPct": 10,
"lb": 8,
"agi": 5,
"def": 5,
"spd": 6
},
{
"hprPct": 20,
"lb": 16,
"agi": 7,
"def": 7,
"spd": 14
},
{
"hprPct": 45,
"lb": 32,
"agi": 10,
"def": 10,
"spd": 20,
"hprRaw": 45
}
]
}

View file

@ -1,30 +0,0 @@
{
"items": [
"Fire Relic Helmet",
"Fire Relic Boots",
"Fire Relic Leggings",
"Fire Relic Chestplate"
],
"bonuses": [
{},
{
"xpb": 5,
"lb": 10,
"hpBonus": 90,
"hprRaw": 12
},
{
"xpb": 10,
"lb": 25,
"hpBonus": 270,
"hprRaw": 40
},
{
"xpb": 25,
"lb": 50,
"def": 20,
"hpBonus": 570,
"hprRaw": 100
}
]
}

View file

@ -1,22 +0,0 @@
{
"items": [
"Flashfire Gauntlet",
"Flashfire Knuckle"
],
"bonuses": [
{},
{
"spd": 8,
"atkTier": 1,
"wDamPct": -15,
"wDefPct": -15
},
{
"spd": 16,
"atkTier": 1,
"fDamPct": 12,
"wDamPct": -15,
"wDefPct": -15
}
]
}

View file

@ -1,20 +0,0 @@
{
"items": [
"GM's Helmet",
"GM's Boots",
"GM's Trousers",
"GM's Breastplate"
],
"bonuses": [
{},
{
"xpb": 5
},
{
"xpb": 10
},
{
"xpb": 15
}
]
}

View file

@ -1,35 +0,0 @@
{
"items": [
"Ghostly Cap",
"Ghostly Boots",
"Ghostly Pants",
"Ghostly Tunic"
],
"bonuses": [
{},
{
"mr": -1,
"ms": 2,
"sdRaw": 35,
"wDamPct": 5,
"tDamPct": 5,
"eDamPct": -34
},
{
"mr": -2,
"ms": 4,
"sdRaw": 100,
"wDamPct": 10,
"tDamPct": 10,
"eDamPct": -67
},
{
"mr": -3,
"ms": 6,
"sdRaw": 195,
"wDamPct": 25,
"tDamPct": 25,
"eDamPct": -100
}
]
}

View file

@ -1,30 +0,0 @@
{
"items": [
"Goblin Hood",
"Goblin Runners",
"Goblin Cloak"
],
"bonuses": [
{
"sdPct": -6,
"mdPct": -6,
"sdRaw": 15,
"mdRaw": 10
},
{
"sdPct": -12,
"mdPct": -12,
"ls": 22,
"sdRaw": 55,
"mdRaw": 45
},
{
"sdPct": -23,
"mdPct": -23,
"ls": 51,
"ms": 2,
"sdRaw": 130,
"mdRaw": 105
}
]
}

View file

@ -1,14 +0,0 @@
{
"items": [
"Treat",
"Trick"
],
"bonuses": [
{},
{
"xpb": 15,
"spRegen": 10,
"eSteal": 5
}
]
}

View file

@ -1,16 +0,0 @@
{
"items": [
"Horse Mask",
"Horse Hoof"
],
"bonuses": [
{},
{
"mdPct": 11,
"xpb": 10,
"spd": 20,
"aDamPct": 15,
"eDamPct": 15
}
]
}

View file

@ -1,37 +0,0 @@
{
"items": [
"Jester Necklace",
"Jester Bracelet",
"Jester Ring"
],
"bonuses": [
{
"xpb": -25,
"lb": -25
},
{
"xpb": -50,
"lb": -50,
"spd": -10,
"hpBonus": 300,
"sdRaw": -110,
"mdRaw": 60
},
{
"xpb": -75,
"lb": -75,
"spd": 5,
"hpBonus": -150,
"sdRaw": 100,
"mdRaw": -75
},
{
"xpb": -100,
"lb": -100,
"spd": 5,
"hpBonus": -150,
"sdRaw": 100,
"mdRaw": -75
}
]
}

View file

@ -1,17 +0,0 @@
{
"items": [
"Kaerynn's Mind",
"Kaerynn's Body"
],
"bonuses": [
{},
{
"mr": 2,
"xpb": 12,
"str": 4,
"hpBonus": 400,
"sdRaw": 100,
"mdRaw": 50
}
]
}

View file

@ -1,29 +0,0 @@
{
"items": [
"Leaf Cap",
"Leaf Boots",
"Leaf Pants",
"Leaf Tunic"
],
"bonuses": [
{},
{
"hprPct": 5,
"thorns": 7,
"hpBonus": 10,
"hprRaw": 1
},
{
"hprPct": 12,
"thorns": 18,
"hpBonus": 20,
"hprRaw": 3
},
{
"hprPct": 25,
"thorns": 35,
"hpBonus": 60,
"hprRaw": 7
}
]
}

View file

@ -1,73 +0,0 @@
{
"items": [
"Morph-Stardust",
"Morph-Ruby",
"Morph-Amethyst",
"Morph-Emerald",
"Morph-Topaz",
"Morph-Gold",
"Morph-Iron",
"Morph-Steel"
],
"bonuses": [
{},
{
"xpb": 5,
"lb": 5
},
{
"mr": 1,
"xpb": 10,
"lb": 10,
"spRaw2": -1,
"hpBonus": 125
},
{
"mr": 1,
"xpb": 15,
"lb": 15,
"spRaw2": -1,
"hpBonus": 425
},
{
"mr": 2,
"xpb": 35,
"lb": 35,
"hpBonus": 1325,
"spRaw2": -1,
"spRaw4": -1
},
{
"mr": 2,
"xpb": 55,
"lb": 55,
"hpBonus": 2575,
"spRaw2": -1,
"spRaw4": -1
},
{
"mr": 3,
"xpb": 80,
"lb": 80,
"hpBonus": 4450,
"spRaw1": -1,
"spRaw2": -1,
"spRaw4": -1
},
{
"mr": 4,
"xpb": 100,
"lb": 100,
"str": 15,
"dex": 15,
"int": 15,
"agi": 15,
"def": 15,
"hpBonus": 6800,
"spRaw1": -1,
"spRaw2": -1,
"spRaw3": -1,
"spRaw4": -1
}
]
}

View file

@ -1,33 +0,0 @@
{
"items": [
"Nether Cap",
"Nether Boots",
"Nether Pants",
"Nether Tunic"
],
"bonuses": [
{},
{
"ls": 5,
"expd": 2,
"hprRaw": -1,
"fDamPct": 2,
"wDamPct": -10
},
{
"ls": 15,
"expd": 10,
"hprRaw": -2,
"fDamPct": 8,
"wDamPct": -25
},
{
"ls": 50,
"def": 15,
"expd": 60,
"hprRaw": -20,
"fDamPct": 42,
"wDamPct": -45
}
]
}

View file

@ -1,29 +0,0 @@
{
"items": [
"Outlaw Cap",
"Outlaw Boots",
"Outlaw Pants",
"Outlaw Tunic"
],
"bonuses": [
{},
{
"ls": 11,
"xpb": 5,
"agi": 4,
"eSteal": 2
},
{
"ls": 22,
"xpb": 10,
"agi": 8,
"eSteal": 4
},
{
"ls": 45,
"xpb": 25,
"agi": 28,
"eSteal": 8
}
]
}

View file

@ -1,13 +0,0 @@
{
"items": [
"Pigman Helmet",
"Pigman Battle Hammer"
],
"bonuses": [
{},
{
"str": 20,
"eDamPct": 40
}
]
}

View file

@ -1,14 +0,0 @@
{
"items": [
"Red Team Boots",
"Red Team Leggings",
"Red Team Chestplate",
"Red Team Helmet"
],
"bonuses": [
{},
{},
{},
{}
]
}

View file

@ -1,46 +0,0 @@
{
"items": [
"Relic Helmet",
"Relic Boots",
"Relic Leggings",
"Relic Chestplate"
],
"bonuses": [
{},
{
"xpb": 10,
"lb": 10,
"hpBonus": 65,
"fDamPct": 5,
"wDamPct": 5,
"aDamPct": 5,
"tDamPct": 5,
"eDamPct": 5
},
{
"xpb": 25,
"lb": 25,
"hpBonus": 200,
"fDamPct": 12,
"wDamPct": 12,
"aDamPct": 12,
"tDamPct": 12,
"eDamPct": 12
},
{
"xpb": 50,
"lb": 50,
"str": 8,
"dex": 8,
"int": 8,
"agi": 8,
"def": 8,
"hpBonus": 425,
"fDamPct": 25,
"wDamPct": 25,
"aDamPct": 25,
"tDamPct": 25,
"eDamPct": 25
}
]
}

View file

@ -1,38 +0,0 @@
{
"items": [
"Saint's Shawl",
"Saint's Sandals",
"Saint's Leggings",
"Saint's Tunic"
],
"bonuses": [
{},
{
"mr": 1,
"sdPct": -5,
"mdPct": -10,
"def": 5,
"spRegen": 5,
"wDamPct": 10,
"aDamPct": 10
},
{
"mr": 3,
"sdPct": -10,
"mdPct": -20,
"def": 10,
"spRegen": 10,
"wDamPct": 20,
"aDamPct": 20
},
{
"mr": 5,
"sdPct": -15,
"mdPct": -35,
"def": 30,
"spRegen": 100,
"wDamPct": 35,
"aDamPct": 35
}
]
}

View file

@ -1,17 +0,0 @@
{
"items": [
"Silverfish Helm",
"Silverfish Boots"
],
"bonuses": [
{
"spd": 5
},
{
"agi": 10,
"thorns": 20,
"spd": 20,
"poison": 290
}
]
}

View file

@ -1,24 +0,0 @@
{
"items": [
"Skien Boots",
"Skien Leggings",
"Skien's Fatigues"
],
"bonuses": [
{},
{
"sdPct": -10,
"mdPct": 12,
"sdRaw": -40,
"mdRaw": 30
},
{
"sdPct": -35,
"mdPct": 30,
"dex": 15,
"spd": 8,
"sdRaw": -90,
"mdRaw": 125
}
]
}

View file

@ -1,17 +0,0 @@
{
"items": [
"Slime Boots",
"Slime Plate"
],
"bonuses": [
{},
{
"hprPct": 35,
"thorns": 15,
"spd": -6,
"poison": 300,
"hpBonus": 600,
"jh": 1
}
]
}

View file

@ -1,38 +0,0 @@
{
"items": [
"Snail Helm",
"Snail Boots",
"Snail Leggings",
"Snail Mail"
],
"bonuses": [
{},
{
"str": 7,
"agi": -5,
"thorns": 10,
"spd": -5,
"poison": 880,
"hpBonus": 1100,
"hprRaw": 125
},
{
"str": 14,
"agi": -10,
"thorns": 20,
"spd": -10,
"poison": 2650,
"hpBonus": 2675,
"hprRaw": 275
},
{
"str": 21,
"agi": -15,
"thorns": 40,
"spd": -15,
"poison": 5500,
"hpBonus": 5500,
"hprRaw": 575
}
]
}

View file

@ -1,32 +0,0 @@
{
"items": [
"Snow Helmet",
"Snow Boots",
"Snow Pants",
"Snow Tunic"
],
"bonuses": [
{},
{
"hprPct": -10,
"mr": 1,
"sdPct": 4,
"ref": 10,
"thorns": 8
},
{
"hprPct": -20,
"mr": 2,
"sdPct": 12,
"ref": 30,
"thorns": 24
},
{
"hprPct": -35,
"mr": 4,
"sdPct": 28,
"ref": 70,
"thorns": 55
}
]
}

View file

@ -1,24 +0,0 @@
{
"items": [
"Spinneret",
"Abdomen",
"Cephalothorax"
],
"bonuses": [
{},
{
"xpb": 10,
"dex": 2,
"agi": 2,
"spd": 7,
"poison": 35
},
{
"xpb": 25,
"dex": 6,
"agi": 6,
"spd": 19,
"poison": 130
}
]
}

View file

@ -1,14 +0,0 @@
{
"items": [
"Spore Cap",
"Spore Shortsword"
],
"bonuses": [
{},
{
"ls": 20,
"expd": 20,
"poison": 70
}
]
}

View file

@ -1,42 +0,0 @@
{
"items": [
"Thanos Legionnaire Helm",
"Thanos Legionnaire Greaves",
"Thanos Legionnaire Leggings",
"Thanos Legionnaire Plate"
],
"bonuses": [
{},
{
"str": 1,
"dex": -1,
"int": -1,
"agi": 1,
"def": 1,
"spd": 2,
"hprRaw": 60,
"mdRaw": 60
},
{
"str": 4,
"dex": -4,
"int": -4,
"agi": 4,
"def": 4,
"spd": 8,
"hprRaw": 180,
"mdRaw": 180
},
{
"str": 15,
"dex": -15,
"int": -15,
"agi": 15,
"def": 15,
"spd": 20,
"atkTier": 1,
"hprRaw": 480,
"mdRaw": 480
}
]
}

View file

@ -1,30 +0,0 @@
{
"items": [
"Thunder Relic Helmet",
"Thunder Relic Boots",
"Thunder Relic Leggings",
"Thunder Relic Chestplate"
],
"bonuses": [
{},
{
"xpb": 5,
"lb": 10,
"hpBonus": 55,
"mdRaw": 12
},
{
"xpb": 10,
"lb": 25,
"hpBonus": 180,
"mdRaw": 32
},
{
"xpb": 25,
"lb": 50,
"dex": 20,
"hpBonus": 380,
"mdRaw": 105
}
]
}

View file

@ -1,27 +0,0 @@
{
"items": [
"Tribal Cap",
"Tribal Boots",
"Tribal Pants",
"Tribal Tunic"
],
"bonuses": [
{},
{
"str": 2,
"spd": 5
},
{
"str": 5,
"agi": 2,
"spd": 10
},
{
"sdPct": -15,
"str": 10,
"agi": 5,
"spd": 15,
"atkTier": 1
}
]
}

View file

@ -1,35 +0,0 @@
{
"items": [
"Ultramarine Crown",
"Ultramarine Boots",
"Ultramarine Belt",
"Ultramarine Cape"
],
"bonuses": [
{},
{
"mr": 2,
"mdPct": -24,
"int": 5,
"wDamPct": 10,
"tDamPct": -8,
"wDefPct": 16
},
{
"mr": 5,
"mdPct": -54,
"int": 15,
"wDamPct": 20,
"tDamPct": -18,
"wDefPct": 36
},
{
"mr": 8,
"mdPct": -90,
"int": 25,
"wDamPct": 40,
"tDamPct": -30,
"wDefPct": 56
}
]
}

View file

@ -1,15 +0,0 @@
{
"items": [
"Veekhat's Horns",
"Veekhat's Udders"
],
"bonuses": [
{},
{
"mdPct": 30,
"ms": 2,
"spd": 25,
"spPct2": -40
}
]
}

View file

@ -1,16 +0,0 @@
{
"items": [
"Mask of the Dark Vexations",
"Staff of the Dark Vexations"
],
"bonuses": [
{},
{
"mr": 2,
"sdPct": 15,
"mdPct": -15,
"sdRaw": 30,
"spPct2": -50
}
]
}

View file

@ -1,14 +0,0 @@
{
"items": [
"Villager Pants",
"Villager Mail"
],
"bonuses": [
{},
{
"xpb": 20,
"lb": 60,
"eSteal": 8
}
]
}

View file

@ -1,35 +0,0 @@
{
"items": [
"Visceral Skullcap",
"Visceral Toe",
"Visceral Legs",
"Visceral Chest"
],
"bonuses": [
{},
{
"hprPct": 30,
"mdPct": 10,
"ls": 45,
"hpBonus": -1000,
"hprRaw": 35,
"mdRaw": 40
},
{
"hprPct": 100,
"mdPct": 25,
"ls": 90,
"hpBonus": -2500,
"hprRaw": 75,
"mdRaw": 80
},
{
"hprPct": 350,
"mdPct": 50,
"ls": 180,
"hpBonus": -4000,
"hprRaw": 145,
"mdRaw": 165
}
]
}

View file

@ -1,30 +0,0 @@
{
"items": [
"Water Relic Helmet",
"Water Relic Boots",
"Water Relic Leggings",
"Water Relic Chestplate"
],
"bonuses": [
{},
{
"mr": 1,
"xpb": 5,
"lb": 10,
"hpBonus": 55
},
{
"mr": 2,
"xpb": 10,
"lb": 25,
"hpBonus": 170
},
{
"mr": 4,
"xpb": 25,
"lb": 50,
"int": 20,
"hpBonus": 360
}
]
}

View file

@ -1,3 +1,8 @@
"""
An old script used for testing skillpoint assignment algorithms. Not used commonly.
"""
import json
import math
import copy
@ -7,7 +12,7 @@ with open("clean.json") as infile:
def clean_item(item):
if not "displayName" in item:
item["displayName"] = item["name"];
item["displayName"] = item["name"]
return item
items = data["items"]

View file

@ -1,63 +0,0 @@
import requests
import json
import time

# One-off data-pull script for Wynncraft API data.
# The triple-quoted blocks below are earlier, now-disabled stages of the
# pipeline (territory fetch / clean / compress), kept as reference.
# Only the mapLocations fetch at the bottom is live.

#used for requesting the api
'''response = requests.get("https://api.wynncraft.com/public_api.php?action=territoryList")
with open("terrs.json", "w") as outfile:
    outfile.write(json.dumps(response.json()))'''
#used for cleaning the data
'''with open("terrs.json", "r") as infile:
    data = json.load(infile)
data = data["territories"]
delkeys = ["territory","acquired","attacker"]
for t in data:
    for key in delkeys:
        del data[t][key]
    data[t]["neighbors"] = []
with open("terrs_compress.json", "w") as outfile:
    json.dump(data,outfile)
with open("terrs_clean.json", "w") as outfile:
    json.dump(data,outfile,indent = 2)'''
#used for pushing data to compress (edit in clean, move to compress)
'''with open("terrs.json", "r") as infile:
    data = json.load(infile)["territories"]'''
'''with open("terrs_clean.json", "r") as infile:
    newdata = json.load(infile)'''
'''for t in newdata:
    del newdata[t]["attacker"]
    del newdata[t]["acquired"]'''
'''response = requests.get("https://gist.githubusercontent.com/kristofbolyai/87ae828ecc740424c0f4b3749b2287ed/raw/0735f2e8bb2d2177ba0e7e96ade421621070a236/territories.json").json()
for t in data:
    data[t]["neighbors"] = response[t]["Routes"]
    data[t]["resources"] = response[t]["Resources"]
    data[t]["storage"] = response[t]["Storage"]
    data[t]["emeralds"] = response[t]["Emeralds"]
    data[t]["doubleemeralds"] = response[t]["DoubleEmerald"]
    data[t]["doubleresource"] = response[t]["DoubleResource"]'''
'''with open("terrs_clean.json", "w") as outfile:
    json.dump(newdata,outfile,indent=2)
with open("terrs_compress.json", "w") as outfile:
    json.dump(newdata,outfile)'''

# Live stage: fetch map locations and write three variants of the same
# payload — raw, pretty-printed ("clean"), and minified ("compress").
response = requests.get('https://api.wynncraft.com/public_api.php?action=mapLocations').json()
# Drop the API request metadata; only the location data is kept.
del response["request"]
with open("maploc.json", "w") as outfile:
    json.dump(response, outfile)
with open("maploc_clean.json", "w") as outfile:
    json.dump(response, outfile, indent = 2)
with open("maploc_compress.json", "w") as outfile:
    json.dump(response, outfile)

View file

@ -1,179 +0,0 @@
"""
NOTE!!!!!!!
DEMON TIDE 1.20 IS HARD CODED!
AMBIVALENCE IS REMOVED!

Processing script: reads a raw API item dump ("dump.json"), folds in set
data from the sets/ folder, renames API keys to Wynnbuilder's short ID
names, assigns stable numeric item ids (persisted in "id_map.json"), and
writes pretty ("clean.json") and minified ("compress.json") outputs.
"""
import json
with open("dump.json", "r") as infile:
    data = json.loads(infile.read())
items = data["items"]
# Drop API request metadata if present.
if "request" in data:
    del data["request"]
import os
# Load every set definition; also build item-name -> set-name lookup.
sets = dict()
item_set_map = dict()
for filename in os.listdir('sets'):
    if "json" not in filename:
        continue
    # Filenames are URL-ish ("+" for space, "%27" for apostrophe); the
    # leading character is stripped — TODO confirm why (presumably a prefix).
    set_name = filename[1:].split(".")[0].replace("+", " ").replace("%27", "'")
    with open("sets/"+filename) as set_info:
        set_obj = json.load(set_info)
        for item in set_obj["items"]:
            item_set_map[item] = set_name
    sets[set_name] = set_obj
data["sets"] = sets
# API key -> Wynnbuilder short-key renames. Commented entries are kept
# as-is (or deleted below, where noted).
translate_mappings = {
    #"name": "name",
    #"displayName": "displayName",
    #"tier": "tier",
    #"set": "set",
    "sockets": "slots",
    #"type": "type",
    #"armorType": "armorType", (deleted)
    #"armorColor": "color", (deleted)
    #"addedLore": "lore", (deleted)
    #"material": "material", (deleted)
    "dropType": "drop",
    #"quest": "quest",
    "restrictions": "restrict",
    "damage": "nDam",
    "fireDamage": "fDam",
    "waterDamage": "wDam",
    "airDamage": "aDam",
    "thunderDamage": "tDam",
    "earthDamage": "eDam",
    "attackSpeed": "atkSpd",
    "health": "hp",
    "fireDefense": "fDef",
    "waterDefense": "wDef",
    "airDefense": "aDef",
    "thunderDefense": "tDef",
    "earthDefense": "eDef",
    "level": "lvl",
    "classRequirement": "classReq",
    "strength": "strReq",
    "dexterity": "dexReq",
    "intelligence": "intReq",
    "agility": "agiReq",
    "defense": "defReq",
    "healthRegen": "hprPct",
    "manaRegen": "mr",
    "spellDamage": "sdPct",
    "damageBonus": "mdPct",
    "lifeSteal": "ls",
    "manaSteal": "ms",
    "xpBonus": "xpb",
    "lootBonus": "lb",
    "reflection": "ref",
    "strengthPoints": "str",
    "dexterityPoints": "dex",
    "intelligencePoints": "int",
    "agilityPoints": "agi",
    "defensePoints": "def",
    #"thorns": "thorns",
    "exploding": "expd",
    "speed": "spd",
    "attackSpeedBonus": "atkTier",
    #"poison": "poison",
    "healthBonus": "hpBonus",
    "soulPoints": "spRegen",
    "emeraldStealing": "eSteal",
    "healthRegenRaw": "hprRaw",
    "spellDamageRaw": "sdRaw",
    "damageBonusRaw": "mdRaw",
    "bonusFireDamage": "fDamPct",
    "bonusWaterDamage": "wDamPct",
    "bonusAirDamage": "aDamPct",
    "bonusThunderDamage": "tDamPct",
    "bonusEarthDamage": "eDamPct",
    "bonusFireDefense": "fDefPct",
    "bonusWaterDefense": "wDefPct",
    "bonusAirDefense": "aDefPct",
    "bonusThunderDefense": "tDefPct",
    "bonusEarthDefense": "eDefPct",
    "accessoryType": "type",
    "identified": "fixID",
    #"skin": "skin",
    #"category": "category",
    "spellCostPct1": "spPct1",
    "spellCostRaw1": "spRaw1",
    "spellCostPct2": "spPct2",
    "spellCostRaw2": "spRaw2",
    "spellCostPct3": "spPct3",
    "spellCostRaw3": "spRaw3",
    "spellCostPct4": "spPct4",
    "spellCostRaw4": "spRaw4",
    "rainbowSpellDamageRaw": "rainbowRaw",
    #"sprint": "sprint",
    "sprintRegen": "sprintReg",
    "jumpHeight": "jh",
    "lootQuality": "lq",
    "gatherXpBonus": "gXp",
    "gatherSpeed": "gSpd",
}
# API keys dropped entirely from every item.
delete_keys = [
    "addedLore",
    #"skin",
    "armorType",
    "armorColor",
    "material"
]
import os
# Reuse the persisted name -> id map when it exists so ids stay stable
# across runs; otherwise seed it from the current dump order.
if os.path.exists("id_map.json"):
    with open("id_map.json","r") as id_mapfile:
        id_map = json.load(id_mapfile)
else:
    id_map = {item["name"]: i for i, item in enumerate(items)}
texture_names = []
import base64
for item in items:
    # Strip unwanted keys, then drop zero/None values to shrink the output.
    for key in delete_keys:
        if key in item:
            del item[key]
    for k in list(item.keys()):
        if (item[k] == 0 or item[k] is None):
            del item[k]
    # Rename surviving API keys to the short forms.
    for k, v in translate_mappings.items():
        if k in item:
            item[v] = item[k]
            del item[k]
    # New items get the next id slot appended to the map.
    if not (item["name"] in id_map):
        id_map[item["name"]] = len(id_map)
        print(f'New item: {item["name"]}')
    item["id"] = id_map[item["name"]]
    item["type"] = item["type"].lower()
    if item["name"] in item_set_map:
        item["set"] = item_set_map[item["name"]]
#with open("1_20_ci.json", "r") as ci_file:
#    ci_items = json.load(ci_file)
#    items.extend(ci_items)
with open("id_map.json","w") as id_mapfile:
    json.dump(id_map, id_mapfile, indent=2)
with open("clean.json", "w") as outfile:
    json.dump(data, outfile, indent=2)
with open("compress.json", "w") as outfile:
    json.dump(data, outfile)

View file

@ -1,214 +0,0 @@
"""
NOTE!!!!!!!
DEMON TIDE 1.20 IS HARD CODED!
AMBIVALENCE IS REMOVED!
"""
import json
import os
with open("dump.json", "r") as infile:
data = json.load(infile)
with open("updated.json", "r") as oldfile:
old_data = json.load(oldfile)
items = data["items"]
old_items = old_data["items"]
old_tomes = old_data["tomes"]
if "request" in data:
del data["request"]
#this script does not change sets or tomes. use the dedicated set and tome update scripts to update.
data["sets"] = old_data["sets"]
data["tomes"] = old_data["tomes"]
item_set_map = dict()
for set_name, set_data in data["sets"].items():
for item_name in set_data["items"]:
item_set_map[item_name] = set_name
must_mappings = [
"strength",
"dexterity",
"intelligence",
"agility",
"defense",
"strengthPoints",
"dexterityPoints",
"intelligencePoints",
"agilityPoints",
"defensePoints",
]
translate_mappings = {
#"name": "name",
#"displayName": "displayName",
#"tier": "tier",
#"set": "set",
"sockets": "slots",
#"type": "type",
#"armorType": "armorType", (deleted)
"armorColor": "color", #(deleted)
"addedLore": "lore", #(deleted)
#"material": "material", (deleted)
"dropType": "drop",
#"quest": "quest",
"restrictions": "restrict",
"damage": "nDam",
"fireDamage": "fDam",
"waterDamage": "wDam",
"airDamage": "aDam",
"thunderDamage": "tDam",
"earthDamage": "eDam",
"attackSpeed": "atkSpd",
"health": "hp",
"fireDefense": "fDef",
"waterDefense": "wDef",
"airDefense": "aDef",
"thunderDefense": "tDef",
"earthDefense": "eDef",
"level": "lvl",
"classRequirement": "classReq",
"strength": "strReq",
"dexterity": "dexReq",
"intelligence": "intReq",
"agility": "agiReq",
"defense": "defReq",
"healthRegen": "hprPct",
"manaRegen": "mr",
"spellDamage": "sdPct",
"damageBonus": "mdPct",
"lifeSteal": "ls",
"manaSteal": "ms",
"xpBonus": "xpb",
"lootBonus": "lb",
"reflection": "ref",
"strengthPoints": "str",
"dexterityPoints": "dex",
"intelligencePoints": "int",
"agilityPoints": "agi",
"defensePoints": "def",
#"thorns": "thorns",
"exploding": "expd",
"speed": "spd",
"attackSpeedBonus": "atkTier",
#"poison": "poison",
"healthBonus": "hpBonus",
"soulPoints": "spRegen",
"emeraldStealing": "eSteal",
"healthRegenRaw": "hprRaw",
"spellDamageRaw": "sdRaw",
"damageBonusRaw": "mdRaw",
"bonusFireDamage": "fDamPct",
"bonusWaterDamage": "wDamPct",
"bonusAirDamage": "aDamPct",
"bonusThunderDamage": "tDamPct",
"bonusEarthDamage": "eDamPct",
"bonusFireDefense": "fDefPct",
"bonusWaterDefense": "wDefPct",
"bonusAirDefense": "aDefPct",
"bonusThunderDefense": "tDefPct",
"bonusEarthDefense": "eDefPct",
"accessoryType": "type",
"identified": "fixID",
#"skin": "skin",
#"category": "category",
"spellCostPct1": "spPct1",
"spellCostRaw1": "spRaw1",
"spellCostPct2": "spPct2",
"spellCostRaw2": "spRaw2",
"spellCostPct3": "spPct3",
"spellCostRaw3": "spRaw3",
"spellCostPct4": "spPct4",
"spellCostRaw4": "spRaw4",
"rainbowSpellDamageRaw": "rainbowRaw",
#"sprint": "sprint",
"sprintRegen": "sprintReg",
"jumpHeight": "jh",
"lootQuality": "lq",
"gatherXpBonus": "gXp",
"gatherSpeed": "gSpd",
}
delete_keys = [
#"addedLore",
#"skin",
#"armorType",
#"armorColor",
#"material"
]
id_map = {item["name"]: item["id"] for item in old_items}
used_ids = set([v for k, v in id_map.items()])
max_id = 0
known_item_names = set()
for item in items:
known_item_names.add(item["name"])
old_items_map = dict()
unchanged_items = []
remap_items = []
for item in old_items:
if "remapID" in item:
remap_items.append(item)
elif item["name"] not in known_item_names:
unchanged_items.append(item)
old_items_map[item["name"]] = item
for item in items:
for key in delete_keys:
if key in item:
del item[key]
for k in list(item.keys()):
if (item[k] == 0 or item[k] is None) and not k in must_mappings:
del item[k]
for k, v in translate_mappings.items():
if k in item:
item[v] = item[k]
del item[k]
if not (item["name"] in id_map):
while max_id in used_ids:
max_id += 1
used_ids.add(max_id)
id_map[item["name"]] = max_id
print(f'New item: {item["name"]} (id: {max_id})')
item["id"] = id_map[item["name"]]
item["type"] = item["type"].lower()
if "displayName" in item:
item_name = item["displayName"]
else:
item_name = item["name"]
if item_name in item_set_map:
item["set"] = item_set_map[item_name]
if item["name"] in old_items_map:
old_item = old_items_map[item["name"]]
if "hideSet" in old_item:
item["hideSet"] = old_item["hideSet"]
items.extend(unchanged_items)
items.extend(remap_items)
with open("id_map.json","w") as id_mapfile:
print("{", file=id_mapfile)
outputs = []
for v, k in sorted((v, k) for k, v in id_map.items()):
outputs.append(f' "{k}": {v}')
print(',\n'.join(outputs), file=id_mapfile)
print("}", file=id_mapfile)
with open("clean.json", "w") as outfile:
json.dump(data, outfile, indent=2)
with open("compress.json", "w") as outfile:
json.dump(data, outfile)

View file

@ -1,25 +0,0 @@
import os
import json  # FIX: json was used below but never imported (NameError)
'''takes the data in updated.json and the jsons in the sets folder to update the sets in the db.'''
with open("updated.json", "r") as oldfile:
    data = json.load(oldfile)
sets = dict()
# FIX: item_set_map was assigned into without ever being initialized
# (NameError on the first set file). It is only built here for parity with
# the other processing scripts; this script itself only writes `sets`.
item_set_map = dict()
for filename in os.listdir('sets'):
    if "json" not in filename:
        continue
    # Set filenames are URL-ish ("+" for space, "%27" for apostrophe);
    # the leading character is stripped, matching the other set loaders.
    set_name = filename[1:].split(".")[0].replace("+", " ").replace("%27", "'")
    with open("sets/"+filename) as set_info:
        set_obj = json.load(set_info)
        for item in set_obj["items"]:
            item_set_map[item] = set_name
    sets[set_name] = set_obj
# Replace the set table and write both pretty and minified outputs.
data["sets"] = sets
with open("clean.json", "w") as outfile:
    json.dump(data, outfile, indent=2)
with open("compress.json", "w") as outfile:
    json.dump(data, outfile)

View file

@ -1,42 +0,0 @@
import os
import json
'''takes updated data in tomes.json and updates the tome map'''
#read in tomes json file
with open("../tomes.json", "r") as tomesfile:
    tome_data = json.load(tomesfile)
tomes = dict()
tome_mapping = dict()
# Pass 1: record existing name -> id pairs and find the highest id in use,
# so new tomes can be numbered after it without colliding.
max_id = 0
for tome in tome_data:
    if "id" in tome:
        if tome["id"] > max_id:
            max_id = tome["id"]
        tome_mapping[tome["name"]] = tome["id"]
# Pass 2: assign fresh sequential ids to tomes that lack one, and build
# the name -> tome lookup for every tome.
i = max_id + 1
for tome in tome_data:
    if "id" not in tome:
        tome["id"] = i
        tome_mapping[tome["name"]] = i
        i += 1
    tomes[tome["name"]] = tome
'''
with open("clean.json", "w") as outfile:
    json.dump(data, outfile, indent=2)
with open("compress.json", "w") as outfile:
    json.dump(data, outfile)
'''
# Write the name -> id map and the (now fully id-assigned) tome list.
with open("tome_map.json", "w") as outfile:
    json.dump(tome_mapping, outfile, indent = 2)
with open("../tomes2.json", "w") as outfile:
    json.dump(tome_data, outfile, indent = 2)

View file

@ -1,3 +1,12 @@
"""
Used to validate item file - searches for duplicate items. Does not change the file.
TODO: Eventually integrate this into the process scripts, including ings and recipes
Usage:
python validate.py [input file]
"""
import json
import sys