1d6b302f38
author hppeng <hppeng> 1699417872 -0800 committer hppeng <hppeng> 1720354753 -0700 parent3e725eded8
author hppeng <hppeng> 1699417872 -0800 committer hppeng <hppeng> 1720354749 -0700 parent3e725eded8
author hppeng <hppeng> 1699417872 -0800 committer hppeng <hppeng> 1720354744 -0700 parent3e725eded8
author hppeng <hppeng> 1699417872 -0800 committer hppeng <hppeng> 1720354739 -0700 parent3e725eded8
author hppeng <hppeng> 1699417872 -0800 committer hppeng <hppeng> 1720354735 -0700 parent3e725eded8
author hppeng <hppeng> 1699417872 -0800 committer hppeng <hppeng> 1720354730 -0700 parent3e725eded8
author hppeng <hppeng> 1699417872 -0800 committer hppeng <hppeng> 1720354688 -0700 Update recipes.json (#265) Change ratio of gems to oil as it has been updated in 2.0.4 > Updated the Jeweling Recipe Changes (Bracelet- 2:1 gems:oil, Necklaces- 3:1 gems:oil) https://forums.wynncraft.com/threads/2-0-4-full-changelog-new-bank-lootruns-more.310535/ Finish updating recipes.json why are there 4 versions of this file active at any given time Fix damage calculation for rainbow raw wow this bug has been here for a LONG time also bump version for ing db Bunch of bugfixes - new major ID - divine honor: reduce earth damage - radiance: don't boost tomes, xp/loot bonuses atree: - parry: minor typo - death magnet: marked dep - nightcloak knife: 15s desc Api v3 (#267) * Tweak ordering to be consistent internally * v3 items (#266) * item_wrapper script for updating item data with v3 endpoint * metadata from v3 * v3 item format For the purpose of wynnbuilder, additional mapping might be needed. * v3 item format additional mapping might be needed for wb * v3 compressed item json * clean item json v3 format * Update translate map to api v3 partially... we will need to redo scripts to flatmap all the items * Fix items for 2.0.4.3 finally * New ingredients (and parse script update) just realized I forgot to commit the parse script this whole time * Forgot to commit data files, and bump ing db version * Sketchily reverse translate major ids internalname and separate lookup table lol * Forgot to update data files todo: script should update all files at once * Bump wynn version number already outdated... 
* Forgot to update 2.0.4.3 major ids --------- Co-authored-by: hppeng <hppeng> Co-authored-by: RawFish69 <108964215+RawFish69@users.noreply.github.com> Add missing fields to ingreds missing ids and consumableIDs tags in some ingreds Fix missing properties in item search setup these should be unified maybe to avoid duplicated code Fix sacshrine dependency on fluid healing also: fix ": " in item searcher I managed to mess up all major ids note: major ids min file is generated along with atree. it uses numeric ids, not just json compress 2.0.4.4 update (#269) * 2.0.4.4 update Fix v3 item api debug script Implement hellfire (discombob disallow not happening yet) * Fix boiling blood implementation slightly more intuitive also, janky first pass implementation for hellfire * Atree default update Allow sliders to specify a default value, for puppet and boiling blood for now * Fix rainbow def display on items and build stats Calculate into raw def correctly * Atree backend improvements Allow major ids to have dependencies Implement cherry bomb new ver. (wooo replace_spell just works out of the box!) Add comments to atree.js * Fix name of normal items don't you love it when wynn api makes breaking changes for no reason * Misc bugfix Reckless abandon req Tempest new damage ID in search * Fix major id search and temblor desc * Fix blockers on mage * Fix flaming uppercut implementation * Force base dps display to display less digits * Tomes finally pulling from the API but still with alias feature enabled! * Lootrun tomes (finally?) cool? maybe? 
* Fix beachside set set bonus --------- Co-authored-by: hppeng <hppeng> Fix rainbow def display on items and build stats Calculate into raw def correctly Fix major id search and temblor desc Force base dps display to display less digits Fix beachside set set bonus Fix build decode error reading only 7 tome fields no matter what Give NONE tomes correct ids in load_tome i hate this system so much Allow searching for max/min of ranges Fix crafted item damage display in the process, also update powder calculation logic! Should be fully correct now... TL;DR: Weapon damage is floating point; item display is wrong; ingame displays (damage floaters and compass) are floored. Fluid healing now multiplicative with heal efficiency ID NOTE: this breaks backwards compatibility with older atree jsons. Do we care about this? Realizing how much of a nightmare it will be (and already is) to keep atree fully backwards compatible. Maybe that will be something left to `git clone` instead. fix (#274)
280 lines
8.4 KiB
JavaScript
280 lines
8.4 KiB
JavaScript
const DB_VERSION = 133;
// @See https://github.com/mdn/learning-area/blob/master/javascript/apis/client-side-storage/indexeddb/video-store/index.js

// Handle to the open IndexedDB connection (assigned in load_init).
let db;
// True when the DB was just (re)created and must be refilled from the remote json.
let reload = false;
// True once item/set data is fully loaded and the lookup maps are built.
let load_complete = false;
// Guards against two concurrent load attempts.
let load_in_progress = false;

// Flat list of all item objects (populated by load / load_local / load_old_version).
let items;
// Map: set name -> set data.
let sets = new Map();
// Map: item display name -> item object (built in init_maps).
let itemMap;
// Map: item id -> display name ("" for the placeholder "none" items).
let idMap;
// Map: old item id -> remapped id, for items that carry a `remapID`.
let redirectMap;
// Map: item type (helmet, ring, ...) -> list of display names.
let itemLists = new Map();
|
|
/*
 * Load item set from local DB. Calls init() on success.
 *
 * Reads every item from the `item_db` store and every (key, value) pair
 * from the `set_db` store into the module-level `items` array and `sets`
 * map, then builds the lookup maps and closes the connection.
 * Resolves once the read transaction completes; rejects if either read
 * fails or the transaction itself errors/aborts.
 */
async function load_local() {
    return new Promise(function(resolve, reject) {
        let get_tx = db.transaction(['item_db', 'set_db'], 'readonly');
        let sets_store = get_tx.objectStore('set_db');
        let get_store = get_tx.objectStore('item_db');

        let request = get_store.getAll();
        request.onerror = function(event) {
            reject("Could not read local item db...");
        };
        request.onsuccess = function(event) {
            // Result is consumed in get_tx.oncomplete below, once the
            // whole transaction is known to have succeeded.
            console.log("Successfully read local item db.");
        };

        // key-value iteration (hpp don't break this again)
        // https://stackoverflow.com/questions/47931595/indexeddb-getting-all-data-with-keys
        let request2 = sets_store.openCursor();
        request2.onerror = function(event) {
            reject("Could not read local set db...");
        };
        request2.onsuccess = function(event) {
            let cursor = event.target.result;
            if (cursor) {
                let key = cursor.primaryKey;
                let value = cursor.value;
                sets.set(key, value);
                cursor.continue();
            }
            else {
                // no more results
                console.log("Successfully read local set db.");
            }
        };

        // FIX: if the transaction errors or aborts, `oncomplete` never
        // fires and (without these handlers) the promise would hang forever.
        get_tx.onerror = function(event) {
            reject("Local item/set db transaction failed...");
        };
        get_tx.onabort = function(event) {
            reject("Local item/set db transaction aborted...");
        };

        get_tx.oncomplete = function(event) {
            items = request.result;
            init_maps();
            load_complete = true;
            db.close();
            resolve();
        };
    });
}
|
|
|
|
/*
 * Clean bad item data.
 * Assigns `displayName` to equal `name` if it is undefined.
 * String values default to empty string.
 * Numeric values default to 0.
 * Major ID defaults to empty list.
 */
function clean_item(item) {
    // Items carrying a `remapID` are redirect stubs; leave them untouched.
    if (item.remapID !== undefined) {
        return;
    }

    if (item.displayName === undefined) {
        item.displayName = item.name;
    }

    // Pack the five skill stats and requirements into arrays
    // (order: str, dex, int, def, agi), defaulting missing entries to 0.
    const skill_keys = ["str", "dex", "int", "def", "agi"];
    item.skillpoints = skill_keys.map((k) => (item[k] === undefined ? 0 : item[k]));
    item.reqs = skill_keys.map((k) => (item[k + "Req"] === undefined ? 0 : item[k + "Req"]));
    item.has_negstat = item.skillpoints.some((sp) => sp < 0);

    // Back-fill defaults for every known field that is missing.
    for (const key of item_fields) {
        if (item[key] !== undefined) {
            continue;
        }
        if (key in str_item_fields) {
            item[key] = "";
        }
        else if (key == "majorIds") {
            item[key] = [];
        }
        else {
            item[key] = 0;
        }
    }
}
|
|
|
|
/*
 * Load the item database for an older game version.
 *
 * Fetches `data/<version_str>/items.json` from the current origin
 * (no cache-busting: cached copies are desirable here), cleans every
 * item, rebuilds the module-level `items`/`sets`, and rebuilds the
 * lookup maps. Sets `load_complete` when done.
 *
 * @param {String} version_str  Version folder name, e.g. "2.0.4.3".
 * @throws {Error} if the response has a non-OK HTTP status.
 */
async function load_old_version(version_str) {
    load_in_progress = true;
    let getUrl = window.location;
    // FIX: no trailing slash on baseUrl -- the template below adds one,
    // so the old code produced "host//data/...".
    let baseUrl = `${getUrl.protocol}//${getUrl.host}`;
    // No random string -- we want to use caching
    let url = `${baseUrl}/data/${version_str}/items.json`;
    let response = await fetch(url);
    if (!response.ok) {
        // FIX: without this, a 404 page surfaced as a confusing JSON parse error.
        throw new Error(`Failed to fetch ${url}: HTTP ${response.status}`);
    }
    let result = await response.json();
    items = result.items;
    for (const item of items) {
        clean_item(item);
    }
    let sets_ = result.sets;
    sets = new Map();
    for (const set in sets_) {
        sets.set(set, sets_[set]);
    }
    init_maps();
    load_complete = true;
}
|
|
|
|
/*
 * Load item set from remote DB (aka a big json file). Calls init() on success.
 *
 * Fetches `compress.json` (cache-busted), cleans each item, and writes
 * items and sets into the IndexedDB stores so later visits can use
 * load_local(). Waits for the write transaction to settle before
 * building the lookup maps and closing the connection.
 */
async function load() {
    let getUrl = window.location;
    let baseUrl = `${getUrl.protocol}//${getUrl.host}`;
    // "Random" string to prevent caching!
    let url = baseUrl + "/compress.json?" + new Date();
    let result = await (await fetch(url)).json();
    items = result.items;
    let sets_ = result.sets;

    let add_tx = db.transaction(['item_db', 'set_db'], 'readwrite');
    // FIX: IDBTransaction has no `complete` property and IDBRequest is not
    // a thenable, so the old `await Promise.all(add_promises)` resolved
    // immediately without waiting for any write. Wrap the transaction
    // events in a real Promise instead. An abort (e.g. out of space) is
    // still best-effort: we log and continue, since the in-memory data
    // is usable even if persisting it failed.
    let tx_done = new Promise(function(resolve) {
        add_tx.oncomplete = function() {
            resolve();
        };
        add_tx.onabort = function(e) {
            console.log(e);
            console.log("Not enough space...");
            resolve();
        };
    });

    let items_store = add_tx.objectStore('item_db');
    for (const item of items) {
        clean_item(item);
        let req = items_store.add(item, item.name);
        req.onerror = function() {
            console.log("ADD ITEM ERROR? " + item.name);
        };
    }

    let sets_store = add_tx.objectStore('set_db');
    for (const set in sets_) {
        sets_store.add(sets_[set], set);
        sets.set(set, sets_[set]);
    }

    await tx_done;
    init_maps();
    load_complete = true;
    db.close();
}
|
|
|
|
/*
 * Open the local IndexedDB and kick off data loading.
 *
 * If another load is already in progress, just waits for it to finish.
 * Otherwise loads from the remote json when the DB was (re)created
 * (`reload` set by onupgradeneeded), or from the local stores otherwise.
 */
async function load_init() {
    return new Promise((resolve, reject) => {
        const open_request = window.indexedDB.open('item_db', DB_VERSION);

        open_request.onerror = () => {
            reject("DB failed to open...");
        };

        open_request.onsuccess = async () => {
            db = open_request.result;
            if (load_in_progress) {
                // Someone else is already loading; poll until they finish.
                while (!load_complete) {
                    await sleep(100);
                }
                console.log("Skipping load...")
            }
            else {
                load_in_progress = true;
                if (reload) {
                    console.log("Using new data...")
                    await load();
                }
                else {
                    console.log("Using stored data...")
                    await load_local();
                }
            }
            resolve();
        };

        // Fires when DB_VERSION was bumped (or on first visit): drop both
        // object stores, recreate them empty, and force a remote reload.
        open_request.onupgradeneeded = (e) => {
            reload = true;

            // Deliberately shadows nothing global: this is the upgrading
            // connection from the event, not the module-level `db`.
            const upgrade_db = e.target.result;

            try {
                upgrade_db.deleteObjectStore('item_db');
            }
            catch (error) {
                console.log("Could not delete item DB. This is probably fine");
            }
            try {
                upgrade_db.deleteObjectStore('set_db');
            }
            catch (error) {
                console.log("Could not delete set DB. This is probably fine");
            }

            upgrade_db.createObjectStore('item_db');
            upgrade_db.createObjectStore('set_db');

            console.log("DB setup complete...");
        };
    });
}
|
|
|
|
// Placeholder "none" items, one per equipment slot, in build order:
// helmet, chestplate, leggings, boots, ring1, ring2, bracelet, necklace, weapon.
let none_items = [
    ["armor", "helmet", "No Helmet"],
    ["armor", "chestplate", "No Chestplate"],
    ["armor", "leggings", "No Leggings"],
    ["armor", "boots", "No Boots"],
    ["accessory", "ring", "No Ring 1"],
    ["accessory", "ring", "No Ring 2"],
    ["accessory", "bracelet", "No Bracelet"],
    ["accessory", "necklace", "No Necklace"],
    ["weapon", "dagger", "No Weapon"],
];

// Expand each [category, type, name] triple into a full zeroed-out item object.
none_items = none_items.map(function([category, type, name], i) {
    const item = {
        slots: 0,
        category: category,
        type: type,
        name: name,
        displayName: name,
        set: null,
        quest: null,
        skillpoints: [0, 0, 0, 0, 0],
        has_negstat: false,
        reqs: [0, 0, 0, 0, 0],
        fixID: true,
        tier: "Normal",
        // Placeholder ids are offset by 10000.
        id: 10000 + i,
        nDam: "0-0",
        eDam: "0-0",
        tDam: "0-0",
        wDam: "0-0",
        fDam: "0-0",
        aDam: "0-0",
    };
    clean_item(item);
    return item;
});
|
|
|
|
/*
 * Build the global lookup structures from `items` and `sets`:
 *   itemLists:   item type -> list of display names
 *   itemMap:     display name -> item object
 *   idMap:       item id -> display name ("" for placeholder items)
 *   redirectMap: old item id -> remapped id
 * Also appends the placeholder "none" items to `items` and back-fills
 * each set member's `set` field from the set data.
 */
function init_maps() {
    // List of 'raw' "none" items (No Helmet, etc), in order helmet, chestplate... ring1, ring2, brace, neck, weapon.
    for (const it of item_types) {
        itemLists.set(it, []);
    }

    itemMap = new Map();
    /* Mapping from item names to set names. */
    idMap = new Map();
    redirectMap = new Map();

    items = items.concat(none_items);
    //console.log(items);
    for (const item of items) {
        if (item.remapID !== undefined) {
            // Redirect stub for a renamed/removed item: record the remap only.
            redirectMap.set(item.id, item.remapID);
            continue;
        }
        itemLists.get(item.type).push(item.displayName);
        itemMap.set(item.displayName, item);
        // Placeholder items map to an empty display name.
        const display_name = none_items.includes(item) ? "" : item.displayName;
        idMap.set(item.id, display_name);
    }

    // Tag every item that belongs to a set with its set name.
    for (const [set_name, set_data] of sets) {
        for (const item_name of set_data.items) {
            itemMap.get(item_name).set = set_name;
        }
    }
}
|