-
Notifications
You must be signed in to change notification settings - Fork 54
/
build.mjs
329 lines (293 loc) · 11.8 KB
/
build.mjs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
import fs from 'node:fs/promises';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import minify from 'imagemin';
import minifyPng from 'imagemin-pngquant';
import minifyJpeg from 'imagemin-jpegtran';
import fetch from 'node-fetch';
import sharp from 'sharp';
import Progress from './progress.mjs';
import stringify from './stringify.mjs';
import scraper from './scraper.mjs';
import parser from './parser.mjs';
import hashManager from './hashManager.mjs';
import readJson from './readJson.mjs';
// Image hashes from the previous run; lets saveImage skip unchanged downloads.
const imageCache = await readJson(new URL('../data/cache/.images.json', import.meta.url));
// productCategory values that receive their own output file (see applyCustomCategories)
const allowedCustomCategories = ['SentinelWeapons'];
/**
 * Fetch a URL and return its body.
 * @param {string} url resource to fetch
 * @param {boolean} [binary] return raw bytes (Buffer) instead of text
 * @returns {Promise<Buffer|string>}
 */
const get = async (url, binary = true) => {
  const res = await fetch(url);
  // res.buffer() only exists in node-fetch v2 (removed in v3);
  // arrayBuffer() is supported by node-fetch v2, v3 and native fetch alike.
  return binary ? Buffer.from(await res.arrayBuffer()) : res.text();
};
// Allow forcing a full rebuild via CLI flag or environment variable.
const force = process.argv.slice(2).some((arg) => ['--force', '-f'].includes(arg)) || process.env.FORCE === 'true';
class Build {
  /**
   * Run the full export pipeline: scrape all resources, parse them, then
   * write JSON data, images, warnings and the README badge to disk.
   * Exits early when the export hashes are unchanged (unless forced).
   */
  async init() {
    await scraper.checkOriginServerAvailability();
    await hashManager.updateExportCache();
    if (!force && hashManager.isUpdated) {
      console.log('Data already up-to-date');
      return;
    }
    const resources = await scraper.fetchResources();
    /** @type {RawItemData} */
    const raw = {
      api: resources.en,
      manifest: await scraper.fetchImageManifest(),
      drops: await scraper.fetchDropRates(),
      patchlogs: await scraper.fetchPatchLogs(),
      wikia: await scraper.fetchWikiaData(),
      vaultData: await scraper.fetchVaultData(),
      relics: await scraper.generateRelicData(),
      i18n: resources,
    };
    const parsed = parser.parse(raw);
    const data = this.applyCustomCategories(parsed.data);
    const i18n = parser.applyI18n(data, raw.i18n);
    const all = await this.saveJson(data, i18n);
    await this.saveWarnings(parsed.warnings);
    await this.saveImages(all, raw.manifest);
    await this.updateReadme(raw.patchlogs);
    // Log number of warnings at the end of the script
    const warningNum = Object.values(parsed.warnings).reduce((sum, list) => sum + list.length, 0);
    await hashManager.saveExportCache();
    console.log(`\nFinished with ${warningNum} warnings.`);
  }

  /**
   * DE's data categories are a bit vague, so we'll use the ones
   * we generated in item.category instead. (parser.addCategory)
   * @param {Array<module:warframe-items.Item>} data items to parse out
   * @returns {Object<string, Array<module:warframe-items.Item>>}
   */
  applyCustomCategories(data) {
    const result = {};
    // eslint-disable-next-line no-restricted-syntax
    for (const chunk of data) {
      if (chunk.category === 'Recipes') continue; // Skip blueprints
      // eslint-disable-next-line no-restricted-syntax
      for (const item of chunk.data) {
        // Items whose productCategory is in the allow-list get their own
        // dedicated file; everything else is bucketed by item.category.
        const category =
          item.productCategory && allowedCustomCategories.includes(item.productCategory)
            ? item.productCategory
            : item.category;
        (result[category] ??= []).push(item);
      }
    }
    return result;
  }

  /**
   * Generate JSON file for each category and one for all combined.
   * @param {Object<string, Array<module:warframe-items.Item>>} categories list of categories to save and separate
   * @param {Object<Partial<module:warframe-items.Item>>} i18n internationalization partials of Items
   * @returns {Array<module:warframe-items.Item>} all items, sorted
   * @async
   */
  async saveJson(categories, i18n) {
    let all = [];
    // Sort by name, falling back to uniqueName for identically-named items,
    // so output files are deterministic between runs.
    const sort = (a, b) => {
      if (!a.name) console.log(a);
      const res = a.name.localeCompare(b.name);
      if (res === 0) {
        return a.uniqueName.localeCompare(b.uniqueName);
      }
      return res;
    };
    // Category names are provided by this.applyCustomCategories
    // eslint-disable-next-line no-restricted-syntax
    for (const category of Object.keys(categories)) {
      const data = categories[category].sort(sort);
      all = all.concat(data);
      await fs.writeFile(
        new URL(`../data/json/${category}.json`, import.meta.url),
        // stringify -> parse -> stringify normalizes the custom formatting
        JSON.stringify(JSON.parse(stringify(data)))
      );
    }
    // All.json (all items in one file)
    all.sort(sort);
    await fs.writeFile(new URL('../data/json/All.json', import.meta.url), stringify(all));
    await fs.writeFile(new URL('../data/json/i18n.json', import.meta.url), JSON.stringify(JSON.parse(stringify(i18n))));
    return all;
  }

  /**
   * @typedef {Object} Warnings
   * @property {Array<module:warframe-items.Item.name>} missingImage list of item names for those missing images
   * @property {Array<string>} missingDucats list of item names for those missing ducat values
   * @property {Array<module:warframe-items.Item.name>} missingComponents list of item names for those
   * missing components (usually weapons)
   * @property {Array<string<module:warframe-items.Item.name>>} missingVaultData list
   * of item names for those missing vault data
   * @property {Array<Array<module:warframe-items.Item.name, module:warframe-items.Polarity>>} polarity
   * list of item names for those missing polarities
   * @property {Array<module:warframe-items.Item.name>} missingType list of item names for those missing item types
   * @property {Array<module:warframe-items.Item.name>} failedImage list of item names for those
   * whose image download failed
   * @property {Array<module:warframe-items.Item.name>} missingWikiThumb list of item names for those
   * missing images from the fandom wikia
   */

  /**
   * Store warnings during parse process to disk
   * @param {Warnings} warnings warnings to save to file
   */
  async saveWarnings(warnings) {
    return fs.writeFile(new URL('../data/warnings.json', import.meta.url), stringify(warnings));
  }

  /**
   * Get all images unless hashes match with existing images
   * @param {Array<module:warframe-items.Item>} items items to append images to
   * @param {ImageManifest.Manifest} manifest image manifest to look up items from
   * @async
   */
  async saveImages(items, manifest) {
    // No need to go through every item if the manifest didn't change. I'm
    // guessing the `fileTime` key in each element works more or less like a
    // hash, so any change to that changes the hash of the full thing.
    if (!hashManager.hasChanged('Manifest')) return;
    const bar = new Progress('Fetching Images', items.length);
    const duplicates = []; // Don't download component images or relics twice
    // NOTE: downloads are intentionally sequential to avoid hammering the CDN.
    // eslint-disable-next-line no-restricted-syntax
    for (const item of items) {
      // Save image for parent item
      await this.saveImage(item, false, duplicates, manifest);
      // Save images for components if necessary
      if (item.components) {
        // eslint-disable-next-line no-restricted-syntax
        for (const component of item.components) {
          await this.saveImage(component, true, duplicates, manifest);
        }
      }
      // Save images for abilities
      if (item.abilities) {
        // eslint-disable-next-line no-restricted-syntax
        for (const ability of item.abilities) {
          await this.saveImage(ability, false, duplicates, manifest);
        }
      }
      bar.tick();
    }
    // write the manifests after images have all succeeded
    hashManager.imagesUpdated = true;
    try {
      await hashManager.saveExportCache();
      // Write new cache to disk
      await hashManager.saveImageCache(imageCache);
    } catch (error) {
      console.error(error);
    }
  }

  /**
   * Download and save images for items or components.
   * @param {module:warframe-items.Item} item to determine and save an image for
   * @param {boolean} isComponent whether the item is a component or a parent
   * @param {Array<module:warframe-items.Item.imageName>} duplicates list of duplicated (already existing) image names
   * @param {ImageManifest.Manifest} manifest image lookup list
   * @async
   */
  async saveImage(item, isComponent, duplicates, manifest) {
    // We'll use a custom blueprint image
    if (item.name === 'Blueprint' || item.name === 'Arcane') return;
    const imageBase = manifest.find((i) => i.uniqueName === item.uniqueName);
    if (!imageBase) return;
    const imageStub = imageBase.textureLocation.replace(/\\/g, '/').replace('xport/', '');
    const imageHash = imageStub.match(/!00_([\S]+)/);
    const imageUrl = `https://content.warframe.com/PublicExport/${imageStub}`;
    const basePath = fileURLToPath(new URL('../data/img/', import.meta.url));
    const filePath = path.join(basePath, item.imageName);
    // Prefer the manifest's fileTime as change marker; fall back to the hash
    // embedded in the texture path, which may be absent (hence `?.`).
    const hash = imageBase.fileTime || imageHash?.[1] || undefined;
    const cached = imageCache.find((c) => c.uniqueName === item.uniqueName);
    // Don't download component images or relic images twice
    if (isComponent || item.type === 'Relic') {
      if (duplicates.includes(item.imageName)) {
        return;
      }
      duplicates.push(item.imageName);
    }
    // Check if the previous image was for a component because they might
    // have different naming schemes like lex-prime
    if (!cached || cached.hash !== hash || cached.isComponent !== isComponent) {
      try {
        // Retry once per catch on transient DNS failures only.
        const retry = (err) => {
          if (err.code === 'ENOTFOUND') {
            return get(imageUrl);
          }
          throw err;
        };
        const image = await get(imageUrl).catch(retry).catch(retry);
        this.updateCache(item, cached, hash, isComponent);
        await sharp(image).toFile(filePath);
        await minify([filePath], {
          destination: basePath,
          plugins: [
            minifyJpeg(),
            minifyPng({
              quality: [0.2, 0.4],
            }),
          ],
        });
      } catch (e) {
        // Log but keep going; a single failed image shouldn't abort the build.
        console.error(e);
      }
    }
  }

  /**
   * A Cached Item
   * @typedef {Object} CachedItem
   * @property {module:warframe-items.Item.uniqueName} uniqueName unique name corresponding to the item's
   * {@link module:warframe-items.Item.uniqueName|uniqueName}
   * @property {string} hash Corresponding hash of the item representing the item
   * @property {boolean} isComponent whether this item is a component
   */

  /**
   * Update image cache with new hash if things changed
   * @param {module:warframe-items.Item} item item to add to the cache
   * @param {CachedItem|undefined} cached existing cache entry for this item, if any
   * @param {CachedItem.hash} hash of an existing cached item
   * @param {CachedItem.isComponent} isComponent whether the item to be cached is a component
   */
  updateCache(item, cached, hash, isComponent) {
    if (!cached) {
      imageCache.push({
        uniqueName: item.uniqueName,
        hash,
        isComponent,
      });
    } else {
      cached.hash = hash;
      cached.isComponent = isComponent;
    }
  }

  /**
   * Update readme with newest patchlog version
   * @param {module:warframe-patchlogs.Patchlogs} patchlogs for pulling the latest update
   */
  async updateReadme(patchlogs) {
    const logob64 = await readJson(new URL('../data/logo.json', import.meta.url));
    // Badge-friendly version string: " + " becomes "--", everything that isn't
    // a digit, dash or dot is stripped.
    const version = patchlogs.posts[0].name
      .replace(/ \+ /g, '--')
      .replace(/[^0-9\-.]/g, '')
      .trim();
    const { url } = patchlogs.posts[0];
    const readmeLocation = new URL('../README.md', import.meta.url);
    const readmeOld = await fs.readFile(readmeLocation, 'utf-8');
    const readmeNew = readmeOld.replace(
      /\[!\[warframe update.*/,
      `[![warframe update](https://img.shields.io/badge/warframe_update-${version}-blue.svg?logo=${encodeURIComponent(
        logob64
      )})](${url})`
    );
    return fs.writeFile(readmeLocation, readmeNew);
  }
}
const build = new Build();
// Top-level await (already used above for the image cache) instead of a
// floating promise: a failed build now rejects the module and exits non-zero
// rather than surfacing as an unhandled promise rejection.
await build.init();