Former-commit-id: 1bde56926526c68b0d8f46537e496be215b2c5b4
This commit is contained in:
Benjamin Kyd
2022-04-12 00:42:05 +01:00
21 changed files with 13702 additions and 155 deletions

2
.env
View File

@@ -14,4 +14,4 @@ DATABASE_PORT=5432
DATABASE_DB=legolog
DATABASE_DB_DEV=legolog
DATABASE_USER=postgres
DATABASE_PASSWORD=password
DATABASE_PASSWORD=postgres

4
.gitignore vendored
View File

@@ -1,3 +1,5 @@
node_modules/
logs.log
db/image/*/
db/image/
db/img/
db/*.tar.gz

3
db/README.md Normal file
View File

@@ -0,0 +1,3 @@
# This mess
This mess aggregates and condenses a massive database of LEGO parts, bricks and photos into a much more manageable dataset, from which I can generate meaningful SQL.

105
db/aggregator-stage-2.js Normal file
View File

@@ -0,0 +1,105 @@
// reads parts-to-include and sets-to-include and based off
// schema.sql, creates a new database with the data from
// the files.
const fs = require('fs');

// SQL is buffered in memory and written once at the end: the previous
// appendFileSync-per-line approach re-opened dump.sql for every statement.
const sqlLines = [];
function addLine(line) {
  sqlLines.push(line);
}

// Strip single quotes so interpolated names cannot break the generated SQL.
// Category and brick names were already stripped; colour names now get the
// same treatment for consistency.
function stripQuotes(value) {
  return String(value).replace(/'/g, '');
}

// Read a tab-separated resource file as an array of non-empty rows; a
// trailing newline would otherwise yield an empty record and crash the
// field accessors below.
function readRows(path) {
  return fs.readFileSync(path).toString().split('\n').filter(row => row.trim() !== '');
}

// first categories
addLine('-- categories\n');
const newcategory = (category, name) => `INSERT INTO tag (id, name) VALUES ('${category}', '${name}');`;
for (const row of readRows('db/res/categories.txt')) {
  const category = row.split('\t');
  const categoryName = stripQuotes(category[1]);
  console.log(`NEW category ${categoryName}`);
  addLine(newcategory(category[0], categoryName));
}

// then colour type
addLine('\n-- colour type\n');
const newColourType = (type, name) => `INSERT INTO colour_type (id, name) VALUES ('${type}', '${name}');`;
const lookupTable = {
  0: 'N/A',
  1: 'Solid',
  2: 'Transparent',
  3: 'Chrome',
  4: 'Pearl',
  5: 'Satin',
  6: 'Metallic',
  7: 'Milky',
  8: 'Glitter',
  9: 'Speckle',
  10: 'Modulex',
};
for (let i = 0; i < 11; i++) {
  console.log(`NEW colour type ${i}`);
  addLine(newColourType(i, lookupTable[i]));
}

// then colour
addLine('\n-- colour\n');
const newcolour = (id, name, RGB, type) => `INSERT INTO lego_brick_colour (id, name, hexrgb, col_type) VALUES ('${id}', '${name}', '${RGB}', '${type}');`;
for (const row of readRows('db/res/colors.txt')) {
  const colour = row.split('\t');
  const RGB = colour[2];
  // reverse lookup: colour-type name -> numeric id in lookupTable
  const type = Object.keys(lookupTable).find(key => lookupTable[key] === colour[3]);
  const id = colour[0];
  // colour names were previously interpolated unescaped into the SQL
  const name = stripQuotes(colour[1]);
  console.log(`NEW colour ${name}`);
  addLine(newcolour(id, name, RGB, type));
}

// then bricks
addLine('\n-- bricks\n');
const newBrick = (id, name, weight, dx, dy, dz) => `INSERT INTO lego_brick (id, name, weight, dimensions_x, dimensions_y, dimensions_z) VALUES ('${id}', '${name}', '${weight}', '${dx}', '${dy}', '${dz}');`;
// Index the parts file by part id (column 2) once, instead of a linear
// find() per requested brick (was O(parts * bricks)).
const brickById = new Map();
for (const row of readRows('db/res/Parts.txt')) {
  brickById.set(row.split('\t')[2], row);
}
const brickIds = JSON.parse(fs.readFileSync('db/parts-to-include'));
for (let i = 0; i < brickIds.length; i++) {
  const brickId = brickIds[i];
  const brick = brickById.get(brickId);
  if (!brick) {
    console.log(`ERROR: brick ${brickId} not found`);
    continue;
  }
  const brickData = brick.split('\t');
  const name = stripQuotes(brickData[3]);
  const weight = brickData[4];
  // Dimensions column is "x"-separated; missing axes become '' instead of
  // crashing on undefined.trim().
  const dims = (brickData[5] || '').split('x');
  const dx = (dims[0] || '').trim();
  const dy = (dims[1] || '').trim();
  const dz = (dims[2] || '').trim();
  addLine(newBrick(brickId, name, weight, dx, dy, dz));
  console.log(`NEW brick ${i} ${brickId}`);
}

// then sets
addLine('\n-- sets\n');
const newSet = (id, name, category, colour, bricks) => `INSERT INTO lego_set (id, name, category, colour, bricks) VALUES ('${id}', '${name}', '${category}', '${colour}', '${bricks}');`;
const sets = readRows('db/res/Sets.txt');
const setIds = JSON.parse(fs.readFileSync('db/sets-to-include'));
for (let i = 0; i < setIds.length; i++) {
  // TODO: emit newSet(...) rows from `sets` for each id in `setIds`.
}
// then brick tags
// then set tags
// then pieces in sets
// then make up some random data for brick inventory
// then make up some random data for set inventory

// Flush the whole dump in a single write, restoring each line's newline.
fs.writeFileSync('./db/dump.sql', sqlLines.map(line => `${line}\n`).join(''));

74
db/aggregator.js Normal file
View File

@@ -0,0 +1,74 @@
// goes through most-common-lego-parts and most-common-lego-sets
// and adds them to parts & sets to include, then goes through
// every part for the related sets and adds them to the parts list
// then gets the photos for all of above and moves them to the img
// directory as opposed to the original directory
const md5 = require('md5');
const fs = require('fs');
// so no duplicates
const sets = new Set();
const parts = new Set();

// Read a newline-separated list file, dropping blank lines — a trailing
// newline would otherwise feed an empty id into the Sets below.
const readList = path => fs.readFileSync(path).toString().split('\n').filter(line => line.trim() !== '');

const commonSets = readList('db/res/most-common-lego-sets.txt');
const setList = JSON.parse(fs.readFileSync('db/res/sets.json').toString());
for (let i = 0; i < commonSets.length; i++) {
  console.log('adding set ' + commonSets[i]);
  sets.add(commonSets[i]);
}
// go through each set and add all parts to the parts list
for (let i = 0; i < commonSets.length; i++) {
  const setParts = setList[commonSets[i]];
  if (!setParts) {
    // Previously crashed with Object.keys(undefined) when a listed set was
    // missing from sets.json.
    console.log('WARNING: ' + commonSets[i] + ' not found in sets.json, skipping');
    continue;
  }
  console.log(commonSets[i] + ' has ' + Object.keys(setParts).length + ' unique parts');
  for (const part of Object.keys(setParts)) {
    parts.add(part);
  }
}
const commonParts = readList('db/res/most-common-lego-parts.txt');
for (let i = 0; i < commonParts.length; i++) {
  console.log('adding part ' + commonParts[i]);
  parts.add(commonParts[i]);
}
// image hash everything
// function copyFileAndCreateDirs(source, bucket, filename) {
//   const filePath = `db/img/${bucket[0]}/${bucket[1]}/${bucket[2]}/${bucket[3]}/${filename}`;
//   try {
//     if (!fs.existsSync(filePath)) {
//       fs.mkdirSync(`db/img/${bucket[0]}`, { recursive: true });
//       fs.mkdirSync(`db/img/${bucket[0]}/${bucket[1]}`, { recursive: true });
//       fs.mkdirSync(`db/img/${bucket[0]}/${bucket[1]}/${bucket[2]}`, { recursive: true });
//       fs.mkdirSync(`db/img/${bucket[0]}/${bucket[1]}/${bucket[2]}/${bucket[3]}`, { recursive: true });
//       fs.copyFileSync(source, filePath);
//     }
//   } catch (err) {
//     console.error(err);
//   }
// }
// for (const set of sets) {
//   const hash = md5(set);
//   console.log(hash);
//   const bucket = hash.substring(0, 4);
//   const filename = set + '.png';
//   const file = `db/image/${bucket[0]}/${bucket[1]}/${bucket[2]}/${bucket[3]}/${filename}`;
//   copyFileAndCreateDirs(file, bucket, filename);
// }
// for (const part of parts) {
//   const hash = md5(part);
//   console.log(hash);
//   const bucket = hash.substring(0, 4);
//   const filename = part + '.png';
//   const file = `db/image/${bucket[0]}/${bucket[1]}/${bucket[2]}/${bucket[3]}/${filename}`;
//   copyFileAndCreateDirs(file, bucket, filename);
// }
fs.writeFileSync('db/sets-to-include', JSON.stringify(Array.from(sets)));
fs.writeFileSync('db/parts-to-include', JSON.stringify(Array.from(parts)));

3382
db/dump.sql Normal file

File diff suppressed because it is too large Load Diff

35
db/mostpopularbricks.js Normal file
View File

@@ -0,0 +1,35 @@
// get all the id's from this page
// https://brickarchitect.com/most-common-lego-parts/
const fs = require('fs');
const axios = require('axios');
// Download a page and return its HTML as a string. On failure the URL is
// recorded in error-set.txt and the function resolves to undefined so the
// caller can decide how to proceed.
async function get(url) {
  const options = {
    method: 'GET',
    headers: { 'User-Agent': 'Chrome/96.0.4664.175' },
  };
  try {
    const response = await axios.get(url, options);
    return response.data.toString();
  } catch (err) {
    fs.appendFileSync('error-set.txt', `${url}\n`);
    console.log(`Failed to download ${url}`);
  }
}
// Scrape the "most common LEGO parts" page and append each part id found in
// its part-image URLs to db/res/most-common-lego-parts.txt.
async function main() {
  const regex = /https:\/\/brickarchitect\.com\/content\/parts\/(.*?)\.png/g;
  const data = await get('https://brickarchitect.com/most-common-lego-parts/');
  if (!data) {
    // get() resolves to undefined after logging a failure; previously this
    // crashed on undefined.match(...).
    return;
  }
  // String.prototype.match returns null (not []) when nothing matches.
  const matches = data.match(regex) || [];
  matches.forEach((element) => {
    // URL path segment 5 is "<id>.png" — keep just the id.
    const id = element.split('/')[5].split('.')[0];
    console.log(id);
    fs.appendFileSync('db/res/most-common-lego-parts.txt', `${id}\n`);
  });
}
main();

1
db/parts-to-include Normal file

File diff suppressed because one or more lines are too long

View File

@@ -1,5 +1,3 @@
Category ID Category Name
143 (Other)
318 12V
628 1950

View File

@@ -1,5 +1,3 @@
Color ID Color Name RGB Type Parts In Sets Wanted For Sale Year From Year To
0 (Not Applicable) N/A 4587 12360 62547 10990 1954 2022
41 Aqua BCE5DC Solid 82 60 1233 116 1998 2006
11 Black 212121 Solid 10925 11692 15454 11229 1957 2022

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,43 @@
76023-1
75978-1
10175-1
75059-1
10240-1
10225-1
76042-1
10255-1
10193-1
75060-1
71374-1
75192-1
21322-1
10232-1
21103-1
71040-1
76139-1
75159-1
7623-1
75827-1
10283-1
10270-1
76128-1
41314-1
21312-1
70841-1
41154-1
71360-1
75957-1
21113-1
31097-1
21326-1
60292-1
41447-1
41440-1
41450-1
41446-1
41682-1
41684-1
41008-1
41325-1
75969-1
42123-1

View File

@@ -1,3 +1,4 @@
CREATE TABLE IF NOT EXISTS tag (
id INT NOT NULL PRIMARY KEY,
name VARCHAR (100)
@@ -11,52 +12,47 @@ CREATE TABLE IF NOT EXISTS colour_type (
CREATE TABLE IF NOT EXISTS lego_brick_colour (
id INT NOT NULL PRIMARY KEY,
name VARCHAR (100),
hexrgb VARCHAR (6) NOT NULL,
hexrgb VARCHAR (6),
col_type INT,
FOREIGN KEY ( col_type ) REFERENCES colour_type( id )
);
CREATE TABLE IF NOT EXISTS lego_brick (
id VARCHAR (50) NOT NULL PRIMARY KEY,
name VARCHAR NOT NULL,
weight VARCHAR (10),
dimensions_x VARCHAR (10),
dimensions_y VARCHAR (10),
dimensions_z VARCHAR (10)
);
CREATE TABLE IF NOT EXISTS lego_set (
id VARCHAR (50) NOT NULL PRIMARY KEY,
name VARCHAR (100),
description TEXT,
date_released TIMESTAMP WITHOUT TIME ZONE,
dimensions_x DECIMAL,
dimensions_y DECIMAL,
dimensions_z DECIMAL,
);
CREATE TABLE IF NOT EXISTS lego_brick (
id VARCHAR (50) NOT NULL PRIMARY KEY,
name VARCHAR NOT NULL,
colour INT,
weight DECIMAL,
dimensions_x INT,
dimensions_y INT,
dimensions_z INT,
date_from TIMESTAMP WITHOUT TIME ZONE,
date_to TIMESTAMP WITHOUT TIME ZONE,
FOREIGN KEY ( colour ) REFERENCES lego_brick_colour( id ),
dimensions_x VARCHAR (10),
dimensions_y VARCHAR (10),
dimensions_z VARCHAR (10)
);
CREATE TABLE IF NOT EXISTS lego_brick_tag (
id VARCHAR (50) NOT NULL PRIMARY KEY,
brick_id VARCHAR (50) NOT NULL
brick_id VARCHAR (50) NOT NULL,
tag INT NOT NULL,
FOREIGN KEY ( brick_id ) REFERENCES lego_brick( id ),
FOREIGN KEY ( tag ) REFERENCES tag( id ),
)
FOREIGN KEY ( tag ) REFERENCES tag( id )
);
CREATE TABLE IF NOT EXISTS lego_set_tag (
id VARCHAR (50) NOT NULL PRIMARY KEY,
set_id VARCHAR (50) NOT NULL
set_id VARCHAR (50) NOT NULL,
tag INT NOT NULL,
FOREIGN KEY ( set_id ) REFERENCES lego_set( id ),
FOREIGN KEY ( tag ) REFERENCES tag( id ),
)
FOREIGN KEY ( tag ) REFERENCES tag( id )
);
CREATE TABLE IF NOT EXISTS set_descriptor (
id VARCHAR (50) NOT NULL PRIMARY KEY,
set_id VARCHAR (50) NOT NULL,
brick_id VARCHAR (50) NOT NULL,
amount INT,
@@ -82,7 +78,6 @@ CREATE TABLE IF NOT EXISTS lego_set_inventory (
FOREIGN KEY ( id ) REFERENCES lego_set( id )
);
CREATE TABLE IF NOT EXISTS users (
id VARCHAR (50) NOT NULL PRIMARY KEY,
email text NOT NULL,
@@ -91,5 +86,25 @@ CREATE TABLE IF NOT EXISTS users (
address text NOT NULL,
postcode text NOT NULL,
date_created TIMESTAMP WITHOUT TIME ZONE NOT NULL,
date_updated TIMESTAMP WITHOUT TIME ZONE NOT NULL,
date_updated TIMESTAMP WITHOUT TIME ZONE NOT NULL
);
CREATE TABLE IF NOT EXISTS orders (
id VARCHAR (50) NOT NULL PRIMARY KEY,
user_id VARCHAR (50) NOT NULL,
date_placed TIMESTAMP WITHOUT TIME ZONE NOT NULL,
FOREIGN KEY ( user_id ) REFERENCES users( id )
);
CREATE TABLE IF NOT EXISTS order_items (
order_id VARCHAR (50) NOT NULL,
brick_id VARCHAR (50),
-- colour is a modifier for the brick
brick_colour INT,
set_id VARCHAR (50),
amount INT NOT NULL,
FOREIGN KEY ( order_id ) REFERENCES orders( id ),
FOREIGN KEY ( brick_id ) REFERENCES lego_brick( id ),
FOREIGN KEY ( brick_colour ) REFERENCES lego_brick_colour( id ),
FOREIGN KEY ( set_id ) REFERENCES lego_set( id )
);

1
db/sets-to-include Normal file
View File

@@ -0,0 +1 @@
["76023-1","75978-1","10175-1","75059-1","10240-1","10225-1","76042-1","10255-1","10193-1","75060-1","71374-1","75192-1","21322-1","10232-1","21103-1","71040-1","76139-1","75159-1","7623-1","75827-1","10283-1","10270-1","76128-1","41314-1","21312-1","70841-1","41154-1","71360-1","75957-1","21113-1","31097-1","21326-1","60292-1","41447-1","41440-1","41450-1","41446-1","41682-1","41684-1","41008-1","41325-1","75969-1","42123-1"]

9010
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -28,6 +28,8 @@
"dependencies": {
"axios": "^0.25.0",
"cli-color": "^2.0.1",
"decompress": "^4.2.1",
"decompress-targz": "^4.1.1",
"dotenv": "^10.0.0",
"express": "^4.17.2",
"express-oauth2-jwt-bearer": "^1.1.0",

View File

@@ -2,7 +2,7 @@ const Logger = require('./logger.js');
const dotenv = require('dotenv');
function load() {
function Load() {
Logger.Info('Loading Config...');
const res = dotenv.config();
Logger.Debug(`CONFIG: ${JSON.stringify(res.parsed)}`);
@@ -21,6 +21,7 @@ function load() {
}
}
module.exports = {
Load: load,
Load,
};

View File

@@ -44,6 +44,22 @@ class Database {
await con;
return this.connection;
}
// Run a query against the shared connection, lazily connecting first if
// connect() has not been called yet.
// NOTE(review): when `callback` is supplied, node-postgres does not return a
// promise from connection.query — `result` would then be undefined; confirm
// callers either omit the callback or ignore the return value.
async query(query, params, callback) {
if (!this.connection) {
await this.connect();
}
// debug moment
// Only the first 100 chars are logged to keep large dumps out of the log.
Logger.Database(`PSQL Query: ${query.substring(0, 100)}...`);
const result = await this.connection.query(query, params, callback);
return result;
}
// Close the underlying connection and clear the cached handle so a later
// query() call will reconnect via connect().
async destroy() {
await this.connection.end();
this.connection = null;
}
}
module.exports = {

79
src/first-time-run.js Normal file
View File

@@ -0,0 +1,79 @@
// Populate database, create admin user, etc.
const Logger = require('./logger.js');
const Config = require('./config.js');
const Database = require('./database/database.js');
const decompress = require('decompress');
const decompressTargz = require('decompress-targz');
const fs = require('fs');
console.log('LegoLog Setting Up:tm:');
// One-off bootstrap: unpack the bundled image archive, create the schema
// from db/schema.sql, then populate it from db/dump.sql.
async function main() {
Config.Load();
await Logger.Init({
logLevel: process.env.LOG_LEVEL,
logToConsole: process.env.LOG_CONSOLE,
logFile: process.env.LOG_FILE,
});
// NOTE(review): this warning is logged twice here (and twice again at the
// end) — presumably for emphasis; confirm it is not an accidental paste.
Logger.Info('DECOMPRESSING - DO NOT CLOSE, THIS MAY TAKE A WHILE...');
Logger.Info('DECOMPRESSING - DO NOT CLOSE, THIS MAY TAKE A WHILE...');
// connect to database
const db = new Database.IDatabase();
await db.connect();
// unzip images ASYNC
// NOTE(review): this promise is left floating so extraction overlaps the
// SQL work below; nothing awaits it or handles a rejection — verify that a
// failed extraction is acceptable here.
decompress('db/img.tar.gz', 'db/', {
plugins: [
decompressTargz(),
],
}).then(() => {
console.log('Files decompressed');
});
const tableQuery = fs.readFileSync('./db/schema.sql').toString();
/* eslint-disable-next-line */
await new Promise(async (resolve, reject) => {
// run setup script to create schema
// Errors are logged, not thrown, so setup continues past an already-created
// schema; the promise always resolves.
await db.query(tableQuery, [], (err, res) => {
if (err) {
Logger.Error(err);
resolve();
return;
}
for (const result of res) {
Logger.Database(result.command);
}
resolve();
});
});
// populate database
const dump = fs.readFileSync('./db/dump.sql').toString();
/* eslint-disable-next-line */
await new Promise(async (resolve, reject) => {
// Same pattern as above: log each statement's command tag, swallow errors.
await db.query(dump, [], (err, res) => {
if (err) {
Logger.Error(err);
resolve();
return;
}
for (const result of res) {
Logger.Database(result.command);
}
resolve();
});
});
await db.destroy();
Logger.Info('DECOMPRESSING - DO NOT CLOSE, THIS MAY TAKE A WHILE...');
Logger.Info('DECOMPRESSING - DO NOT CLOSE, THIS MAY TAKE A WHILE...');
}
main();

View File

@@ -3,23 +3,22 @@ const Config = require('./config.js');
const Server = require('./routes/server.js');
const API = require('./routes/api.js');
const Databse = require('./database/database.js');
// TODO: The first time running needs to populate the database with the bricks and sets.
const Database = require('./database/database.js');
async function main() {
Config.Load();
await Logger.Init({
logLevel: process.env.LOG_LEVEL,
logToConsole: process.env.LOG_CONSOLE,
logFile: process.env.LOG_FILE,
// networkHost: process.env.LOG_NET_HOST,
// networkPort: process.env.LOG_NET_PORT,
networkHost: process.env.LOG_NET_HOST,
networkPort: process.env.LOG_NET_PORT,
});
Logger.Info('Pre-Init Complete');
const Database = new Databse.IDatabase();
await Database.connect();
const db = new Database.IDatabase();
await db.connect();
Server.Listen(process.env.PORT);
API.Init();