redid database
Former-commit-id: 93d6c70302791c97062cefa6ab9938739049468c
.env (1 line changed)
@@ -12,6 +12,5 @@ LOG_NET_PORT=21
DATABASE_HOST=localhost
DATABASE_PORT=5432
DATABASE_DB=legolog
DATABASE_DB_DEV=legolog
DATABASE_USER=postgres
DATABASE_PASSWORD=postgres
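These settings are consumed at runtime through dotenv, which appears in the dependency list further down. A minimal sketch of that wiring, assuming the defaults above; the pg Pool construction is illustrative, not a quote of this repository's code:

```js
// Minimal sketch (assumes the .env above): dotenv loads the file into
// process.env, and node-postgres reads the connection settings from it.
require('dotenv').config();
const { Pool } = require('pg');

const pool = new Pool({
  host: process.env.DATABASE_HOST,
  port: Number(process.env.DATABASE_PORT),
  database: process.env.DATABASE_DB,
  user: process.env.DATABASE_USER,
  password: process.env.DATABASE_PASSWORD,
});
```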
.gitignore (vendored, 3 lines changed)
@@ -1,3 +1,4 @@
node_modules/
logs.log
-db/image/*/
+db/image/
+db/img/
db/README.md (new file, 3 lines)
@@ -0,0 +1,3 @@
# This mess

This mess aggregates and condenses a massive database of LEGO parts, bricks, and photos into a much more manageable dataset from which I can generate meaningful SQL.
db/aggregator.js (new file, 74 lines)
@@ -0,0 +1,74 @@
// Goes through most-common-lego-parts and most-common-lego-sets and adds
// them to the parts and sets to include, then goes through every part of
// the related sets and adds those to the parts list. Finally it gets the
// photos for all of the above and moves them into the img directory,
// as opposed to the original directory.

const md5 = require('md5'); // used by the (currently disabled) bucketing code below
const fs = require('fs');

// Sets rather than arrays, so there are no duplicates
const sets = new Set();
const parts = new Set();

const commonSets = fs.readFileSync('db/res/most-common-lego-sets.txt').toString().split('\n');
const setList = JSON.parse(fs.readFileSync('db/res/sets.json').toString());
for (let i = 0; i < commonSets.length; i++) {
  console.log('adding set ' + commonSets[i]);
  sets.add(commonSets[i]);
}

// Go through each set and add all of its parts to the parts list
for (let i = 0; i < commonSets.length; i++) {
  const setParts = setList[commonSets[i]];
  console.log(commonSets[i] + ' has ' + Object.keys(setParts).length + ' unique parts');
  for (const part of Object.keys(setParts)) {
    parts.add(part);
  }
}

const commonParts = fs.readFileSync('db/res/most-common-lego-parts.txt').toString().split('\n');
for (let i = 0; i < commonParts.length; i++) {
  console.log('adding part ' + commonParts[i]);
  parts.add(commonParts[i]);
}

// Image-hash everything (disabled for now)

// function copyFileAndCreateDirs(source, bucket, filename) {
//   const filePath = `db/img/${bucket[0]}/${bucket[1]}/${bucket[2]}/${bucket[3]}/${filename}`;
//   try {
//     if (!fs.existsSync(filePath)) {
//       fs.mkdirSync(`db/img/${bucket[0]}`, { recursive: true });
//       fs.mkdirSync(`db/img/${bucket[0]}/${bucket[1]}`, { recursive: true });
//       fs.mkdirSync(`db/img/${bucket[0]}/${bucket[1]}/${bucket[2]}`, { recursive: true });
//       fs.mkdirSync(`db/img/${bucket[0]}/${bucket[1]}/${bucket[2]}/${bucket[3]}`, { recursive: true });
//       fs.copyFileSync(source, filePath);
//     }
//   } catch (err) {
//     console.error(err);
//   }
// }

// for (const set of sets) {
//   const hash = md5(set);
//   console.log(hash);
//   const bucket = hash.substring(0, 4);
//   const filename = set + '.png';

//   const file = `db/image/${bucket[0]}/${bucket[1]}/${bucket[2]}/${bucket[3]}/${filename}`;
//   copyFileAndCreateDirs(file, bucket, filename);
// }

// for (const part of parts) {
//   const hash = md5(part);
//   console.log(hash);
//   const bucket = hash.substring(0, 4);
//   const filename = part + '.png';

//   const file = `db/image/${bucket[0]}/${bucket[1]}/${bucket[2]}/${bucket[3]}/${filename}`;
//   copyFileAndCreateDirs(file, bucket, filename);
// }

fs.writeFileSync('db/sets-to-include', JSON.stringify(Array.from(sets)));
fs.writeFileSync('db/parts-to-include', JSON.stringify(Array.from(parts)));
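The disabled block above appears to implement a content-addressed layout: the first four hex characters of md5(id) become four nested directory levels under db/img/. A condensed sketch of that scheme; bucketPath and copyIntoBucket are hypothetical helpers, not functions in this commit:

```js
// Hypothetical condensation of the disabled copyFileAndCreateDirs logic:
// md5(id) is hashed, and its first four hex characters become four nested
// directory levels under db/img/.
const md5 = require('md5');
const fs = require('fs');
const path = require('path');

function bucketPath(id) {
  const bucket = md5(id).substring(0, 4);
  return `db/img/${bucket[0]}/${bucket[1]}/${bucket[2]}/${bucket[3]}/${id}.png`;
}

function copyIntoBucket(source, id) {
  const dest = bucketPath(id);
  // recursive: true creates the whole ancestor chain, so one call is enough
  fs.mkdirSync(path.dirname(dest), { recursive: true });
  fs.copyFileSync(source, dest);
}
```

Since fs.mkdirSync with recursive: true creates every missing ancestor, the four chained mkdirSync calls in the commented-out code collapse to a single call.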
db/dump.sql (new file, 0 lines)

db/mostpopularbricks.js (new file, 35 lines)
@@ -0,0 +1,35 @@
// Get all the part IDs from this page:
// https://brickarchitect.com/most-common-lego-parts/

const fs = require('fs');
const axios = require('axios');

async function get(url) {
  // axios returns the HTML of the page
  try {
    const res = await axios.get(url, {
      method: 'GET',
      headers: { 'User-Agent': 'Chrome/96.0.4664.175' },
    });
    return res.data.toString();
  } catch (e) {
    fs.appendFileSync('error-set.txt', `${url}\n`);
    console.log(`Failed to download ${url}`);
  }
}

async function main() {
  const regex = /https:\/\/brickarchitect\.com\/content\/parts\/(.*?)\.png/g;
  const data = await get('https://brickarchitect.com/most-common-lego-parts/');

  data.match(regex).forEach((element) => {
    // Pull the part ID out of the image URL
    const id = element.split('/')[5].split('.')[0];
    console.log(id);

    fs.appendFileSync('db/res/most-common-lego-parts.txt', `${id}\n`);
  });
}

main();
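One fragile spot worth noting: get() resolves to undefined after a failed download, and String.prototype.match() returns null when nothing matches, so main() can throw. A hypothetical hardened variant; safeIds and its getPage parameter are illustrative names, not part of this commit:

```js
// Hypothetical hardened variant of main(): guard against a failed download
// (data is undefined) and a regex with no matches (match() returns null).
async function safeIds(getPage) {
  const regex = /https:\/\/brickarchitect\.com\/content\/parts\/(.*?)\.png/g;
  const data = await getPage('https://brickarchitect.com/most-common-lego-parts/');
  const matches = (data || '').match(regex) || [];
  // URL shape: https://brickarchitect.com/content/parts/<id>.png
  return matches.map((url) => url.split('/')[5].split('.')[0]);
}
```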
db/parts-to-include (new file, 1 line)
File diff suppressed because one or more lines are too long

db/res/most-common-lego-parts.txt (new file, 1005 lines)
File diff suppressed because it is too large

db/res/most-common-lego-sets.txt (new file, 43 lines)
@@ -0,0 +1,43 @@
76023-1
75978-1
10175-1
75059-1
10240-1
10225-1
76042-1
10255-1
10193-1
75060-1
71374-1
75192-1
21322-1
10232-1
21103-1
71040-1
76139-1
75159-1
7623-1
75827-1
10283-1
10270-1
76128-1
41314-1
21312-1
70841-1
41154-1
71360-1
75957-1
21113-1
31097-1
21326-1
60292-1
41447-1
41440-1
41450-1
41446-1
41682-1
41684-1
41008-1
41325-1
75969-1
42123-1
db/schema.sql
@@ -1,5 +1,3 @@
-CREATE DATABASE IF NOT EXISTS `legolog`;
-USE `legolog`;

CREATE TABLE IF NOT EXISTS tag (
    id INT NOT NULL PRIMARY KEY,
@@ -26,7 +24,7 @@ CREATE TABLE IF NOT EXISTS lego_set (
    date_released TIMESTAMP WITHOUT TIME ZONE,
    dimensions_x DECIMAL,
    dimensions_y DECIMAL,
-    dimensions_z DECIMAL,
+    dimensions_z DECIMAL
);

CREATE TABLE IF NOT EXISTS lego_brick (
@@ -39,24 +37,24 @@ CREATE TABLE IF NOT EXISTS lego_brick (
    dimensions_z INT,
    date_from TIMESTAMP WITHOUT TIME ZONE,
    date_to TIMESTAMP WITHOUT TIME ZONE,
-    FOREIGN KEY ( colour ) REFERENCES lego_brick_colour( id ),
+    FOREIGN KEY ( colour ) REFERENCES lego_brick_colour( id )
);

CREATE TABLE IF NOT EXISTS lego_brick_tag (
    id VARCHAR (50) NOT NULL PRIMARY KEY,
-    brick_id VARCHAR (50) NOT NULL
+    brick_id VARCHAR (50) NOT NULL,
    tag INT NOT NULL,
    FOREIGN KEY ( brick_id ) REFERENCES lego_brick( id ),
-    FOREIGN KEY ( tag ) REFERENCES tag( id ),
-)
+    FOREIGN KEY ( tag ) REFERENCES tag( id )
+);

CREATE TABLE IF NOT EXISTS lego_set_tag (
    id VARCHAR (50) NOT NULL PRIMARY KEY,
-    set_id VARCHAR (50) NOT NULL
+    set_id VARCHAR (50) NOT NULL,
    tag INT NOT NULL,
    FOREIGN KEY ( set_id ) REFERENCES lego_set( id ),
-    FOREIGN KEY ( tag ) REFERENCES tag( id ),
-)
+    FOREIGN KEY ( tag ) REFERENCES tag( id )
+);

CREATE TABLE IF NOT EXISTS set_descriptor (
    set_id VARCHAR (50) NOT NULL,
@@ -92,7 +90,7 @@ CREATE TABLE IF NOT EXISTS users (
    address text NOT NULL,
    postcode text NOT NULL,
    date_created TIMESTAMP WITHOUT TIME ZONE NOT NULL,
-    date_updated TIMESTAMP WITHOUT TIME ZONE NOT NULL,
+    date_updated TIMESTAMP WITHOUT TIME ZONE NOT NULL
);

CREATE TABLE IF NOT EXISTS order_items (
@@ -108,6 +106,6 @@ CREATE TABLE IF NOT EXISTS order_items (
    set_id VARCHAR (50),
    amount INT NOT NULL,
    FOREIGN KEY ( order_id ) REFERENCES orders( id ),
-    FOREIGN KEY ( brick_id ) REFERENCES lego_brick( id )
+    FOREIGN KEY ( brick_id ) REFERENCES lego_brick( id ),
+    FOREIGN KEY ( set_id ) REFERENCES lego_set( id )
);
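The schema fixes above are all trailing commas before a closing parenthesis, which Postgres rejects outright. A hypothetical smoke test for the result, querying the pg_constraint catalog to confirm the foreign keys were actually registered; listForeignKeys is an illustrative name, and the connection settings are assumed to come from the .env shown earlier:

```js
// Hypothetical smoke test: after loading db/schema.sql, list the foreign
// keys Postgres registered. contype = 'f' selects foreign-key constraints.
const { Client } = require('pg');

async function listForeignKeys() {
  const client = new Client({
    host: process.env.DATABASE_HOST,
    port: Number(process.env.DATABASE_PORT),
    database: process.env.DATABASE_DB,
    user: process.env.DATABASE_USER,
    password: process.env.DATABASE_PASSWORD,
  });
  await client.connect();
  const res = await client.query(
    "SELECT conrelid::regclass AS table_name, conname FROM pg_constraint WHERE contype = 'f'",
  );
  console.table(res.rows);
  await client.end();
}

listForeignKeys();
```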
db/sets-to-include (new file, 1 line)
@@ -0,0 +1 @@
["76023-1","75978-1","10175-1","75059-1","10240-1","10225-1","76042-1","10255-1","10193-1","75060-1","71374-1","75192-1","21322-1","10232-1","21103-1","71040-1","76139-1","75159-1","7623-1","75827-1","10283-1","10270-1","76128-1","41314-1","21312-1","70841-1","41154-1","71360-1","75957-1","21113-1","31097-1","21326-1","60292-1","41447-1","41440-1","41450-1","41446-1","41682-1","41684-1","41008-1","41325-1","75969-1","42123-1"]

package-lock.json (generated, 867 lines changed)
File diff suppressed because it is too large
package.json (3 lines added)
@@ -28,11 +28,14 @@
  "dependencies": {
    "axios": "^0.25.0",
    "cli-color": "^2.0.1",
    "decompress": "^4.2.1",
    "decompress-targz": "^4.1.1",
    "dotenv": "^10.0.0",
    "express": "^4.17.2",
    "express-oauth2-jwt-bearer": "^1.1.0",
    "jest": "^27.4.5",
    "jsdoc": "^3.6.10",
    "md5": "^2.3.0",
    "moment": "^2.29.1",
    "npm": "^8.6.0",
    "pg": "^8.7.3",
database/database.js (16 lines added)
@@ -44,6 +44,22 @@ class Database {
    await con;
    return this.connection;
  }

  async query(query, params, callback) {
    if (!this.connection) {
      await this.connect();
    }

    // Debug logging: record the first 100 characters of every query
    Logger.Database(`PSQL Query: ${query.substring(0, 100)}...`);
    const result = await this.connection.query(query, params, callback);
    return result;
  }

  async destroy() {
    await this.connection.end();
    this.connection = null;
  }
}

module.exports = {
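For reference, illustrative usage of the query() wrapper above, assuming the module exports IDatabase as the scripts below suggest. Note that node-postgres only returns a promise when no callback is supplied, so the awaitable form shown here is the one that actually resolves with a result object:

```js
// Illustrative only: exercise query() in its promise form (no callback),
// which is the variant node-postgres resolves with a result object.
const Database = require('./database/database.js');

async function example() {
  const db = new Database.IDatabase();
  const res = await db.query('SELECT * FROM lego_set LIMIT $1', [5]);
  console.log(res.rows);
  await db.destroy();
}

example();
```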
@@ -1,15 +1,77 @@
// Populate database, create admin user, etc.
const Logger = require('./logger.js');
const Config = require('./config.js');
const Database = require('./database/database.js');

const decompress = require('decompress');
const decompressTargz = require('decompress-targz');

const fs = require('fs');

console.log('LegoLog Setting Up:tm:');

-async main() {
+async function main() {
  Config.Load();

  await Logger.Init({
    logLevel: process.env.LOG_LEVEL,
    logToConsole: process.env.LOG_CONSOLE,
    logFile: process.env.LOG_FILE,
  });

  Logger.Info('DECOMPRESSING - DO NOT CLOSE, THIS MAY TAKE A WHILE...');

  // connect to the database
-  const Database = new Databse.IDatabase();
-  await Database.connect();
+  const db = new Database.IDatabase();
+  await db.connect();

  // unzip images asynchronously (the promise is not awaited here)
  decompress('db/img.tar.gz', 'db/', {
    plugins: [
      decompressTargz(),
    ],
  }).then(() => {
    console.log('Files decompressed');
  });

  const tableQuery = fs.readFileSync('./db/schema.sql').toString();
  /* eslint-disable-next-line */
  await new Promise(async (resolve, reject) => {
    // run the setup script to create the schema
    await db.query(tableQuery, [], (err, res) => {
      if (err) {
        Logger.Error(err);
        resolve();
        return;
      }
      for (const result of res) {
        Logger.Database(result.command);
      }
      resolve();
    });
  });

  // populate the database
  const dump = fs.readFileSync('./db/dump.sql').toString();
  /* eslint-disable-next-line */
  await new Promise(async (resolve, reject) => {
    await db.query(dump, [], (err, res) => {
      if (err) {
        Logger.Error(err);
        resolve();
        return;
      }
      // console.log(res);
      resolve();
    });
  });

  await db.destroy();

  Logger.Info('DECOMPRESSING - DO NOT CLOSE, THIS MAY TAKE A WHILE...');
}

main();
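Because the decompress() promise above is detached with .then(), main() can close the database connection and log completion while the archive is still being unpacked, which is presumably why the DO NOT CLOSE warning is repeated at the end. A hypothetical variant that simply awaits the unpacking inside main(); unpackImages is an illustrative name, not a function in this commit:

```js
// Hypothetical variant of the image-unpacking step: awaiting the promise
// guarantees setup does not report completion while files are still
// being written.
const decompress = require('decompress');
const decompressTargz = require('decompress-targz');

async function unpackImages() {
  await decompress('db/img.tar.gz', 'db/', {
    plugins: [decompressTargz()],
  });
  console.log('Files decompressed');
}
```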
@@ -3,7 +3,7 @@ const Config = require('./config.js');
const Server = require('./routes/server.js');
const API = require('./routes/api.js');

-const Databse = require('./database/database.js');
+const Database = require('./database/database.js');

async function main() {
  Config.Load();
@@ -17,8 +17,8 @@ async function main() {
  });
  Logger.Info('Pre-Init Complete');

-  const Database = new Databse.IDatabase();
-  await Database.connect();
+  const db = new Database.IDatabase();
+  await db.connect();

  Server.Listen(process.env.PORT);
  API.Init();