Compare commits

...

No commits in common. "main" and "main2" have entirely different histories.
main ... main2

50 changed files with 5773 additions and 957 deletions

View file

@ -1,15 +0,0 @@
# EditorConfig is awesome: https://EditorConfig.org
# top-most EditorConfig file
root = true
[*]
indent_style = space
indent_size = 4
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.{yaml,yml}]
indent_size = 2

View file

@ -1,16 +1,19 @@
{
"env": {
"browser": false,
"commonjs": true,
"es2021": true,
"node": true
},
"extends": "eslint:recommended",
"overrides": [
],
"parserOptions": {
"ecmaVersion": "latest"
},
"extends": [
// "eslint:recommended",
// "airbnb-base"
],
"plugins": [
"import"
],
"rules": {
"indent": [
"error",
@ -22,33 +25,70 @@
],
"quotes": [
"error",
"single"
"double"
],
"semi": [
"error",
"always"
],
"no-unused-vars": [
"error",
{
"argsIgnorePattern": "^_"
}
],
"no-undef": "error",
"no-useless-escape": "error",
"no-var": "error",
"prefer-const": "error",
"quote-props": [
"error",
"consistent-as-needed"
"as-needed"
],
"padded-blocks": [
"dot-notation": [
"error",
{
"allowKeywords": false
}
],
"eol-last": "error",
"comma-dangle": [
"error",
"always-multiline"
],
"no-multi-spaces": "error",
"space-in-parens": [
"error",
"never"
],
"import/order": "error",
"no-return-await": "error",
"no-trailing-spaces": "error",
"padded-blocks": [
"error",
"never",
{
"allowSingleLineBlocks": false
}
],
"space-infix-ops": "error",
"radix": [
"error",
"always"
],
"object-curly-spacing": [
"error",
"never"
],
"space-before-function-paren": [
"error",
"always"
],
"one-var": [
"error",
"never"
],
"function-call-argument-newline": [
"one-var-declaration-per-line": [
"error",
"consistent"
"always"
],
"template-curly-spacing": [
"error",
"never"
]
}
}

135
.gitignore vendored
View file

@ -1,132 +1,5 @@
# ---> Node
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
brainz-social.db
brainz-social.db-shm
brainz-social.db-wal
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
.idea/

View file

@ -1,14 +0,0 @@
---
allowedLicenses:
- (MIT AND CC-BY-3.0)
- (MIT OR CC0-1.0)
- Apache-2.0
- BSD-2-Clause
- BSD-3-Clause
- BlueOak-1.0.0
- CC0-1.0
- CC-BY-3.0
- ISC
- MIT
- Python-2.0
- WTFPL

View file

@ -1,16 +0,0 @@
---
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.2.0
hooks:
- id: check-added-large-files
- id: check-merge-conflict
- id: check-yaml
- repo: https://github.com/pre-commit/mirrors-eslint
rev: v8.36.0
hooks:
- id: eslint
- repo: https://github.com/kontrolilo/kontrolilo
rev: v2.1.0
hooks:
- id: license-check-npm

11
LICENSE
View file

@ -1,11 +0,0 @@
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
Version 2, December 2004
Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>
Everyone is permitted to copy and distribute verbatim or modified copies of this license document, and changing it is allowed as long as the name is changed.
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. You just DO WHAT THE FUCK YOU WANT TO.

View file

@ -1,2 +1,7 @@
# haxsocial
# BRAINZ SOCIAL
## Creating your first account
```node ./bin/create_account.js USERNAME EMAIL_ADDRESS $(mkpasswd -m bcrypt)```
When `mkpasswd` prompts for a password, enter the password you want for the new account; the resulting bcrypt hash is passed to the script.

77
app.js
View file

@ -1,55 +1,82 @@
'use strict';
"use strict";
const express = require('express');
const glob = require('glob');
const { match: createPathMatch } = require('path-to-regexp');
const express = require("express");
const {glob} = require("glob");
const {match: createPathMatch} = require("path-to-regexp");
const bodyParser = require("body-parser");
const cookieParser = require("cookie-parser");
const qs = require("qs");
const databaseHandler = require("./lib/database-handler");
(async () => {
const app = express();
const routes = await glob('**/*.js', {
cwd: './routes',
app.set("query parser", "extended");
app.use(bodyParser.json({
type: "application/*+json",
verify (req, _res, buf) {
req.rawBody = buf;
},
}));
app.use(bodyParser.json({
verify (req, _res, buf) {
req.rawBody = buf;
},
}));
app.use(bodyParser.urlencoded({
extended: false,
verify (req, _res, buf) {
req.rawBody = buf;
},
}));
app.use(cookieParser());
const routes = await glob("**/*.js", {
cwd: "./routes",
dot: true,
});
const pathMatches = [];
app.use((req, res, next) => {
const requestUrl = new URL(req.url, 'https://example.com/');
let candidateUrl = '';
let secondCandidateUrl = '';
for ( const pathMatch in pathMatches ) {
if ( pathMatches[pathMatch](requestUrl.pathname) ) {
app.use((req, _res, next) => {
console.log(`${req.path}${Object.keys(req.query).length ? "?" : ""}${qs.stringify(req.query)}`);
const requestUrl = new URL(req.url, "https://example.com/");
let candidateUrl = "";
let secondCandidateUrl = "";
for (const pathMatch in pathMatches) {
if (pathMatches[pathMatch](requestUrl.pathname)) {
// If we get an exact match, we don't need to process further.
return next();
} else if ( requestUrl.pathname.endsWith('/') && pathMatches[pathMatch](`${requestUrl.pathname}index`) ) {
} if (requestUrl.pathname.endsWith("/") && pathMatches[pathMatch](`${requestUrl.pathname}index`)) {
// If we end with a /, and the index path matches, lets do the index path, but prioritize the non-index path.
const secondRequestUrl = new URL(requestUrl);
secondRequestUrl.pathname = `${requestUrl.pathname}index`;
candidateUrl = secondRequestUrl.toString().substring(19);
} else if ( pathMatches[pathMatch](`${requestUrl.pathname}/index`) ) {
} else if (pathMatches[pathMatch](`${requestUrl.pathname}/index`)) {
// If we don't end with a /, and the /index path matches, lets do the /index path, but prioritize paths checked previously.
const secondRequestUrl = new URL(requestUrl);
secondRequestUrl.pathname = `${requestUrl.pathname}/index`;
secondCandidateUrl = secondRequestUrl.toString().substring(19);
}
}
if ( candidateUrl !== '' ) {
if (candidateUrl !== "") {
req.url = candidateUrl;
console.log(candidateUrl);
return next();
}
if ( secondCandidateUrl !== '' ) {
if (secondCandidateUrl !== "") {
req.url = secondCandidateUrl;
return next();
}
return next();
} );
for ( const routeScript in routes ) {
const route = routes[routeScript].replace(/\.js$/, '');
console.log(route);
pathMatches.push( createPathMatch(`/${route}`));
});
for (const routeScript in routes) {
const route = routes[routeScript].replace(/\.js$/, "");
pathMatches.push(createPathMatch(`/${route}`));
const routeObj = require(`./routes/${route}`);
if ( routeObj.get ) {
app.get(`/${route}`, routeObj.get);
if (routeObj.route) {
routeObj.route(app.route(`/${route}`));
}
}
app.listen(process.env.PORT || 3000);
const server = app.listen(process.env.PORT || 3000, () => {
process.on("SIGINT", () => {
databaseHandler.db.close();
server.close();
});
});
})();

3
bin/create_account.js Normal file
View file

@ -0,0 +1,3 @@
// CLI helper: node ./bin/create_account.js USERNAME EMAIL PASSWORD_HASH
// The third argument is a pre-computed bcrypt hash (see README:
// `$(mkpasswd -m bcrypt)` prompts for the password and emits the hash).
const database_handler = require("../lib/database-handler");
database_handler.createAccount(process.argv[2], process.argv[3], process.argv[4]);

6
database.json Normal file
View file

@ -0,0 +1,6 @@
{
"dev": {
"driver": "sqlite3",
"filename": "brainz-social.db"
}
}

6
jsconfig.json Normal file
View file

@ -0,0 +1,6 @@
{
"compilerOptions": {
"module": "CommonJS"
},
"exclude": ["node_modules"]
}

34
lib/activitypub.js Normal file
View file

@ -0,0 +1,34 @@
// ActivityPub / JSON-LD helpers: a caching document loader plus a
// convenience wrapper that returns the compacted form of a document.
const jsonld = require("jsonld");
const databaseHandler = require("./database-handler");
module.exports = {
    // jsonld document loader that caches fetched schemas/contexts in SQLite
    // for 24 hours to avoid re-fetching well-known context documents.
    jsonldCustomLoader: async (url, options) => { // `options` is currently unused
        const cache = databaseHandler.getJsonldSchemaCache(url);
        if (cache && cache.schema) {
            // Cache hit: return the stored document in the loader's
            // {contextUrl, document, documentUrl} envelope.
            return {
                contextUrl: null,
                document: JSON.parse(cache.schema),
                documentUrl: url,
            };
        }
        // TODO: Write HTTP client handler.
        const retData = await jsonld.documentLoaders.node()(url);
        // Store for 24h; `expires` is unix seconds (see getJsonldSchemaCache).
        databaseHandler.storeJsonldSchemaCache(url, JSON.stringify(retData.document), (Math.floor(Date.now()/1000) + 86400));
        return retData;
    },
    // Returns the JSON-LD compacted form of: a URL string, a JSON string,
    // or an already-parsed object.
    compactedForm: async (urlOrObj) => {
        if (typeof urlOrObj === "string") {
            try {
                const url = new URL(urlOrObj);
                // NOTE(review): documentLoaders.node()(url) returns a Promise
                // of a {document, ...} envelope and is handed to expand()
                // without being awaited/unwrapped — confirm this branch
                // actually works for URL inputs.
                return jsonld.compact(await jsonld.expand(jsonld.documentLoaders.node()(url)), {});
            } catch (e) {
                // `new URL` threw, so the string is not a URL — treat it as raw JSON.
                return jsonld.compact(await jsonld.expand(JSON.parse(urlOrObj)), {});
            }
        } else {
            return jsonld.compact(await jsonld.expand(urlOrObj), {});
        }
    },
};
// Install the caching loader as jsonld's global default document loader.
jsonld.documentLoader = module.exports.jsonldCustomLoader;

166
lib/database-handler.js Normal file
View file

@ -0,0 +1,166 @@
const db = require("better-sqlite3")("brainz-social.db");
db.pragma("journal_mode = WAL");
module.exports = {
db,
application: new Proxy({}, {
get (target, key) {
if (typeof(key) === "Number") {
return db.prepare("SELECT * FROM applications WHERE id = ?").get(key);
} else {
return db.prepare("SELECT * FROM applications WHERE client_id = ?").get(key);
}
},
}),
config: {
getStatement: "SELECT * FROM config",
getByKey: (key) => {
return db.prepare(module.exports.config.getStatement + " WHERE key = ?").get(key)?.value ?? null;
},
},
getConfig: (key) => {
return module.exports.config.getByKey(key);
},
setConfig: (key, value) => {
db.prepare("INSERT OR REPLACE INTO config (key, value) VALUES(?, ?);").run(key, value);
},
createApplication: (client_name, redirect_uri, scopes, website, client_id, client_secret) => {
db.prepare("INSERT INTO applications (client_name, redirect_uri, scopes, website, client_id, client_secret) VALUES (?, ?, ?, ?, ?, ?);").run(client_name, redirect_uri, scopes, website, client_id, client_secret);
},
createToken: (token, scope, application_id, user_id, created_at) => {
db.prepare("INSERT INTO oauth_tokens (token, scopes, application_id, user_id, created_at, revoked) VALUES (?, ?, ?, ?, ?, false)").run(token, scope, application_id, user_id, created_at);
},
getTokenData: (token) => {
return db.prepare("SELECT application_id, user_id, created_at, revoked, scopes, token FROM oauth_tokens WHERE token = ?").get(token);
},
revokeToken: (token) => {
db.prepare("UPDATE oauth_tokens SET revoked = true WHERE token = ?").run(token);
},
getAccountByUsername: (username) => {
return db.prepare("SELECT id, username, email, password_hash, account_tier, created_at FROM accounts WHERE username = ?").get(username);
},
createAccount: (username, email, password_hash) => {
db.prepare("INSERT INTO accounts (username, email, password_hash, account_tier, created_at) VALUES (?, ?, ?, 0)").run(username, email, password_hash, Date.now());
},
checkAuthCookie: (cookie_value) => {
return db.prepare("SELECT id, cookie_value, created_at, user_id, revoked FROM cookies WHERE cookie_value = ?").get(cookie_value);
},
revokeAuthCookie: (cookie_value) => {
db.prepare("UPDATE cookies SET revoked = true WHERE cookie_value = ?").run(cookie_value);
},
createAuthCookie: (cookie_value, created_at, user_id) => {
db.prepare("INSERT INTO cookies (cookie_value, created_at, user_id, revoked) VALUES (?, ?, ?, false)").run(cookie_value, created_at, user_id);
},
createCsrfToken: (url, created_at, cookie_value) => {
const db_row_id = db.prepare("INSERT INTO csrf_token (url, created_at, cookie_value) VALUES (?, ?, ?)").run(url, created_at, cookie_value).lastInsertRowid;
return db.prepare("SELECT id FROM csrf_token WHERE rowid = ?").get(db_row_id);
},
createCsrfTokenAssociation: (...ids) => {
for (const source_id in ids) {
if (Number.parseInt(ids[source_id], 10) === ids[source_id]) {
for (const destination_id in ids) {
if (Number.parseInt(ids[destination_id], 10) === ids[destination_id]) {
db.prepare("INSERT INTO csrf_token_relations (source_id, destination_id) VALUES (?, ?)").run(ids[source_id], ids[destination_id]);
}
}
}
}
},
removeAssociatedCsrfTokens: (id) => {
db.prepare("DELETE FROM csrf_token WHERE id IN (SELECT destination_id AS id FROM csrf_token_relations WHERE source_id = ?)").run(id);
},
getCsrfToken: (url) => {
return db.prepare("SELECT id, url, created_at, cookie_value FROM csrf_token WHERE url = ?").get(url);
},
createOauthCode: (code, application_id, user_id, scopes, created_at) => {
db.prepare("INSERT INTO oauth_code (code, application_id, user_id, scopes, created_at, revoked) VALUES (?, ?, ?, ?, ?, false)").run(code, application_id, user_id, scopes, created_at);
},
getOauthCode: (code) => {
return db.prepare("SELECT code, application_id, user_id, scopes, created_at, revoked FROM oauth_code WHERE code = ?").get(code);
},
revokeOauthCode: (code) => {
db.prepare("UPDATE oauth_code SET revoked = true WHERE code = ?").run(code);
},
selectApplicationByAuthToken: (token) => {
return db.prepare("SELECT id, client_id, client_secret, redirect_uri, scopes, website FROM applications WHERE id in (SELECT application_id as id FROM oauth_tokens WHERE token = ?);").get(token);
},
getVapidKey: () => {
const vapidPublic = db.prepare("SELECT value FROM config WHERE key = vapid_key_public").get();
const vapidPrivate = db.prepare("SELECT value FROM config WHERE key = vapid_key_private").get();
if (vapidPublic.value && vapidPrivate.value) {
return {public: vapidPublic, private: vapidPrivate};
}
return null;
},
setVapidKey: (publicKey, privateKey) => {
db.prepare("INSERT INTO config (key, value) VALUES (vapid_key_public, ?)").run(publicKey);
db.prepare("INSERT INTO config (key, value) VALUES (vapid_key_private, ?)").run(privateKey);
},
getJsonldSchemaCache: (url) => {
return db.prepare("SELECT schema FROM jsonld_schema_cache WHERE schema_uri = ? AND expires > ?").get(url, Math.floor(Date.now() / 1000));
},
storeJsonldSchemaCache: (url, schema, expiry) => {
db.prepare("INSERT INTO jsonld_schema_cache (schema, schema_uri, expires) VALUES (?, ?, ?)").run(schema, url, expiry);
},
getAccountByToken: (token) => {
return db.prepare("SELECT id, username, email, password_hash, account_tier, created_at FROM accounts WHERE id IN (SELECT user_id FROM oauth_tokens WHERE token = ?)").get(token);
},
getAccountActivityByAccount: (user_id) => {
return db.prepare("SELECT id, object, type, local, uri_id, owner FROM activity_objects WHERE (type = 'https://www.w3.org/ns/activitystreams#Person' OR type = 'https://www.w3.org/ns/activitystreams#Service' OR type = 'https://www.w3.org/ns/activitystreams#Group') AND local = true AND owner in (SELECT username FROM accounts WHERE id = ?)").get(user_id);
},
addActivity: (object, type, local, uri_id, owner, created_at) => {
db.prepare("INSERT INTO activity_objects (object, type, local, uri_id, owner, created_at) VALUES (?, ?, ?, ?, ?, ?)").run(object, type, local, uri_id, owner, created_at);
},
activity: new Proxy({}, {
get: (target, key) => {
return db.prepare("SELECT * FROM activity_objects WHERE uri_id = ?").get(key);
},
set: (target, key, value) => {
db.prepare("INSERT INTO activity_objects (object, type, local, uri_id, owner, created_at) VALUES (?, ?, ?, ?, ?, ?)").run(value.object, value.type, value.local.toString(), key, value.owner, value.created_at);
},
}),
getLastStatus: (owner) => {
return db.prepare("SELECT created_at FROM activity_objects WHERE type = 'https://www.w3.org/ns/activitystreams#Note' AND owner = ? ORDER BY created_at DESC").get(owner);
},
getStatusCount: (owner) => {
return db.prepare("SELECT COUNT(*) AS count FROM activity_objects WHERE type = 'https://www.w3.org/ns/activitystreams#Note' AND owner = ?").get(owner);
},
storeW3idSecurityKey: (key_uri, publicKey, privateKey, expires) => {
db.prepare("INSERT INTO w3id_security_keys (key_uri, public_key, private_key, expires) VALUES (?, ?, ?, ?)").run(key_uri, publicKey, privateKey, expires);
},
};

56
lib/input_validate.js Normal file
View file

@ -0,0 +1,56 @@
module.exports = {
validate_exists: (body, fields) => {
if (!body) {
return {error: "Validation failed, no body provided."};
}
for (const field in fields) {
if (!body[fields[field]]) {
return {error: `Validation failed, field ${field} missing from request.`};
}
}
return true;
},
validate_appropriate_scopes: (max_scope, scope_requested) => {
const max_scope_array_temp = max_scope.split(/[\s+]+/);
const max_scope_array = [];
for (const scope in max_scope_array_temp) {
if (max_scope_array_temp[scope].match(/[a-zA-Z0-9:]/)) {
max_scope_array.push(max_scope_array_temp[scope]);
if (max_scope_array_temp[scope] === "read") {
max_scope_array.push("read:accounts", "read:blocks", "read:bookmarks", "read:favorites", "read:filters", "read:follows", "read:lists", "read:mutes", "read:notifications", "read:search", "read:statuses");
}
if (max_scope_array_temp[scope] === "write") {
max_scope_array.push("write:accounts", "write:blocks", "write:bookmarks", "write:conversations", "write:favourites", "write:filters", "write:follows", "write:lists", "write:media", "write:mutes", "write:notifications", "write:reports", "write:statuses");
}
if (max_scope_array_temp[scope] === "follow") {
max_scope_array.push("read:blocks", "write:blocks", "read:follows", "write:follows", "read:mutes", "write:mutes");
}
if (max_scope_array_temp[scope] === "admin:read") {
max_scope_array.push("admin:read:accounts", "admin:read:reports", "admin:read:domain_allows", "admin:read:domain_blocks", "admin:read:ip_blocks", "admin:read:email_domain_blocks", "admin:read:canonical_email_blocks");
}
if (max_scope_array_temp[scope] === "admin:write") {
max_scope_array.push("admin:write:accounts", "admin:write:reports", "admin:write:domain_allows", "admin:write:domain_blocks", "admin:write:ip_blocks", "admin:write:email_domain_blocks", "admin:write:canonical_email_blocks");
}
}
}
const scope_requested_array = scope_requested.split(/[\s+]+/);
for (const scope in scope_requested_array) {
if (max_scope_array.includes(scope_requested_array[scope])) {
continue;
}
return false;
}
return true;
},
};

53
middleware/auth.js Normal file
View file

@ -0,0 +1,53 @@
const databaseHandler = require("../lib/database-handler");
const input_validate = require("../lib/input_validate");
module.exports = {
auth_token: (needs_user, need_scopes) => {
return (req, res, next) => {
const token = databaseHandler.getTokenData(req.header("authorization").split(/\s+/)[1]);
if (!token) {
res.status(401);
res.json({
error: "UNAUTHENTICATED",
});
res.end();
return;
}
if (token.revoked) {
res.status(401);
res.json({
error: "UNAUTHENTICATED",
});
res.end();
return;
}
if (needs_user && token.user_id === 0) {
res.status(401);
res.json({
error: "INSUFFICIENT_AUTHORIZATION",
});
res.end();
return;
}
if (need_scopes && !input_validate.validate_appropriate_scopes(token.scopes, need_scopes)) {
res.status(401);
res.json({
error: "INSUFFICIENT_SCOPE",
});
res.end();
return;
}
if (!req.brainz) {
req.brainz = {};
}
req.brainz.token = token;
next();
};
},
};

View file

@ -0,0 +1,43 @@
"use strict";
const async = require("async");
let dbm;
let type;
let seed;
/**
 * We receive the dbmigrate dependency from dbmigrate initially.
 * This enables us to not have to rely on NODE_PATH.
 */
exports.setup = function (options, seedLink) {
    dbm = options.dbmigrate;
    type = dbm.dataType;
    seed = seedLink;
};
// Migration up: create the OAuth `applications` table plus a unique index
// on client_id (used for client-credential lookups).
exports.up = function (db, callback) {
    async.series([
        db.createTable.bind(db, "applications", {
            id: {type: "int", primaryKey: true, autoIncrement: true},
            client_name: "string",
            redirect_uri: "string",
            scopes: "string",
            website: "string",
            client_id: {type: "string", unique: true},
            client_secret: "string",
        }),
        // Trailing `true` marks the index unique.
        db.addIndex.bind(db, "applications", "clientIdIndex", ["client_id"], true),
    ], callback);
};
// Migration down: remove the index first, then the table.
exports.down = function (db, callback) {
    async.series([
        db.removeIndex.bind(db, "applications", "clientIdIndex"),
        db.dropTable.bind(db, "applications"),
    ], callback);
};
exports._meta = {
    version: 1,
};

View file

@ -0,0 +1,31 @@
"use strict";
let dbm;
let type;
let seed;
/**
 * We receive the dbmigrate dependency from dbmigrate initially.
 * This enables us to not have to rely on NODE_PATH.
 */
exports.setup = function (options, seedLink) {
    dbm = options.dbmigrate;
    type = dbm.dataType;
    seed = seedLink;
};
// Migration up: create the `config` key/value store used by
// lib/database-handler (getConfig/setConfig, VAPID keys, ...).
exports.up = function (db) {
    return db.createTable("config", {
        id: {type: "int", primaryKey: true, autoIncrement: true},
        key: {type: "string", unique: true},
        value: "string",
    });
};
// Migration down: drop the table.
exports.down = function (db) {
    return db.dropTable("config");
};
exports._meta = {
    version: 1,
};

View file

@ -0,0 +1,45 @@
"use strict";
const async = require("async");
let dbm;
let type;
let seed;
/**
* We receive the dbmigrate dependency from dbmigrate initially.
* This enables us to not have to rely on NODE_PATH.
*/
exports.setup = function (options, seedLink) {
dbm = options.dbmigrate;
type = dbm.dataType;
seed = seedLink;
};
exports.up = function (db, callback) {
async.series([
db.createTable.bind(db, "oauth_tokens", {
id: {type: "int", primaryKey: true, autoIncrements: true},
token: {type: "string", unique: true},
application_id: "int",
scopes: "string",
user_id: "int",
revoked: "boolean",
created_at: "int",
}),
db.addIndex.bind(db, "oauth_tokens", "oauth_token_index", ["token"]),
db.addIndex.bind(db, "oauth_tokens", "oauth_token_user_id_index", ["user_id"]),
], callback);
};
exports.down = function (db, callback) {
async.series([
db.removeIndex.bind(db, "oauth_tokens", "oauth_token_user_id_index"),
db.removeIndex.bind(db, "oauth_tokens", "oauth_token_index"),
db.dropTable("oauth_tokens"),
], callback);
};
exports._meta = {
version: 1,
};

View file

@ -0,0 +1,39 @@
"use strict";
let dbm;
let type;
let seed;
/**
* We receive the dbmigrate dependency from dbmigrate initially.
* This enables us to not have to rely on NODE_PATH.
*/
exports.setup = function (options, seedLink) {
dbm = options.dbmigrate;
type = dbm.dataType;
seed = seedLink;
};
exports.up = function (db, callback) {
db.createTable("accounts", {
id: {type: "int", primaryKey: true, autoIncrement: true},
username: {type: "string", unique: true},
email: "string",
password_hash: "string",
account_tier: "int",
}, (result) => {
if (result) {
callback(result);
}
db.runSql("INSERT INTO \"accounts\" (\"username\",\"email\",\"password_hash\",\"account_tier\") VALUES (\"guest\",\"null@null.null\",\"purposely_invalid_password\",0);", callback);
});
};
exports.down = function (db) {
return db.dropTable("accounts");
};
exports._meta = {
version: 1,
};

View file

@ -0,0 +1,41 @@
"use strict";
const async = require("async");
let dbm;
let type;
let seed;
/**
 * We receive the dbmigrate dependency from dbmigrate initially.
 * This enables us to not have to rely on NODE_PATH.
 */
exports.setup = function (options, seedLink) {
    dbm = options.dbmigrate;
    type = dbm.dataType;
    seed = seedLink;
};
// Migration up: create the session-cookie table plus an index on
// cookie_value (the lookup key used by checkAuthCookie).
exports.up = function (db, callback) {
    async.series([
        db.createTable.bind(db, "cookies", {
            id: {type: "int", primaryKey: true, autoIncrement: true},
            cookie_value: {type: "string", unique: true},
            created_at: "int",
            user_id: "int",
            revoked: "boolean",
        }),
        db.addIndex.bind(db, "cookies", "cookies_cookie_value_index", ["cookie_value"]),
    ], callback);
};
// Migration down: remove the index, then the table.
exports.down = function (db, callback) {
    async.series([
        db.removeIndex.bind(db, "cookies", "cookies_cookie_value_index"),
        db.dropTable.bind(db, "cookies"),
    ], callback);
};
exports._meta = {
    version: 1,
};

View file

@ -0,0 +1,31 @@
"use strict";
let dbm;
let type;
let seed;
/**
 * We receive the dbmigrate dependency from dbmigrate initially.
 * This enables us to not have to rely on NODE_PATH.
 */
exports.setup = function (options, seedLink) {
    dbm = options.dbmigrate;
    type = dbm.dataType;
    seed = seedLink;
};
// Migration up: create `csrf_token` (the cookie_value column is added by a
// later migration).
exports.up = function (db) {
    return db.createTable("csrf_token", {
        id: {type: "int", primaryKey: true, autoIncrement: true},
        url: "string",
        created_at: "int",
    });
};
// Migration down: drop the table.
exports.down = function (db) {
    return db.dropTable("csrf_token");
};
exports._meta = {
    version: 1,
};

View file

@ -0,0 +1,31 @@
"use strict";
let dbm;
let type;
let seed;
/**
 * We receive the dbmigrate dependency from dbmigrate initially.
 * This enables us to not have to rely on NODE_PATH.
 */
exports.setup = function (options, seedLink) {
    dbm = options.dbmigrate;
    type = dbm.dataType;
    seed = seedLink;
};
// Migration up: create the csrf_token association table (source/destination
// id pairs, written by createCsrfTokenAssociation and consumed by
// removeAssociatedCsrfTokens in lib/database-handler).
exports.up = function (db) {
    return db.createTable("csrf_token_relations", {
        id: {type: "int", primaryKey: true, autoIncrement: true},
        source_id: "int",
        destination_id: "int",
    });
};
// Migration down: drop the table.
exports.down = function (db) {
    return db.dropTable("csrf_token_relations");
};
exports._meta = {
    version: 1,
};

View file

@ -0,0 +1,27 @@
"use strict";
let dbm;
let type;
let seed;
/**
 * We receive the dbmigrate dependency from dbmigrate initially.
 * This enables us to not have to rely on NODE_PATH.
 */
exports.setup = function (options, seedLink) {
    dbm = options.dbmigrate;
    type = dbm.dataType;
    seed = seedLink;
};
// Migration up: tie each CSRF token to the session cookie it was issued for.
exports.up = function (db) {
    return db.addColumn("csrf_token", "cookie_value", "string");
};
// Migration down: remove the column again.
exports.down = function (db) {
    return db.removeColumn("csrf_token", "cookie_value");
};
exports._meta = {
    version: 1,
};

View file

@ -0,0 +1,42 @@
"use strict";
const async = require("async");
let dbm;
let type;
let seed;
/**
* We receive the dbmigrate dependency from dbmigrate initially.
* This enables us to not have to rely on NODE_PATH.
*/
exports.setup = function (options, seedLink) {
dbm = options.dbmigrate;
type = dbm.dataType;
seed = seedLink;
};
exports.up = function (db, callback) {
async.series([
db.createTable.bind(db, "oauth_code", {
id: {type: "int", primaryKey: true, autoIncrements: true},
code: {type: "string", unique: true},
application_id: "int",
scopes: "string",
user_id: "int",
created_at: "int",
}),
db.addIndex.bind(db, "oauth_code", "oauth_code_index", ["code"]),
], callback);
};
exports.down = function (db, callback) {
async.series([
db.removeIndex.bind(db, "oauth_code", "oauth_code_index"),
db.dropTable.bind(db, "oauth_code"),
], callback);
};
exports._meta = {
version: 1,
};

View file

@ -0,0 +1,27 @@
"use strict";
let dbm;
let type;
let seed;
/**
 * We receive the dbmigrate dependency from dbmigrate initially.
 * This enables us to not have to rely on NODE_PATH.
 */
exports.setup = function (options, seedLink) {
    dbm = options.dbmigrate;
    type = dbm.dataType;
    seed = seedLink;
};
// Migration up: add a revocation flag to oauth_code.
// NOTE(review): pre-existing rows get NULL (falsy) here, which reads as
// not-revoked — confirm that is the intended default.
exports.up = function (db) {
    return db.addColumn("oauth_code", "revoked", "boolean");
};
// Migration down: remove the column again.
exports.down = function (db) {
    return db.removeColumn("oauth_code", "revoked");
};
exports._meta = {
    version: 1,
};

View file

@ -0,0 +1,32 @@
"use strict";
let dbm;
let type;
let seed;
/**
* We receive the dbmigrate dependency from dbmigrate initially.
* This enables us to not have to rely on NODE_PATH.
*/
exports.setup = function (options, seedLink) {
dbm = options.dbmigrate;
type = dbm.dataType;
seed = seedLink;
};
exports.up = function (db) {
return db.createTable("jsonld_schema_cache", {
id: {type: "int", primaryKey: true, autoIncrements: true},
schema_uri: "string",
schema: "string",
expires: "int",
});
};
exports.down = function (db) {
return db.dropTable("jsonld_schema_cache");
};
exports._meta = {
version: 1,
};

View file

@ -0,0 +1,34 @@
"use strict";
let dbm;
let type;
let seed;
/**
* We receive the dbmigrate dependency from dbmigrate initially.
* This enables us to not have to rely on NODE_PATH.
*/
exports.setup = function (options, seedLink) {
dbm = options.dbmigrate;
type = dbm.dataType;
seed = seedLink;
};
exports.up = function (db) {
return db.createTable("activity_objects", {
id: {type: "int", primaryKey: true, autoIncrements: true},
object: "string",
type: "string",
local: "boolean",
uri_id: "string",
owner: "string",
});
};
exports.down = function (db) {
return db.dropTable("activity_objects");
};
exports._meta = {
version: 1,
};

View file

@ -0,0 +1,27 @@
"use strict";
let dbm;
let type;
let seed;
/**
 * We receive the dbmigrate dependency from dbmigrate initially.
 * This enables us to not have to rely on NODE_PATH.
 */
exports.setup = function (options, seedLink) {
    dbm = options.dbmigrate;
    type = dbm.dataType;
    seed = seedLink;
};
// Migration up: add a creation timestamp to activity_objects.
// NOTE(review): declared as "string" although lib/database-handler writes
// numeric timestamps and sorts on this column — confirm the type is intended.
exports.up = function (db) {
    return db.addColumn("activity_objects", "created_at", "string");
};
// Migration down: remove the column again.
exports.down = function (db) {
    return db.removeColumn("activity_objects", "created_at");
};
exports._meta = {
    version: 1,
};

View file

@ -0,0 +1,33 @@
"use strict";
let dbm;
let type;
let seed;
/**
* We receive the dbmigrate dependency from dbmigrate initially.
* This enables us to not have to rely on NODE_PATH.
*/
exports.setup = function (options, seedLink) {
dbm = options.dbmigrate;
type = dbm.dataType;
seed = seedLink;
};
exports.up = function (db) {
return db.createTable("w3id_security_keys", {
id: {type: "int", primaryKey: true, autoIncrements: true},
key_uri: "string",
public_key: "string",
private_key: "string",
expires: "int",
});
};
exports.down = function (db) {
return db.dropTable("w3id_security_keys");
};
exports._meta = {
version: 1,
};

View file

@ -0,0 +1,30 @@
"use strict";
let dbm;
let type;
let seed;
/**
 * We receive the dbmigrate dependency from dbmigrate initially.
 * This enables us to not have to rely on NODE_PATH.
 */
exports.setup = function (options, seedLink) {
    dbm = options.dbmigrate;
    type = dbm.dataType;
    seed = seedLink;
};
// Migration up: add a creation timestamp to accounts.
// NOTE(review): Date.now() runs once when this migration module is loaded,
// so the column DEFAULT is the migration-run time baked in as a constant —
// confirm that is intended rather than a per-row timestamp. Type is
// "string" although the value is numeric milliseconds.
exports.up = function (db) {
    return db.addColumn("accounts", "created_at", {
        type: "string",
        default: Date.now(),
    });
};
// Migration down: remove the column again.
exports.down = function (db) {
    return db.removeColumn("accounts", "created_at");
};
exports._meta = {
    version: 1,
};

4632
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -1,24 +1,33 @@
{
"name": "haxsocial",
"name": "brainz-social",
"version": "0.0.1",
"description": "",
"description": "Brainz Social",
"main": "app.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "https://forgejo.hax.social/rallias/haxsocial.git"
},
"author": "Andrew Pietila <a.pietila@protonmail.com>",
"license": "WTFPL",
"dependencies": {
"async": "^3.2.4",
"bcrypt": "^5.1.1",
"better-sqlite3": "^8.6.0",
"body-parser": "^1.20.2",
"cookie-parser": "^1.4.6",
"db-migrate": "^0.11.14",
"db-migrate-sqlite3": "^0.5.0",
"express": "^4.18.2",
"glob": "^9.3.0",
"path-to-regexp": "^6.2.1"
"glob": "^10.3.10",
"jsonld": "^8.3.1",
"path-to-regexp": "^6.2.1",
"qs": "^6.11.2",
"sanitize-html": "^2.11.0",
"web-push": "^3.6.6"
},
"type": "commonjs",
"devDependencies": {
"eslint": "^8.36.0",
"license-checker": "^25.0.1"
"eslint": "^8.51.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-plugin-import": "^2.28.1"
}
}

48
routes/api/v1/accounts.js Normal file
View file

@ -0,0 +1,48 @@
const crypto = require("crypto");
const bcrypt = require("bcrypt");
const databaseHandler = require("../../../lib/database-handler");
const input_validate = require("../../../lib/input_validate");
const auth = require("../../../middleware/auth");
module.exports = {
route: ((routeObj) => {
routeObj.post(auth.auth_token(false, "write:accounts"), (req, res) => {
const authToken = req.header("Authorization").split(/\s+/)[1];
const validation_result = input_validate.validate_exists(req.body, ["username", "email", "password", "agreement", "locale"]);
if (validation_result !== true) {
res.status(422);
res.json(validation_result);
return;
}
const username = req.body.username;
if (databaseHandler.getAccountByUsername(username)) {
res.status(422);
res.json({error: "Validation failed, username taken.", details: {username: {error: "ERR_TAKEN", description: "Username taken."}}});
return;
}
// TODO: We're just taking emails at face value for now.
const email = req.body.email;
const password = req.body.password;
const password_hash = bcrypt.hashSync(password, bcrypt.genSaltSync());
databaseHandler.createAccount(username, email, password_hash);
const userObject = databaseHandler.getAccountByUsername(username);
const userToken = crypto.randomBytes(32).toString("base64");
const created_at = Math.floor(Date.now() / 1000);
const application = databaseHandler.application[authToken.application_id];
databaseHandler.createToken(userToken, application.scopes, application.id, userObject.id, created_at);
res.status(200);
res.json({
access_token: userToken,
token_type: "Bearer",
scope: application.scopes,
created_at,
});
});
}),
};