Initial commit

Co-Authored-By: mikaeltellhed <2311083+mikaeltellhed@users.noreply.github.com>
Author: Eric Tuvesson
Date: 2023-12-04 10:20:38 +01:00
Commit: 663c0a2e39

43 changed files with 39568 additions and 0 deletions


@@ -0,0 +1,16 @@
# Noodl Cloud Services Docker
This package contains the Docker image for the self-hosted Noodl Cloud Service.
## Health Endpoints
```
# The application is up and running.
/health/live
# The application is ready to serve requests.
/health/ready
# Aggregates all health check procedures in the application.
/health
```
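
For example, a container orchestrator (or a `HEALTHCHECK` command) can poll the readiness endpoint. Below is a minimal Node.js probe as a sketch; it assumes the service listens on port 3000, so adjust the host and port to your deployment:

```js
// Readiness probe sketch: exits 0 when /health/ready responds with 2xx,
// otherwise exits 1, which makes it usable as a container health check.
const http = require("http");

const req = http.get("http://localhost:3000/health/ready", (res) => {
  res.resume(); // drain the response body
  process.exit(res.statusCode >= 200 && res.statusCode < 300 ? 0 : 1);
});

req.on("error", () => process.exit(1));
req.setTimeout(5000, () => {
  req.destroy();
  process.exit(1);
});
```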

File diff suppressed because it is too large.


@@ -0,0 +1,20 @@
{
"name": "@noodl/cloudservice-docker",
"version": "1.0.0",
"description": "Low-code for when experience matter",
"author": "Noodl <info@noodl.net>",
"homepage": "https://noodl.net",
"license": "MIT",
"scripts": {
"start": "node ./src/index.js",
"test": "./node_modules/.bin/jasmine"
},
"dependencies": {
"@noodl/cloudservice": "file:../noodl-cloudservice",
"cors": "^2.8.5",
"express": "^4.17.1"
},
"devDependencies": {
"jasmine": "^4.0.2"
}
}


@@ -0,0 +1,11 @@
{
"spec_dir": "spec",
"spec_files": [
"**/*[sS]pec.js"
],
"helpers": [
"helpers/**/*.js"
],
"stopSpecOnExpectationFailure": false,
"random": false
}


@@ -0,0 +1,57 @@
const { createNoodlServer } = require("@noodl/cloudservice");
const express = require("express");
const cors = require("cors");
// Parse an environment variable as a number; return undefined if it is not set or not numeric
function _getNumberEnv(_value) {
const val = Number(_value);
if (isNaN(val)) return undefined;
else return val;
}
const port = Number(process.env.PORT || 3000);
const databaseURI = String(process.env.DATABASE_URI);
const masterKey = String(process.env.MASTER_KEY);
const appId = String(process.env.APP_ID);
const server = express();
server.use(
cors({
// Set the browser cache time for preflight responses
maxAge: 86400,
})
);
server.use(
express.urlencoded({
extended: true,
})
);
server.use(
express.json({
limit: "2mb",
})
);
const noodlServer = createNoodlServer({
port,
databaseURI,
masterKey,
appId,
functionOptions: {
timeOut: _getNumberEnv(process.env.CLOUD_FUNCTIONS_TIMEOUT),
memoryLimit: _getNumberEnv(process.env.CLOUD_FUNCTIONS_MEMORY_LIMIT),
},
parseOptions: {
maxUploadSize: process.env.MAX_UPLOAD_SIZE || "20mb",
// set or override any of the Parse settings
},
});
server.use("/", noodlServer.middleware);
server.listen(port, () => {
console.log(`Noodl Parse Server listening at http://localhost:${port}`);
});


@@ -0,0 +1,43 @@
# Noodl Cloud Service
Welcome to the Noodl Cloud Service project!
## About Noodl
Noodl is a low-code platform where designers and developers build custom applications and experiences. As a visual programming environment, it is designed to speed up development and let you build applications with minimal coding knowledge.
## Getting started
```js
const express = require("express");
const { createNoodlServer } = require("@noodl/cloudservice");

const port = 3000;

const noodlServer = createNoodlServer({
  port,
  databaseURI: "insert",
  masterKey: "insert",
  appId: "insert",
  parseOptions: {
    // set or override any of the Parse settings
    //
    // A custom file adapter can be set here:
    // filesAdapter ...
  },
});

const server = express();

// Parse JSON request bodies (needed for cloud function calls and deploys)
server.use(express.json());
server.use("/", noodlServer.middleware);

server.listen(port, () => {
  console.log(`Noodl Cloud Service listening at http://localhost:${port}`);
});
```
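
Once the server is running, deployed cloud functions are served over HTTP with `POST /functions/<function name>`; a specific deploy can be pinned with the `x-noodl-cloud-version` header, otherwise the latest deployed version is used. The sketch below is a minimal, hypothetical call (the function name `helloWorld` and port 3000 are placeholders), using `node-fetch` or the global `fetch` on Node 18+:

```js
const fetch = require("node-fetch");

async function callFunction() {
  const res = await fetch("http://localhost:3000/functions/helloWorld", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      // Optionally pin a specific deploy:
      // "x-noodl-cloud-version": "<version>",
    },
    body: JSON.stringify({ name: "World" }),
  });

  // The middleware forwards the function's status code and body
  console.log(res.status, await res.text());
}

callFunction();
```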
## License
Please note that this project is released with a [Contributor Code of Conduct](../../CODE_OF_CONDUCT.md). By participating in this project you agree to abide by its terms.
This project is licensed under the MIT License - see the [LICENSE.md](../../LICENSE.md) file for details.
## Contact
If you have any questions, concerns, or feedback, please open a discussion in the [discussions tracker](https://github.com/noodlapp/noodl-cloudservice/discussions) or join our Discord channel and we'll be happy to assist you!

packages/noodl-cloudservice/package-lock.json (generated file, 12111 lines)

File diff suppressed because it is too large.


@@ -0,0 +1,22 @@
{
"name": "@noodl/cloudservice",
"version": "1.0.0",
"description": "Low-code for when experience matter",
"author": "Noodl <info@noodl.net>",
"homepage": "https://noodl.net",
"license": "MIT",
"main": "./src/index.js",
"scripts": {
"test": "./node_modules/.bin/jasmine"
},
"dependencies": {
"isolated-vm": "^4.4.2",
"node-fetch": "2.6.7",
"parse-server": "^4.10.4",
"parse-server-gcs-adapter": "git+https://github.com/noodlapp/noodl-parse-server-gcs-adapter.git",
"winston-mongodb": "^5.1.0"
},
"devDependencies": {
"jasmine": "^4.0.2"
}
}


@@ -0,0 +1,408 @@
const fetch = require("node-fetch");
const ivm = require("isolated-vm");
const fs = require("fs");
// Create a snapshot of a given runtime if needed,
// or serve it from the cache
const snapshots = {};
async function getRuntimeSnapshot(url) {
if (snapshots[url]) {
try {
await snapshots[url];
} catch (e) {
console.log(`Disposing runtime snapshot due to error in create: `, e);
delete snapshots[url];
}
}
if (snapshots[url]) return snapshots[url];
else
return (snapshots[url] = (async () => {
console.log("- Loading runtime script");
const res = await fetch(url);
const script = await res.text();
return ivm.Isolate.createSnapshot([
{
code: `var _noodl_handleReq, _noodl_api_response,_noodl_process_jobs;`,
}, // Must declare, otherwise we will get error when trying to set as global from function
{ code: script },
]);
})());
}
const _defaultRuntime = process.env.NOODL_DEFAULT_CLOUD_RUNTIME;
// Create an isolated context for a specific environment
async function createContext(env) {
if (env.version === undefined) {
throw Error("No version specified when creating context.");
}
const timeOut = env.timeOut ? env.timeOut / 1000 : 15; // seconds (env.timeOut arrives in milliseconds)
const memoryLimit = env.memoryLimit || 128;
// Load custom code
console.log("Creating context for version " + env.version);
console.log("- Loading cloud deploy");
const res = await fetch(
env.backendEndpoint +
'/classes/Ndl_CF?where={"version":"' +
env.version +
'"}',
{
headers: {
"X-Parse-Application-Id": env.appId,
"X-Parse-Master-Key": env.masterKey,
},
}
);
const data = await res.json();
let code = "",
cloudRuntime;
if (data.results && data.results.length > 0) {
data.results.sort((a, b) => new Date(a.createdAt) - new Date(b.createdAt)); // oldest first
cloudRuntime = data.results[0].runtime;
data.results.forEach((d) => {
code += d.code;
});
} else {
throw Error(
`No cloud functions found for env ${env.appId} and version ${env.version}.`
);
}
console.log("- Starting up isolate");
let runtime = cloudRuntime || _defaultRuntime;
if (!runtime.endsWith(".js")) runtime = runtime + ".js";
console.log("- Using runtime: " + runtime);
const snapshot = await getRuntimeSnapshot(
(process.env.NOODL_CLOUD_RUNTIMES_LOCATION ||
"https://runtimes.noodl.cloud") +
"/" +
runtime
);
const isolate = new ivm.Isolate({ memoryLimit, snapshot });
const context = await isolate.createContext();
const jail = context.global;
// Bootstrap message handler
jail.setSync("global", context.global.derefInto());
// ---------------- API ----------------
let ongoingAPICalls = 0;
const maxOngoingAPICalls = 100;
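// Fail all pending requests with a 500 response and clear their handlers
// (used when the isolate runs out of memory or has been disposed)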
function _internalServerError(message) {
Object.keys(responseHandlers).forEach((k) => {
if (typeof responseHandlers[k] === "function") {
responseHandlers[k]({
statusCode: 500,
body: JSON.stringify({ error: message || "Internal server error" }),
});
delete responseHandlers[k];
}
});
}
async function _eval(script) {
if (isolate.isDisposed) return;
try {
await context.eval(script, { timeout: timeOut * 1000 });
} catch (e) {
console.log("_eval", e);
if (
e.message ===
"Isolate was disposed during execution due to memory limit"
) {
// Isolate was disposed, return out of memory error for all pending requests
_internalServerError("Out of memory");
}
}
if (isolate.isDisposed) {
// The isolate was disposed, end all currently pending requests
_internalServerError();
}
}
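// Deliver the result of a host-side API call back into the isolate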
function _api_respond(token, res) {
ongoingAPICalls--;
if (ongoingAPICalls < 0) ongoingAPICalls = 0;
if (token !== undefined)
_eval("_noodl_api_response('" + token + "'," + JSON.stringify(res) + ")");
}
// Loggers
const logger = env.logger;
const apiFunctions = {
log: function (token, args) {
logger.log(
args.level || "info",
typeof args === "string" ? args : args.message
);
_api_respond(token);
},
fetch: function (token, args) {
fetch(args.url, args)
.then((r) => {
r.text()
.then((text) => {
_api_respond(token, {
ok: r.ok,
redirected: r.redirected,
statusText: r.statusText,
status: r.status,
headers: r.headers.raw(),
body: text,
});
})
.catch((e) => {
_api_respond(token, { error: e.message || true });
});
})
.catch((e) => {
_api_respond(token, { error: e.message || true });
});
},
setTimeout: function (token, millis) {
setTimeout(() => {
_api_respond(token);
}, millis);
},
};
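// Entry point called from inside the isolate to invoke host-side API functions (log, fetch, setTimeout)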
jail.setSync("_noodl_api_call", function (functionName, token, args) {
ongoingAPICalls++;
if (!apiFunctions[functionName]) {
_api_respond(token, { error: "No such API function" });
return;
}
if (ongoingAPICalls >= maxOngoingAPICalls) {
// Protect against user code flooding API calls
_api_respond(token, { error: "Too many API calls" });
console.log("Warning too many concurrent ongoing api calls...");
return;
}
//console.log('API Call: ' + functionName + ' with args ', args)
try {
const _args = JSON.parse(JSON.stringify(args)); // extra safe
apiFunctions[functionName](token, _args);
} catch (e) {
console.log("Warning failed to execute api function: ", e);
_api_respond(token, { error: "Failed to execute API call" });
}
});
// event queue
let hasScheduledProcessJobs = false;
jail.setSync("_noodl_request_process_jobs", function () {
if (hasScheduledProcessJobs) return;
hasScheduledProcessJobs = true;
setImmediate(() => {
hasScheduledProcessJobs = false;
_eval("_noodl_process_jobs()");
});
});
// Some cloud services related stuff
jail.setSync(
"_noodl_cloudservices",
{
masterKey: env.masterKey,
endpoint: env.backendEndpoint,
appId: env.appId,
},
{ copy: true }
);
// Result from request
const responseHandlers = {};
jail.setSync("_noodl_response", function (token, args) {
if (typeof responseHandlers[token] === "function") {
responseHandlers[token](args);
delete responseHandlers[token];
}
});
try {
const script = await isolate.compileScript(code);
await script.run(context, {
timeout: timeOut * 1000, // 15 s to initialize
});
} catch (e) {
console.log("Failed when compiling and running cloud function code");
isolate.dispose();
throw e;
}
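// Check heap usage; if above 80% of the memory limit, mark this context to be
// discarded so new requests get a fresh one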
function _checkMemUsage() {
if (isolate.isDisposed) return; // Ignore already disposed isolate
const heap = isolate.getHeapStatisticsSync();
const memUsage = heap.total_heap_size / (1024 * 1024);
if (memUsage > memoryLimit * 0.8) {
// Mem usage has exceeded 80% of limit
// discard the context, a new context will be created for new incoming requests
// and this one will be cleaned up
const uri = env.appId + "/" + env.version;
if (!_context.markedToBeDiscarded) {
// Make sure it has not already been marked
_context.markedToBeDiscarded = true;
console.log(
`Marking context ${uri} as to be discarded due to memory limit, will be discarded in 2 mins.`
);
contextCache[uri + "/discarded/" + Date.now()] =
Promise.resolve(_context);
_context.ttl = Date.now() + 2 * 60 * 1000; // Discard in 2 minutes
delete contextCache[uri];
}
}
}
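// Run a single cloud function request inside the isolate and resolve with an
// HTTP-style response (500 on error or timeout)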
async function handleRequest(options) {
return new Promise((resolve, reject) => {
// Declared outside the try block so the catch handler below can also see it
let hasResponded = false;
try {
_context.ttl = Date.now() + 10 * 60 * 1000; // Keep context alive
const token = Math.random().toString(26).slice(2);
const _req = {
function: options.functionId,
headers: options.headers,
body: options.body, // just forward raw body
};
responseHandlers[token] = (_res) => {
if (hasResponded) return;
hasResponded = true;
_checkMemUsage();
resolve(_res);
};
setTimeout(() => {
if (hasResponded) return;
hasResponded = true;
_checkMemUsage();
resolve({
statusCode: 500,
body: JSON.stringify({ error: "timeout" }),
});
}, timeOut * 1000); // Timeout if no reply from function
_eval(`_noodl_handleReq('${token}',${JSON.stringify(_req)})`)
.then(() => {
// All good
})
.catch((e) => {
if (hasResponded) return;
hasResponded = true;
_checkMemUsage();
resolve({
statusCode: 500,
body: JSON.stringify({ error: e.message }),
});
console.log("Error while running function:", e);
});
} catch (e) {
if (hasResponded) return;
hasResponded = true;
_checkMemUsage();
resolve({
statusCode: 500,
body: JSON.stringify({ error: e.message }),
});
console.log("Error while running function:", e);
}
});
}
const _context = {
context,
isolate,
responseHandlers,
version: env.version,
eval: _eval,
handleRequest,
ttl: Date.now() + 10 * 60 * 1000,
};
return _context;
}
const contextCache = {};
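// Return the cached context for this app/version, creating a new one if none
// exists or the previous isolate was disposed or failed to initialize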
async function getCachedContext(env) {
const uri = env.appId + "/" + env.version;
// Check if the isolate has been disposed
if (contextCache[uri]) {
let context;
try {
context = await contextCache[uri];
} catch (e) {
console.log(`Disposing context due to error in create: `, e);
delete contextCache[uri];
}
if (context && context.isolate && context.isolate.isDisposed)
delete contextCache[uri];
}
if (contextCache[uri]) {
return contextCache[uri];
} else {
return (contextCache[uri] = createContext(env));
}
}
let hasScheduledContextCachePurge = false;
function scheduleContextCachePurge() {
if (hasScheduledContextCachePurge) return;
hasScheduledContextCachePurge = true;
setTimeout(() => {
hasScheduledContextCachePurge = false;
Object.keys(contextCache).forEach(async (k) => {
let context;
try {
context = await contextCache[k];
} catch (e) {
// This context failed to create; delete it.
console.log(`Disposing isolate ${k} due to error in create: `, e);
delete contextCache[k];
}
if (context && context.isolate.isDisposed) {
console.log(`Disposing isolate ${k} due to "already disposed": `);
delete contextCache[k];
} else if (context && context.ttl < Date.now()) {
console.log(`Disposing isolate ${k} due to inactivity.`);
context.isolate.dispose();
delete contextCache[k];
}
});
}, 5 * 1000);
}
module.exports = {
scheduleContextCachePurge,
getCachedContext,
};


@@ -0,0 +1,14 @@
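// Reject logins for users whose logInDisabled flag is set, unless the request uses the master key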
Parse.Cloud.beforeLogin(async req => {
const {
object: user
} = req;
if (!user) {
return; // No user
}
const disabled = user.get('logInDisabled')
if (!req.master && disabled) {
throw Error('Access denied, log in disabled.')
}
});


@@ -0,0 +1,103 @@
const fetch = require('node-fetch');
// Get the latest version of cloud functions deploy, if not provided in header
async function getLatestVersion({ port, appId, masterKey }) {
const res = await fetch('http://localhost:' + port + '/classes/Ndl_CF?limit=1&order=-createdAt&keys=version', {
headers: {
'X-Parse-Application-Id': appId,
'X-Parse-Master-Key': masterKey
}
})
if (res.ok) {
const json = await res.json();
if (json.results && json.results.length === 1)
return json.results[0].version;
}
}
let _latestVersionCache;
async function getLatestVersionCached(options) {
if (_latestVersionCache && _latestVersionCache.ttl > Date.now()) {
return _latestVersionCache.version;
}
try {
const version = await getLatestVersion(options);
_latestVersionCache = {
version,
ttl: Date.now() + 15 * 1000 // Cache for 15s
};
return version;
} catch {
_latestVersionCache = undefined;
}
}
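// Generate a random alphanumeric string, used as the version identifier for a deploy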
function _randomString(size) {
if (size === 0) {
throw new Error("Zero-length randomString is useless.");
}
const chars =
"ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "abcdefghijklmnopqrstuvwxyz" + "0123456789";
let objectId = "";
for (let i = 0; i < size; ++i) {
objectId += chars[Math.floor((1 + Math.random()) * 0x10000) % chars.length];
}
return objectId;
}
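// Split the deploy string into fixed-size chunks so each fits in a single Parse object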
function chunkDeploy(str, size) {
const numChunks = Math.ceil(str.length / size)
const chunks = new Array(numChunks)
for (let i = 0, o = 0; i < numChunks; ++i, o += size) {
chunks[i] = str.substr(o, size)
}
return chunks
}
async function deployFunctions({
port,
appId,
masterKey,
runtime,
data
}) {
const deploy = "const _exportedComponents = " + data
const version = _randomString(16)
// Split deploy into 100kb sizes
const chunks = chunkDeploy(deploy, 100 * 1024);
// Upload all (must be waterfall so they get the right created_at)
const serverUrl = 'http://localhost:' + port;
for (let i = 0; i < chunks.length; i++) {
await fetch(serverUrl + '/classes/Ndl_CF', {
method: 'POST',
body: JSON.stringify({
code: chunks[i],
version,
runtime,
ACL: {
"*": {
read: false,
write: false
}
}
}), // Make it only accessible to masterkey
headers: {
'X-Parse-Application-Id': appId,
'X-Parse-Master-Key': masterKey
}
})
}
return {
version
}
}
module.exports = {
deployFunctions,
getLatestVersionCached
};


@@ -0,0 +1,54 @@
const CFContext = require('./cfcontext')
// The logger that is needed by the cloud functions
// it passes the logs to the parse server logger
class FunctionLogger {
constructor(noodlParseServer) {
this.noodlParseServer = noodlParseServer;
}
log(level, message) {
setImmediate(() => {
this.noodlParseServer.logger._log(level, message)
});
}
}
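// Run a cloud function request in the (cached) isolate context for the given app and version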
async function executeFunction({
port,
appId,
masterKey,
version,
logger,
headers,
functionId,
body,
timeOut = 15,
memoryLimit = 256
}) {
// Prepare the context
let cachedContext = await CFContext.getCachedContext({
backendEndpoint: 'http://localhost:' + port,
appId,
masterKey,
version,
logger,
timeOut: timeOut * 1000,
memoryLimit,
})
CFContext.scheduleContextCachePurge();
// Execute the request
const response = await cachedContext.handleRequest({
functionId,
headers,
body: JSON.stringify(body),
})
return response
}
module.exports = {
FunctionLogger,
executeFunction
};


@@ -0,0 +1,132 @@
const { createNoodlParseServer } = require("./parse");
const { executeFunction } = require("./function");
const { deployFunctions, getLatestVersionCached } = require("./function-deploy");
const { Logger } = require("./logger");
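// Express-style middleware that handles /functions/* (cloud function calls) and
// /functions-admin/* (deploys and info), and passes everything else to the next handler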
function createMiddleware(noodlServer) {
return async function middleware(req, res, next) {
if (req.url.startsWith('/functions/') && req.method === 'POST') {
try {
const path = req.url;
const functionId = decodeURIComponent(path.split('/')[2]);
if (functionId === undefined)
return next()
console.log('Running cloud function ' + functionId);
let version = req.headers['x-noodl-cloud-version']
if (version === undefined) {
version = await getLatestVersionCached(noodlServer.options)
}
// Execute the request
const cfResponse = await executeFunction({
port: noodlServer.options.port,
appId: noodlServer.options.appId,
masterKey: noodlServer.options.masterKey,
version,
logger: new Logger(noodlServer),
headers: req.headers,
functionId,
body: req.body,
timeOut: noodlServer.functionOptions.timeOut,
memoryLimit: noodlServer.functionOptions.memoryLimit,
})
if (cfResponse.headers) {
res.status(cfResponse.statusCode)
.set(cfResponse.headers)
.send(cfResponse.body)
} else {
res.status(cfResponse.statusCode)
.set({ 'Content-Type': 'application/json' })
.send(cfResponse.body)
}
} catch (e) {
console.log('Something went wrong when running function', e)
res.status(400).json({
error: "Something when wrong..."
})
}
} else if (req.url.startsWith('/functions-admin')) {
if (req.headers['x-parse-master-key'] !== noodlServer.options.masterKey) {
return res.status(401).json({
message: 'Not authorized'
})
}
if (req.headers['x-parse-application-id'] !== noodlServer.options.appId) {
return res.status(401).json({
message: 'Not authorized'
})
}
// Deploy a new version
if (req.method === 'POST' && req.url === "/functions-admin/deploy") {
if (!req.body || typeof req.body.deploy !== "string" || typeof req.body.runtime !== "string") {
return res.status(400).json({
message: 'Must supply deploy and runtime'
})
}
console.log('Uploading deploy...')
const { version } = await deployFunctions({
port: noodlServer.options.port,
appId: noodlServer.options.appId,
masterKey: noodlServer.options.masterKey,
runtime: req.body.runtime,
data: req.body.deploy
})
console.log('Upload completed, version: ' + version)
res.json({
status: 'success',
version
})
} else if (req.method === 'GET' && req.url === "/functions-admin/info") {
res.json({
version: '1.0'
})
} else res.status(400).json({
message: 'Function not supported'
})
} else {
next()
}
}
}
/**
*
* @param {{
* port: number;
* databaseURI: string;
* masterKey: string;
* appId: string;
* functionOptions?: { timeOut?: number; memoryLimit?: number; };
* parseOptions?: unknown;
* }} options
*/
function createNoodlServer(options) {
const noodlServer = createNoodlParseServer(options)
const cfMiddleware = createMiddleware(noodlServer);
// Combine the Noodl Cloud Function middleware with the Parse middleware into one middleware.
const middleware = (req, res, next) => {
cfMiddleware(req, res, () => {
noodlServer.server.app(req, res, next);
});
};
return {
noodlServer,
middleware
}
}
module.exports = {
createNoodlServer
};


@@ -0,0 +1,17 @@
// The logger that is needed by the cloud functions
// it passes the logs to the parse server logger
class Logger {
constructor(noodlServer) {
this.noodlServer = noodlServer;
}
log(level, message) {
setImmediate(() => {
this.noodlServer.logger._log(level, message);
});
}
}
module.exports = {
Logger,
};


@@ -0,0 +1,138 @@
const Winston = require('winston')
require('winston-mongodb');
// This stuff is needed to get the mongo-db transport working
// https://github.com/winstonjs/winston/issues/1130
function clone(obj) {
var copy = Array.isArray(obj) ? [] : {};
for (var i in obj) {
if (Array.isArray(obj[i])) {
copy[i] = obj[i].slice(0);
} else if (obj[i] instanceof Buffer) {
copy[i] = obj[i].slice(0);
} else if (typeof obj[i] != 'function') {
copy[i] = obj[i] instanceof Object ? clone(obj[i]) : obj[i];
} else if (typeof obj[i] === 'function') {
copy[i] = obj[i];
}
}
return copy;
}
require("winston/lib/winston/common").clone = clone;
let Transport = require("winston-transport");
Transport.prototype.normalizeQuery = function (options) { //
options = options || {};
// limit
options.rows = options.rows || options.limit || 10;
// starting row offset
options.start = options.start || 0;
// now
options.until = options.until || new Date;
if (typeof options.until !== 'object') {
options.until = new Date(options.until);
}
// now - 24
options.from = options.from || (options.until - (24 * 60 * 60 * 1000));
if (typeof options.from !== 'object') {
options.from = new Date(options.from);
}
// 'asc' or 'desc'
options.order = options.order || 'desc';
// which fields to select
options.fields = options.fields;
return options;
};
Transport.prototype.formatResults = function (results, options) {
return results;
};
// Create a logger that will push to mongodb
class WinstonLoggerAdapter {
constructor(options) {
const info = new Winston.transports.MongoDB({
db: options.databaseURI,
level: 'info',
collection: '_ndl_logs_info',
capped: true,
cappedSize: 2000000, // 2mb size
})
info.name = 'logs-info'
const error = new Winston.transports.MongoDB({
db: options.databaseURI,
level: 'error',
collection: '_ndl_logs_error',
capped: true,
cappedSize: 2000000, // 2mb size
})
error.name = 'logs-error'
this.logger = Winston.createLogger({
transports: [
info,
error
]
})
}
log() {
// Logs from parse are simply passed to console
console.log.apply(this, arguments);
}
// This function is used by cloud functions to actually push to log
_log() {
// Echo to the console, then forward to the MongoDB transports
console.log.apply(this, arguments);
return this.logger.log.apply(this.logger, arguments);
}
// custom query as winston is currently limited
query(options, callback = () => {}) {
if (!options) {
options = {};
}
// defaults to 7 days prior
const from = options.from || new Date(Date.now() - 7 * 24 * 60 * 60 * 1000);
const until = options.until || new Date();
const limit = options.size || 10;
const order = options.order || 'desc';
const level = options.level || 'info';
const queryOptions = {
from,
until,
limit,
order,
};
return new Promise((resolve, reject) => {
this.logger.query(queryOptions, (err, res) => {
if (err) {
callback(err);
return reject(err);
}
const _res = level === 'error' ? res['logs-error'] : res['logs-info'];
_res.forEach(r => delete r.meta)
callback(_res);
resolve(_res);
});
});
}
}
module.exports = {
LoggerAdapter: WinstonLoggerAdapter
}


@@ -0,0 +1,115 @@
const path = require('path');
const ParseServer = require('parse-server').default;
const {
LoggerAdapter
} = require('./mongodb');
/**
*
* @param {{
* port: number;
* databaseURI: string;
* masterKey: string;
* appId: string;
* functionOptions?: { timeOut?: number; memoryLimit?: number; };
* parseOptions?: unknown;
* }} param0
* @returns {{
* functionOptions: { timeOut: number; memoryLimit: number; };
* options: { port: number; appId: string; masterKey: string; };
* server: ParseServer;
* logger: LoggerAdapter;
* }}
*/
function createNoodlParseServer({
port = 3000,
databaseURI,
masterKey,
appId,
functionOptions = {},
parseOptions = {},
}) {
const serverURL = `http://localhost:${port}/`;
const logger = new LoggerAdapter({
databaseURI
})
// Create files adapter
let filesAdapter;
if (process.env.S3_BUCKET) {
console.log('Using AWS S3 file storage with bucket ' + process.env.S3_BUCKET)
if (!process.env.S3_SECRET_KEY || !process.env.S3_ACCESS_KEY) {
throw Error("You must provide S3_SECRET_KEY and S3_ACCESS_KEY environment variables in addition to S3_BUCKET for S3 file storage.")
}
const S3Adapter = require('parse-server').S3Adapter;
filesAdapter = new S3Adapter(
process.env.S3_ACCESS_KEY,
process.env.S3_SECRET_KEY,
process.env.S3_BUCKET, {
region: process.env.S3_REGION,
bucketPrefix: process.env.S3_BUCKET_PREFIX,
directAccess: process.env.S3_DIRECT_ACCESS === 'true'
}
)
} else if (process.env.GCS_BUCKET) {
const GCSAdapter = require('parse-server-gcs-adapter');
if (!process.env.GCP_PROJECT_ID || !process.env.GCP_CLIENT_EMAIL || !process.env.GCP_PRIVATE_KEY) {
throw Error("You must provide GCP_PROJECT_ID, GCP_CLIENT_EMAIL, GCP_PRIVATE_KEY environment variables in addition to GCS_BUCKET for GCS file storage.")
}
console.log('Using GCS file storage with bucket ' + process.env.GCS_BUCKET)
filesAdapter = new GCSAdapter(
process.env.GCP_PROJECT_ID, { // Credentials
client_email: process.env.GCP_CLIENT_EMAIL,
private_key: process.env.GCP_PRIVATE_KEY.replace(/\\n/gm, '\n')
},
process.env.GCS_BUCKET, {
directAccess: process.env.GCS_DIRECT_ACCESS === 'true',
bucketPrefix: process.env.GCS_BUCKET_PREFIX
}
);
}
const server = new ParseServer({
databaseURI,
cloud: path.resolve(__dirname, './cloud.js'),
push: false,
appId,
masterKey,
serverURL,
appName: "Noodl App",
// allowCustomObjectId is needed for Noodl's cached model writes
allowCustomObjectId: true,
loggerAdapter: logger,
// We do this just to get the right behaviour for emailVerified (no emails are sent)
publicServerURL: process.env.PUBLIC_SERVER_URL || 'https://you-need-to-set-public-server-env-to-support-files',
verifyUserEmails: true,
emailAdapter: { // null email adapter
sendMail: () => {},
sendVerificationEmail: () => {},
sendPasswordResetEmail: () => {}
},
filesAdapter,
...parseOptions,
});
return {
functionOptions: {
timeOut: functionOptions.timeOut || 15,
memoryLimit: functionOptions.memoryLimit || 256
},
options: {
port,
appId,
masterKey,
},
server,
logger,
};
}
module.exports = {
createNoodlParseServer
}