From ee6578b3d3f8a34bf1ff0be6354bc000b332669e Mon Sep 17 00:00:00 2001 From: Andrea Child Date: Tue, 15 Oct 2024 17:31:37 -0700 Subject: [PATCH] Increased logging to console and introduced file logging to facilitate troubleshooting. File logging: -uses pino and pino-pretty logging libraries -log level is controlled via the --log-verbose option which will output more information to the file (default log level is INFO) -queries and node/edge data are logged at DEBUG level and not logged by default Console output: -added more messages and spinners to indicate what the utility is doing and its progress -yellow text is used to highlight some information for the user Other: -fixed sample TODO schema which had extra data that causes it to fail when used as an import schema -deleted unused test case input graphql schema files --- package.json | 4 +- samples/todo.schema.graphql | 4 - src/CDKPipelineApp.js | 31 +- src/NeptuneSchema.js | 89 ++-- src/graphdb.js | 5 +- src/help.js | 1 + src/lambdaZip.js | 3 +- src/logger.js | 85 ++++ src/main.js | 113 ++--- src/pipelineResources.js | 443 +++++++++++------- src/resolverJS.js | 3 +- src/schemaModelValidator.js | 30 +- templates/CDKTemplate.js | 2 +- .../input/airports.source.schema.graphql | 153 ------ .../input/airports.source.schema.graphql | 153 ------ 15 files changed, 507 insertions(+), 612 deletions(-) create mode 100644 src/logger.js delete mode 100644 test/TestCases/Case05/input/airports.source.schema.graphql delete mode 100644 test/TestCases/Case06/input/airports.source.schema.graphql diff --git a/package.json b/package.json index 156a43d..d2db1ee 100644 --- a/package.json +++ b/package.json @@ -64,7 +64,9 @@ "graphql": "^16.8.1", "graphql-tag": "2.12.6", "ora": "7.0.1", - "semver": "7.5.4" + "semver": "7.5.4", + "pino": "9.4.0", + "pino-pretty": "11.2.2" }, "devDependencies": { "@jest/test-sequencer": "^29.7.0", diff --git a/samples/todo.schema.graphql b/samples/todo.schema.graphql index a307bfd..66e0e40 100644 --- 
a/samples/todo.schema.graphql +++ b/samples/todo.schema.graphql @@ -10,8 +10,4 @@ type Todo { type Comment { content: String -} - -input Options { - limit: Int } \ No newline at end of file diff --git a/src/CDKPipelineApp.js b/src/CDKPipelineApp.js index d916e0b..4dc8342 100644 --- a/src/CDKPipelineApp.js +++ b/src/CDKPipelineApp.js @@ -16,6 +16,7 @@ import { readFile, writeFile } from 'fs/promises'; import fs from 'fs'; import archiver from 'archiver'; import ora from 'ora'; +import { loggerDebug, loggerError, loggerInfo, yellow } from "./logger.js"; let NAME = ''; let REGION = ''; @@ -34,11 +35,6 @@ let APPSYNC_ATTACH_MUTATION = []; let SCHEMA_MODEL = null; let thisOutputFolderPath = './output'; - -function yellow(text) { - return '\x1b[33m' + text + '\x1b[0m'; -} - async function getSchemaFields(typeName) { /* To be updated as: SCHEMA_MODEL.definitions @@ -69,7 +65,7 @@ async function createDeploymentFile(folderPath, zipFilePath) { archive.file('./output/output.resolver.graphql.js', { name: 'output.resolver.graphql.js' }) await archive.finalize(); } catch (err) { - console.error('Creating deployment zip file: ' + err); + loggerError('Error creating deployment zip file', err); } } @@ -107,32 +103,32 @@ async function createAWSpipelineCDK({ if (neptuneType === 'neptune-db') { try { - if (!quiet) console.log('Get Neptune Cluster Info'); + loggerInfo('Getting Neptune Cluster Info'); if (!quiet) spinner = ora('Getting ...').start(); neptuneClusterInfo = await getNeptuneClusterDbInfoBy(NEPTUNE_DB_NAME, REGION); if (!quiet) spinner.succeed('Got Neptune Cluster Info'); if (isNeptuneIAMAuth) { if (!neptuneClusterInfo.isIAMauth) { - console.error("The Neptune database authentication is set to VPC."); - console.error("Remove the --output-aws-pipeline-cdk-neptune-IAM option."); + loggerError("The Neptune database authentication is set to VPC."); + loggerError("Remove the --output-aws-pipeline-cdk-neptune-IAM option."); process.exit(1); } } else { if 
(neptuneClusterInfo.isIAMauth) { - console.error("The Neptune database authentication is set to IAM."); - console.error("Add the --output-aws-pipeline-cdk-neptune-IAM option."); + loggerError("The Neptune database authentication is set to IAM."); + loggerError("Add the --output-aws-pipeline-cdk-neptune-IAM option."); process.exit(1); } else { - if (!quiet) console.log(`Subnet Group: ` + yellow(neptuneClusterInfo.dbSubnetGroup)); + loggerDebug(`Subnet Group: ` + neptuneClusterInfo.dbSubnetGroup, {toConsole: true}); } } if (neptuneClusterInfo.version != '') { const v = neptuneClusterInfo.version; if (lambdaFilesPath.includes('SDK') == true && //semver.satisfies(v, '>=1.2.1.0') ) { - (v == '1.2.1.0' || v == '1.2.0.2' || v == '1.2.0.1' || v == '1.2.0.0' || v == '1.1.1.0' || v == '1.1.0.0')) { - console.error("Neptune SDK query is supported starting with Neptune versions 1.2.1.0.R5"); - console.error("Switch to Neptune HTTPS query with option --output-resolver-query-https"); + (v == '1.2.1.0' || v == '1.2.0.2' || v == '1.2.0.1' || v == '1.2.0.0' || v == '1.1.1.0' || v == '1.1.0.0')) { + loggerError("Neptune SDK query is supported starting with Neptune versions 1.2.1.0.R5"); + loggerError("Switch to Neptune HTTPS query with option --output-resolver-query-https"); process.exit(1); } } @@ -143,13 +139,14 @@ async function createAWSpipelineCDK({ NEPTUNE_IAM_POLICY_RESOURCE = neptuneClusterInfo.iamPolicyResource; } catch (error) { + loggerError('Error getting Neptune Cluster Info', error); if (!quiet) spinner.fail("Error getting Neptune Cluster Info."); if (!isNeptuneIAMAuth) { spinner.clear(); - console.error("VPC data is not available to proceed."); + loggerError("VPC data is not available to proceed."); process.exit(1); } else { - if (!quiet) console.log("Proceeding without getting Neptune Cluster info."); + loggerInfo("Proceeding without getting Neptune Cluster info.", {toConsole: true}); } } } diff --git a/src/NeptuneSchema.js b/src/NeptuneSchema.js index 
d50d145..25aa474 100644 --- a/src/NeptuneSchema.js +++ b/src/NeptuneSchema.js @@ -14,6 +14,7 @@ import axios from "axios"; import { aws4Interceptor } from "aws4-axios"; import { fromNodeProviderChain } from "@aws-sdk/credential-providers"; import { NeptunedataClient, ExecuteOpenCypherQueryCommand } from "@aws-sdk/client-neptunedata"; +import { loggerDebug, loggerError, loggerInfo, yellow } from "./logger.js"; import { parseNeptuneDomainFromHost, parseNeptuneGraphName } from "./util.js"; import { ExecuteQueryCommand, GetGraphSummaryCommand, NeptuneGraphClient } from "@aws-sdk/client-neptune-graph"; @@ -26,7 +27,6 @@ let HOST = ''; let PORT = 8182; let REGION = '' let SAMPLE = 5000; -let VERBOSE = false; let NEPTUNE_TYPE = NEPTUNE_DB; let NAME = ''; let useSDK = false; @@ -54,18 +54,6 @@ const schema = { edgeStructures:[] }; - -function yellow(text) { - return '\x1b[33m' + text + '\x1b[0m'; -} - - -function consoleOut(text) { - if (VERBOSE) { - console.log(text); - } -} - function sanitize(text) { // TODO implement sanitization logic // placeholder for sanitization of query text that cannot be parameterized @@ -93,10 +81,10 @@ async function queryNeptune(query, params = {}) { }); return response.data; } catch (error) { - console.error("Http query request failed: ", error.message); - consoleOut("Trying with the AWS SDK"); + loggerError('Http query request failed', error); + loggerInfo('Trying with the AWS SDK', {toConsole: true}); const response = await queryNeptuneSdk(query, params); - console.log('Querying via AWS SDK was successful, will use SDK for future queries'); + loggerInfo('Querying via AWS SDK was successful, will use SDK for future queries') useSDK = true; return response; } @@ -129,7 +117,7 @@ async function queryNeptuneDbSDK(query, params = {}) { return response; } catch (error) { - console.error(NEPTUNE_DB + ' SDK query request failed: ', error.message); + loggerError(NEPTUNE_DB + ' SDK query request failed', error); process.exit(1); } } @@ -149,7 
+137,7 @@ async function queryNeptuneGraphSDK(query, params = {}) { const response = await client.send(command); return await new Response(response.payload).json(); } catch (error) { - console.error(NEPTUNE_GRAPH + ' SDK query request failed:' + JSON.stringify(error)); + loggerError(NEPTUNE_GRAPH + ' SDK query request failed', error); process.exit(1); } } @@ -158,16 +146,16 @@ async function queryNeptuneGraphSDK(query, params = {}) { async function getNodesNames() { let query = `MATCH (a) RETURN labels(a), count(a)`; let response = await queryNeptune(query); + loggerInfo('Getting nodes names'); try { response.results.forEach(result => { schema.nodeStructures.push({ label: result['labels(a)'][0], properties: []}); - consoleOut(' Found Node: ' + yellow(result['labels(a)'][0])); + loggerDebug('Found Node: ' + yellow(result['labels(a)'][0]), {toConsole: true}); }); } catch (e) { - consoleOut(" No nodes found"); - return; + loggerError('No nodes found', e); } } @@ -175,16 +163,16 @@ async function getNodesNames() { async function getEdgesNames() { let query = `MATCH ()-[e]->() RETURN type(e), count(e)`; let response = await queryNeptune(query); + loggerInfo('Getting edges names'); try { response.results.forEach(result => { schema.edgeStructures.push({ label: result['type(e)'], directions: [], properties:[]}); - consoleOut(' Found Edge: ' + yellow(result['type(e)'])); + loggerDebug('Found Edge: ' + yellow(result['type(e)']), {toConsole: true}); }); } catch (e) { - consoleOut(" No edges found"); - return; + loggerError('No edges found', e); } } @@ -197,7 +185,7 @@ async function findFromAndToLabels(edgeStructure) { for (let fromLabel of result.fromLabel) { for (let toLabel of result.toLabel) { edgeStructure.directions.push({from:fromLabel, to:toLabel}); - consoleOut(' Found edge: ' + yellow(edgeStructure.label) + ' direction: ' + yellow(fromLabel) + ' -> ' + yellow(toLabel)); + loggerDebug('Found edge: ' + yellow(edgeStructure.label) + ' direction: ' + yellow(fromLabel) + 
' -> ' + yellow(toLabel), {toConsole: true}); } } } @@ -231,7 +219,7 @@ function addUpdateNodeProperty(nodeName, name, value) { if (property === undefined) { let propertyType = CastGraphQLType(value); node.properties.push({name: name, type: propertyType}); - consoleOut(` Added property to node: ${yellow(nodeName)} property: ${yellow(name)} type: ${yellow(propertyType)}`); + loggerDebug(`Added property to node: ${yellow(nodeName)} property: ${yellow(name)} type: ${yellow(propertyType)}`, {toConsole: true}); } } @@ -242,7 +230,7 @@ function addUpdateEdgeProperty(edgeName, name, value) { if (property === undefined) { let propertyType = CastGraphQLType(value); edge.properties.push({name: name, type: propertyType}); - consoleOut(' Added property to edge: ' + yellow(edgeName) + ' property: ' + yellow(name) + ' type: ' + yellow(propertyType)); + loggerDebug('Added property to edge: ' + yellow(edgeName) + ' property: ' + yellow(name) + ' type: ' + yellow(propertyType), {toConsole: true}); } } @@ -250,6 +238,7 @@ function addUpdateEdgeProperty(edgeName, name, value) { async function getEdgeProperties(edge) { let query = `MATCH ()-[n:${sanitize(edge.label)}]->() RETURN properties(n) as properties LIMIT $sample`; let parameters = {sample: SAMPLE}; + loggerDebug(`Getting properties for edge: ${query}`); try { let response = await queryNeptune(query, parameters); let result = response.results; @@ -260,12 +249,13 @@ async function getEdgeProperties(edge) { }); } catch (e) { - consoleOut(" No properties found for edge: " + edge.label); + loggerError('No properties found for edge: ' + edge.label, e); } } async function getEdgesProperties() { + loggerInfo('Retrieving edge properties'); await Promise.all(schema.edgeStructures.map(async (edge) => { await getEdgeProperties(edge); })); @@ -275,6 +265,7 @@ async function getEdgesProperties() { async function getNodeProperties(node) { let query = `MATCH (n:${sanitize(node.label)}) RETURN properties(n) as properties LIMIT $sample`; let 
parameters = {sample: SAMPLE}; + loggerDebug(`Getting properties for node: ${query}`); try { let response = await queryNeptune(query, parameters); let result = response.results; @@ -285,12 +276,13 @@ async function getNodeProperties(node) { }); } catch (e) { - consoleOut(" No properties found for node: " + node.label); + loggerError('No properties found for node: ' + node.label, e); } } -async function getNodesProperties() { +async function getNodesProperties() { + loggerInfo('Retrieving node properties'); await Promise.all(schema.nodeStructures.map(async (node) => { await getNodeProperties(node); })); @@ -299,9 +291,11 @@ async function getNodesProperties() { async function checkEdgeDirectionCardinality(d) { let queryFrom = `MATCH (from:${sanitize(d.from)})-[r:${sanitize(d.edge.label)}]->(to:${sanitize(d.to)}) WITH to, count(from) as rels WHERE rels > 1 RETURN rels LIMIT 1`; + loggerDebug(`Checking edge direction cardinality: ${queryFrom}`); let responseFrom = await queryNeptune(queryFrom); let resultFrom = responseFrom.results[0]; let queryTo = `MATCH (from:${sanitize(d.from)})-[r:${sanitize(d.edge.label)}]->(to:${sanitize(d.to)}) WITH from, count(to) as rels WHERE rels > 1 RETURN rels LIMIT 1`; + loggerDebug(`Checking edge direction cardinality: ${queryTo}`) let responseTo = await queryNeptune(queryTo); let resultTo = responseTo.results[0]; let c = ''; @@ -319,7 +313,7 @@ async function checkEdgeDirectionCardinality(d) { } Object.assign(d.direction, {relationship: c}); - consoleOut(' Found edge: ' + yellow(d.edge.label) + ' direction: ' + yellow(d.from) + ' -> ' + yellow(d.to) + ' relationship: ' + yellow(c)); + loggerDebug('Found edge: ' + yellow(d.edge.label) + ' direction: ' + yellow(d.from) + ' -> ' + yellow(d.to) + ' relationship: ' + yellow(c), {toConsole: true}); } @@ -337,18 +331,17 @@ async function getEdgesDirectionsCardinality() { } -function setGetNeptuneSchemaParameters(host, port, region, verbose = false, neptuneType) { +function 
setGetNeptuneSchemaParameters(host, port, region, neptuneType) { HOST = host; PORT = port; REGION = region; - VERBOSE = verbose; NEPTUNE_TYPE = neptuneType; NAME = parseNeptuneGraphName(host); } function getNeptunedataClient() { if (!neptunedataClient) { - console.log('Instantiating NeptunedataClient') + loggerInfo('Instantiating NeptunedataClient') neptunedataClient = new NeptunedataClient({ endpoint: `https://${HOST}:${PORT}` }); @@ -358,7 +351,7 @@ function getNeptunedataClient() { function getNeptuneGraphClient() { if (!neptuneGraphClient) { - console.log('Instantiating NeptuneGraphClient') + loggerInfo('Instantiating NeptuneGraphClient') neptuneGraphClient = new NeptuneGraphClient({ port: PORT, host: parseNeptuneDomainFromHost(HOST), @@ -373,14 +366,14 @@ function getNeptuneGraphClient() { * Get a summary of a neptune analytics graph */ async function getNeptuneGraphSummary() { - console.log('Retrieving ' + NEPTUNE_GRAPH + ' summary') + loggerInfo('Retrieving ' + NEPTUNE_GRAPH + ' summary') const client = getNeptuneGraphClient(); const command = new GetGraphSummaryCommand({ graphIdentifier: NAME, mode: 'detailed' }); const response = await client.send(command); - console.log('Retrieved ' + NEPTUNE_GRAPH + ' summary') + loggerInfo('Retrieved ' + NEPTUNE_GRAPH + ' summary') return response.graphSummary; } @@ -388,13 +381,13 @@ async function getNeptuneGraphSummary() { * Get a summary of a neptune db graph */ async function getNeptuneDbSummary() { - console.log('Retrieving ' + NEPTUNE_DB + ' summary') + loggerInfo('Retrieving ' + NEPTUNE_DB + ' summary') let response = await axios.get(`https://${HOST}:${PORT}/propertygraph/statistics/summary`, { params: { mode: 'detailed' } }); - console.log('Retrieved ' + NEPTUNE_DB + ' summary') + loggerInfo('Retrieved ' + NEPTUNE_DB + ' summary') return response.data.payload.graphSummary; } @@ -411,39 +404,37 @@ async function loadSchemaViaSummary() { } graphSummary.nodeLabels.forEach(label => { 
schema.nodeStructures.push({label:label, properties:[]}); - consoleOut(' Found node: ' + yellow(label)); + loggerDebug('Found node: ' + yellow(label), {toConsole: true}); }); graphSummary.edgeLabels.forEach(label => { schema.edgeStructures.push({label:label, properties:[], directions:[]}); - consoleOut(' Found edge: ' + yellow(label)); + loggerDebug('Found edge: ' + yellow(label), {toConsole: true}); }); return true; } catch (error) { - console.error(`Getting the schema via Neptune Summary API failed: ${JSON.stringify(error)}`); + loggerError('Getting the schema via Neptune Summary API failed', error); return false; } } -async function getNeptuneSchema(quiet) { - - VERBOSE = !quiet; +async function getNeptuneSchema() { try { await getAWSCredentials(); } catch (error) { - consoleOut("There are no AWS credetials configured. \nGetting the schema from an Amazon Neptune database with IAM authentication works only with AWS credentials."); + loggerError('There are no AWS credentials configured. Getting the schema from an Amazon Neptune database with IAM authentication works only with AWS credentials.', error); } if (await loadSchemaViaSummary()) { - consoleOut("Got nodes and edges via Neptune Summary API."); + loggerInfo("Got nodes and edges via Neptune Summary API.", {toConsole: true}); } else { - consoleOut("Getting nodes via queries."); + loggerInfo("Getting nodes via queries.", {toConsole: true}); await getNodesNames(); - consoleOut("Getting edges via queries."); + loggerInfo("Getting edges via queries.", {toConsole: true}); await getEdgesNames(); } diff --git a/src/graphdb.js b/src/graphdb.js index c0ea668..19838fe 100644 --- a/src/graphdb.js +++ b/src/graphdb.js @@ -9,6 +9,7 @@ on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ +import { loggerInfo } from './logger.js'; let changeCase = true; @@ -33,7 +34,7 @@ function checkForDuplicateNames(schema) { }); if (!changeCase) - console.log('Pascal case is not applicable, duplicate names types.'); + loggerInfo('Pascal case is not applicable, duplicate names types.', {toConsole: true}); } @@ -71,7 +72,7 @@ function graphDBInferenceSchema (graphbSchema, addMutations) { r += '\t_id: ID! @id\n'; node.properties.forEach(property => { - if (property.name == 'id') + if (property.name == 'id') r+= `\tid: ID\n`; else r+= `\t${property.name}: ${property.type}\n`; diff --git a/src/help.js b/src/help.js index 3c564a7..0885706 100644 --- a/src/help.js +++ b/src/help.js @@ -112,6 +112,7 @@ Parameters [--help -h ] [--version -v ] [--quiet -q ] +[--log-verbose -lv ] [--input-schema -is ] [--input-schema-file -isf ] diff --git a/src/lambdaZip.js b/src/lambdaZip.js index 5084854..9044a58 100644 --- a/src/lambdaZip.js +++ b/src/lambdaZip.js @@ -1,5 +1,6 @@ import fs from 'fs'; import archiver from 'archiver'; +import { loggerError } from "./logger.js"; async function createLambdaDeploymentPackage(templatePath, zipFilePath) { try { @@ -10,7 +11,7 @@ async function createLambdaDeploymentPackage(templatePath, zipFilePath) { archive.file('./output/output.resolver.graphql.js', { name: 'output.resolver.graphql.js' }) await archive.finalize(); } catch (error) { - console.error('Lambda deployment package creation failed. '+ error); + loggerError('Lambda deployment package creation failed', error); } } diff --git a/src/logger.js b/src/logger.js new file mode 100644 index 0000000..a02b819 --- /dev/null +++ b/src/logger.js @@ -0,0 +1,85 @@ +import { pino } from "pino"; +import path from "path"; + +let fileLogger; +let logFileDestination; + +/** + * Initialize the standard out and file loggers. 
+ * @param directory the directory in which to create the log file + * @param quiet true if the standard output should be minimalized to errors only + * @param logLevel the file log level + */ +function loggerInit(directory, quiet = false, logLevel = 'info') { + logFileDestination = path.join(directory, 'log_' + (new Date()).toISOString() + '.txt'); + fileLogger = pino(pino.transport({ + targets: [ + { + target: 'pino-pretty', + options: { + destination: logFileDestination, + colorize: false, + translateTime: 'yyyy-mm-dd HH:MM:ss', + ignore: 'pid,hostname' + }, + } + ] + })); + fileLogger.level = logLevel; + if (quiet) { + console.log = function(){}; + console.info = function(){}; + console.debug = function(){}; + } +} + +function log(level, text, options = {toConsole: false}) { + let detail = options.detail; + if (detail) { + if (options.toConsole) { + console.log(text + ': ' + yellow(detail)); + } + fileLogger[level](removeYellow(text) + ': ' + removeYellow(detail)); + } else { + if (options.toConsole) { + console.log(text); + } + // remove any yellow which may have been added by the caller + fileLogger[level](removeYellow(text)); + } +} + +function loggerInfo(text, options = {toConsole: false}) { + log('info', text, options); +} + +function loggerDebug(text, options = {toConsole: false}) { + log('debug', text, options); +} + +/** + * Log an error to console and file. A simplified error message will be output to console while a more detailed error will be logged to file. 
+ * @param errorMessage the error message to log to console and file + * @param error optional error object that caused the error + */ +function loggerError(errorMessage, error) { + let toConsole = errorMessage; + let toLog = removeYellow(errorMessage); + if (error) { + toConsole = toConsole + ': ' + error.message + ' - Please see ' + logFileDestination + ' for more details'; + toLog = toLog + '\n' + JSON.stringify(error, null, 4); + } + console.error(toConsole); + fileLogger.error(toLog); +} + +function yellow(text) { + return '\x1b[33m' + text + '\x1b[0m'; +} + +function removeYellow(text) { + let withoutYellow = text.replaceAll(/\x1b\[33m/g, ''); + return withoutYellow.replaceAll(/\x1b\[0m/g, ''); +} + +export { loggerInit, loggerInfo, loggerError, loggerDebug, yellow }; \ No newline at end of file diff --git a/src/main.js b/src/main.js index 01c6af2..20cc3ed 100644 --- a/src/main.js +++ b/src/main.js @@ -11,7 +11,7 @@ permissions and limitations under the License. */ import { readFileSync, writeFileSync, mkdirSync} from 'fs'; -import { helpTxt } from './help.js'; +import { helpTxt } from './help.js'; import { graphDBInferenceSchema } from './graphdb.js'; import { changeGraphQLSchema } from './changes.js'; import { schemaParser, schemaStringify } from './schemaParser.js'; @@ -21,12 +21,10 @@ import { getNeptuneSchema, setGetNeptuneSchemaParameters } from './NeptuneSchema import { createUpdateAWSpipeline, removeAWSpipelineResources } from './pipelineResources.js' import { createAWSpipelineCDK } from './CDKPipelineApp.js' import { createLambdaDeploymentPackage } from './lambdaZip.js' +import { loggerDebug, loggerError, loggerInfo, loggerInit, yellow } from './logger.js'; import ora from 'ora'; let spinner = null; -function yellow(text) { - return '\x1b[33m' + text + '\x1b[0m'; -} // find global installation dir import path from 'path'; @@ -46,6 +44,7 @@ const NEPTUNE_DB = 'neptune-db'; // Input let quiet = false; +let logLevel = 'info'; let inputGraphQLSchema = 
''; let inputGraphQLSchemaFile = ''; let inputGraphQLSchemaChanges = ''; @@ -112,6 +111,10 @@ function processArgs() { case '--quiet': quiet = true; break; + case '-lv': + case '--log-verbose': + logLevel = 'debug'; + break; case '-is': case '--input-schema': inputGraphQLSchema = array[index + 1]; @@ -266,13 +269,18 @@ async function main() { processArgs(); + // Init the logger + loggerInit(outputFolderPath, quiet, logLevel); + loggerInfo('Starting neptune-for-graphql version: ' + version); + loggerDebug('Input arguments: ' + process.argv); + // Get graphDB schema from file if (inputGraphDBSchemaFile != '' && inputGraphQLSchema == '' && inputGraphQLSchemaFile == '') { try { inputGraphDBSchema = readFileSync(inputGraphDBSchemaFile, 'utf8'); - if (!quiet) console.log('Loaded graphDB schema from file: ' + inputGraphDBSchemaFile); + loggerInfo('Loaded graphDB schema from file: ' + yellow(inputGraphDBSchemaFile), {toConsole: true}); } catch (err) { - console.error('Error reading graphDB schema file: ' + inputGraphDBSchemaFile); + loggerError('Error reading graphDB schema file: ' + yellow(inputGraphDBSchemaFile), err); process.exit(1); } } @@ -283,20 +291,20 @@ async function main() { if (isNeptuneAnalyticsGraph) { neptuneType = NEPTUNE_GRAPH; // neptune analytics requires IAM - console.log("Detected neptune-graph from input endpoint - setting IAM auth to true as it is required for neptune analytics") + loggerInfo("Detected neptune-graph from input endpoint - setting IAM auth to true as it is required for neptune analytics") isNeptuneIAMAuth = true; } // Get Neptune schema from endpoint if (inputGraphDBSchemaNeptuneEndpoint != '' && inputGraphDBSchema == '' && inputGraphDBSchemaFile == '') { let endpointParts = inputGraphDBSchemaNeptuneEndpoint.split(':'); - if (endpointParts.length !== 2) { - console.error('Neptune endpoint must be in the form of host:port'); + if (endpointParts.length != 2) { + loggerError('Neptune endpoint must be in the form of host:port'); 
process.exit(1); } let neptuneHost = endpointParts[0]; let neptunePort = endpointParts[1]; - + let neptuneRegionParts = inputGraphDBSchemaNeptuneEndpoint.split('.'); let neptuneRegion = ''; if (neptuneType === NEPTUNE_DB) @@ -304,30 +312,30 @@ async function main() { else neptuneRegion = neptuneRegionParts[1]; - if (!quiet) console.log('Getting Neptune schema from endpoint: ' + yellow(neptuneHost + ':' + neptunePort)); - setGetNeptuneSchemaParameters(neptuneHost, neptunePort, neptuneRegion, true, neptuneType); + loggerInfo('Retrieving Neptune schema'); + loggerDebug('Getting Neptune schema from endpoint: ' + yellow(neptuneHost + ':' + neptunePort), {toConsole: true}); + + setGetNeptuneSchemaParameters(neptuneHost, neptunePort, neptuneRegion, neptuneType); let startTime = performance.now(); - inputGraphDBSchema = await getNeptuneSchema(quiet); + inputGraphDBSchema = await getNeptuneSchema(); let endTime = performance.now(); let executionTime = endTime - startTime; - if (!quiet) console.log(`Execution time: ${(executionTime/1000).toFixed(2)} seconds`); - if (!quiet) console.log(''); + loggerInfo('Fetch neptune schema execution time: ' + (executionTime/1000).toFixed(2) + ' seconds', {toConsole: true}); } // Option 2: inference GraphQL schema from graphDB schema if (inputGraphDBSchema != '' && inputGraphQLSchema == '' && inputGraphQLSchemaFile == '') { - if (!quiet) console.log('Inferencing GraphQL schema from graphDB schema'); + loggerInfo('Inferencing GraphQL schema from graphDB schema', {toConsole: true}); inputGraphQLSchema = graphDBInferenceSchema(inputGraphDBSchema, outputSchemaMutations); - if (!quiet) console.log(''); } // Option 1: load if (inputGraphQLSchema == '' && inputGraphQLSchemaFile != '') { try { inputGraphQLSchema = readFileSync(inputGraphQLSchemaFile, 'utf8'); - if (!quiet) console.log('Loaded GraphQL schema from file: ' + inputGraphQLSchemaFile); + loggerInfo('Loaded GraphQL schema from file: ' + yellow(inputGraphQLSchemaFile), {toConsole: true}); 
} catch (err) { - console.error('Error reading GraphQL schema file: ' + inputGraphQLSchemaFile); + loggerError('Error reading GraphQL schema file: ' + yellow(inputGraphQLSchemaFile), err); process.exit(1); } } @@ -336,9 +344,9 @@ async function main() { if (inputGraphQLSchemaChangesFile != '') { try { inputGraphQLSchemaChanges = readFileSync(inputGraphQLSchemaChangesFile, 'utf8'); - if (!quiet) console.log('Loaded GraphQL schema changes from file: ' + inputGraphQLSchemaChangesFile); + loggerInfo('Loaded GraphQL schema changes from file: ' + yellow(inputGraphQLSchemaChangesFile), {toConsole: true}); } catch (err) { - console.error('Error reading GraphQL schema changes file: ' + inputGraphQLSchemaChangesFile); + loggerError('Error reading GraphQL schema changes file: ' + yellow(inputGraphQLSchemaChangesFile), err); process.exit(1); } } @@ -350,17 +358,17 @@ async function main() { } if (createUpdatePipelineEndpoint == '' && createUpdatePipelineRegion == '' && createUpdatePipelineNeptuneDatabaseName == '') { - console.error('AWS pipeline: is required a Neptune endpoint, or a Neptune database name and region.'); + loggerError('AWS pipeline: is required a Neptune endpoint, or a Neptune database name and region.'); process.exit(1); } if (createUpdatePipelineEndpoint == '' && !createUpdatePipelineRegion == '' && createUpdatePipelineNeptuneDatabaseName == '') { - console.error('AWS pipeline: a Neptune database name is required.'); + loggerError('AWS pipeline: a Neptune database name is required.'); process.exit(1); } if (createUpdatePipelineEndpoint == '' && createUpdatePipelineRegion == '' && !createUpdatePipelineNeptuneDatabaseName == '') { - console.error('AWS pipeline: a Neptune database region is required.'); + loggerError('AWS pipeline: a Neptune database region is required.'); process.exit(1); } if (createUpdatePipelineEndpoint != '') { @@ -376,9 +384,9 @@ async function main() { if (createUpdatePipelineRegion !== parsedRegion) { if (createUpdatePipelineRegion !== 
'') { - console.log('Switching region from ' + createUpdatePipelineRegion + ' to region parsed from endpoint: ' + parsedRegion); + loggerInfo('Switching region from ' + createUpdatePipelineRegion + ' to region parsed from endpoint: ' + parsedRegion); } else { - console.log('Region parsed from endpoint: ' + parsedRegion); + loggerInfo('Region parsed from endpoint: ' + parsedRegion); } createUpdatePipelineRegion = parsedRegion; } @@ -395,17 +403,17 @@ async function main() { } if (inputCDKpipelineEnpoint == '' && inputCDKpipelineRegion == '' && inputCDKpipelineDatabaseName == '') { - console.error('AWS CDK: is required a Neptune endpoint, or a Neptune database name and region.'); + loggerError('AWS CDK: is required a Neptune endpoint, or a Neptune database name and region.'); process.exit(1); } if (inputCDKpipelineEnpoint == '' && !inputCDKpipelineRegion == '' && inputCDKpipelineDatabaseName == '') { - console.error('AWS CDK: a Neptune database name is required.'); + loggerError('AWS CDK: a Neptune database name is required.'); process.exit(1); } if (inputCDKpipelineEnpoint == '' && inputCDKpipelineRegion == '' && !inputCDKpipelineDatabaseName == '') { - console.error('AWS CDK: a Neptune database region is required.'); + loggerError('AWS CDK: a Neptune database region is required.'); process.exit(1); } if (inputCDKpipelineEnpoint != '') { @@ -458,9 +466,9 @@ async function main() { try { writeFileSync(outputSchemaFile, outputSchema); - if (!quiet) console.log('Wrote GraphQL schema to file: ' + yellow(outputSchemaFile)); + loggerInfo('Wrote GraphQL schema to file: ' + yellow(outputSchemaFile), {toConsole: true}); } catch (err) { - console.error('Error writing GraphQL schema to file: ' + outputSchemaFile); + loggerError('Error writing GraphQL schema to file: ' + yellow(outputSchemaFile), err); } @@ -476,9 +484,9 @@ async function main() { try { writeFileSync(outputSourceSchemaFile, outputSourceSchema); - if (!quiet) console.log('Wrote GraphQL schema to file: ' + 
yellow(outputSourceSchemaFile)); + loggerInfo('Wrote output GraphQL schema to file: ' + yellow(outputSourceSchemaFile), {toConsole: true}); } catch (err) { - console.error('Error writing GraphQL schema to file: ' + outputSourceSchemaFile); + loggerError('Error writing output GraphQL schema to file: ' + yellow(outputSourceSchemaFile), err); } @@ -493,9 +501,9 @@ async function main() { try { writeFileSync(outputNeptuneSchemaFile, inputGraphDBSchema); - if (!quiet) console.log('Wrote Neptune schema to file: ' + yellow(outputNeptuneSchemaFile)); + loggerInfo('Wrote Neptune schema to file: ' + yellow(outputNeptuneSchemaFile), {toConsole: true}); } catch (err) { - console.error('Error writing Neptune schema to file: ' + outputNeptuneSchemaFile); + loggerError('Error writing Neptune schema to file: ' + yellow(outputNeptuneSchemaFile), err); } @@ -506,9 +514,9 @@ async function main() { try { writeFileSync(outputLambdaResolverFile, outputLambdaResolver); - if (!quiet) console.log('Wrote Lambda resolver to file: ' + yellow(outputLambdaResolverFile)); + loggerInfo('Wrote Lambda resolver to file: ' + yellow(outputLambdaResolverFile), {toConsole: true}); } catch (err) { - console.error('Error writing Lambda resolver to file: ' + outputLambdaResolverFile); + loggerError('Error writing Lambda resolver to file: ' + yellow(outputLambdaResolverFile), err); } @@ -523,9 +531,9 @@ async function main() { try { writeFileSync(outputJSResolverFile, outputJSResolver); - if (!quiet) console.log('Wrote Javascript resolver to file: ' + yellow(outputJSResolverFile)); + loggerInfo('Wrote Javascript resolver to file: ' + yellow(outputJSResolverFile), {toConsole: true}); } catch (err) { - console.error('Error writing Javascript resolver to file: ' + outputJSResolverFile); + loggerError('Error writing Javascript resolver to file: ' + yellow(outputJSResolverFile), err); } @@ -555,11 +563,11 @@ async function main() { if (!quiet) spinner = ora('Creating Lambda ZIP ...').start(); await 
createLambdaDeploymentPackage(__dirname + outputLambdaPackagePath, outputLambdaResolverZipFile); if (!quiet) { - spinner.stop(); - console.log('Wrote Lambda ZIP file: ' + yellow(outputLambdaResolverZipFile)); + spinner.succeed('Created Lambda ZIP'); } + loggerInfo('Wrote Lambda ZIP file: ' + yellow(outputLambdaResolverZipFile), {toConsole: true}); } catch (err) { - console.error('Error creating Lambda ZIP file: ' + err); + loggerError('Error creating Lambda ZIP file: ' + yellow(outputLambdaResolverZipFile), err); } } @@ -570,13 +578,12 @@ async function main() { try { let endpointParts = createUpdatePipelineEndpoint.split(':'); if (endpointParts.length < 2) { - console.error('Neptune endpoint must be in the form of host:port'); + loggerError('Neptune endpoint must be in the form of host:port'); process.exit(1); } let neptuneHost = endpointParts[0]; let neptunePort = endpointParts[1]; - if (!quiet) console.log('\nCreating AWS pipeline resources') await createUpdateAWSpipeline( createUpdatePipelineName, createUpdatePipelineNeptuneDatabaseName, createUpdatePipelineRegion, @@ -590,20 +597,20 @@ async function main() { neptuneHost, neptunePort, outputFolderPath, - neptuneType ); + neptuneType ); } catch (err) { - console.error('Error creating AWS pipeline: ' + err); + loggerError('Error creating AWS pipeline', err); } } // Output CDK if (inputCDKpipeline) { try { - if (!quiet) console.log('\nCreating CDK File') + loggerInfo('Creating CDK File', {toConsole: true}); let endpointParts = inputCDKpipelineEnpoint.split(':'); if (endpointParts.length < 2) { - console.error('Neptune endpoint must be in the form of host:port'); + loggerError('Neptune endpoint must be in the form of host:port'); process.exit(1); } let neptuneHost = endpointParts[0]; @@ -631,23 +638,23 @@ async function main() { neptuneType: neptuneType }); } catch (err) { - console.error('Error creating CDK File: ' + err); + loggerError('Error creating CDK File', err); } } - if (!quiet) console.log('\nDone\n'); + 
loggerInfo('Done', {toConsole: true}); } // Remove AWS Pipeline if ( removePipelineName != '') { - if (!quiet) console.log('\nRemoving pipeline AWS resources, name: ' + yellow(removePipelineName)) + loggerInfo('Removing pipeline AWS resources, name: ' + yellow(removePipelineName), {toConsole: true}); let resourcesToRemove = null; let resourcesFile = `${outputFolderPath}/${removePipelineName}-resources.json`; - if (!quiet) console.log('Using file: ' + yellow(resourcesFile)); + loggerInfo('Using file: ' + yellow(resourcesFile), {toConsole: true}); try { resourcesToRemove = readFileSync(resourcesFile, 'utf8'); } catch (err) { - console.error('Error reading AWS pipeline resources file: ' + resourcesFile + ' ' + err); + loggerError('Error reading AWS pipeline resources file: ' + yellow(resourcesFile), err); process.exit(1); } await removeAWSpipelineResources(JSON.parse(resourcesToRemove), quiet); diff --git a/src/pipelineResources.js b/src/pipelineResources.js index a66b729..972be34 100644 --- a/src/pipelineResources.js +++ b/src/pipelineResources.js @@ -10,43 +10,44 @@ express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ -import { NeptuneClient, - DescribeDBClustersCommand, +import { NeptuneClient, + DescribeDBClustersCommand, DescribeDBSubnetGroupsCommand } from "@aws-sdk/client-neptune"; -import { IAMClient, - CreateRoleCommand, - AttachRolePolicyCommand, - GetRoleCommand, - CreatePolicyCommand, +import { IAMClient, + CreateRoleCommand, + AttachRolePolicyCommand, + GetRoleCommand, + CreatePolicyCommand, DetachRolePolicyCommand, DeleteRoleCommand, DeletePolicyCommand, - //waitUntilRoleExists, + //waitUntilRoleExists, //waitUntilPolicyExists } from "@aws-sdk/client-iam"; -import { LambdaClient, - CreateFunctionCommand as LambdaCreateFunctionCommand, - GetFunctionCommand, +import { LambdaClient, + CreateFunctionCommand as LambdaCreateFunctionCommand, + GetFunctionCommand, DeleteFunctionCommand, UpdateFunctionCodeCommand } from "@aws-sdk/client-lambda"; -import { AppSyncClient, - CreateGraphqlApiCommand, - StartSchemaCreationCommand, - CreateDataSourceCommand, - CreateFunctionCommand as AppSyncCreateFunctionCommand, +import { AppSyncClient, + CreateGraphqlApiCommand, + StartSchemaCreationCommand, + CreateDataSourceCommand, + CreateFunctionCommand as AppSyncCreateFunctionCommand, CreateResolverCommand, CreateApiKeyCommand, ListGraphqlApisCommand, - DeleteGraphqlApiCommand, + DeleteGraphqlApiCommand, ListResolversCommand } from "@aws-sdk/client-appsync"; import fs from 'fs'; import archiver from 'archiver'; import ora from 'ora'; import { exit } from "process"; +import { loggerDebug, loggerError, loggerInfo, yellow } from './logger.js'; import { parseNeptuneDomainFromHost } from "./util.js"; const NEPTUNE_DB = 'neptune-db'; @@ -80,14 +81,65 @@ let ZIP = null; let RESOURCES = {}; let RESOURCES_FILE = ''; - const sleep = ms => new Promise(r => setTimeout(r, ms)); // alternative: import { setTimeout } from timers/promises let spinner = null; -function yellow(text) { - return '\x1b[33m' + text + '\x1b[0m'; +/** + * Start the spinner with a message. 
+ * + * To also log the message to file, set logInfo = true. Do not log messages with sensitive data to file. + * + * @param text the message to display on the spinner as it is spinning + * @param logInfo if true then the text will also be logged to file at info level + */ +function startSpinner(text, logInfo = false) { + if (logInfo) { + loggerInfo(text); + } + if (!quiet) { + spinner = ora(text).start(); + } +} + +/** + * Stop the spinner with a success message. + * + * To also log the message to file, set logOptions.logLevel to 'info' or 'debug'. Do not log messages with sensitive data to file at info level. + * + * @param text the text to display on the spinner to indicate success + * @param logOptions optional logging options which determine if the text is also logged to file and what level it should be logged at + */ +function succeedSpinner(text, logOptions = {logLevel: ''}) { + if (spinner && !quiet) { + spinner.succeed(text); + } else if (!spinner && !quiet) { + console.error('Cannot succeed spinner as it has not been started'); + console.log(text); + } + if (logOptions.logLevel === 'info') { + loggerInfo(text); + } else if (logOptions.logLevel === 'debug') { + loggerDebug(text); + } } +function warnSpinner(text) { + if (spinner && !quiet) { + spinner.warn(text); + } else if (!spinner && !quiet) { + console.error('Cannot warn spinner as it has not been started'); + console.log(text); + } +} + +function failSpinner(text) { + if (spinner && !quiet) { + spinner.fail(text); + } else if (!spinner && !quiet) { + console.error('Cannot fail spinner as it has not been started'); + console.log(text); + } +} async function checkPipeline() { // Checking if Role, Lambda and AppSync API is already created. 
@@ -99,51 +151,85 @@ async function checkPipeline() { let appSyncExists = false; let roleExists = false; - if (!quiet) spinner = ora('Checking pipeline resources...').start(); + loggerInfo('Checking pipeline resources', {toConsole: true}); + startSpinner('Checking for lambda...'); try { const command = new GetFunctionCommand({FunctionName: NAME +'LambdaFunction'}); await lambdaClient.send(command); lambdaExists = true; + succeedSpinner('Found lambda', {logLevel: 'info'}); } catch (error) { lambdaExists = false; + const text = 'Lambda not found'; + warnSpinner(text); + loggerInfo(text); + loggerDebug("checkPipeline GetFunctionCommand: " + JSON.stringify(error)); } - - try { + + startSpinner('Checking for API...'); + const notFound = 'API not found'; + try { const command = new ListGraphqlApisCommand({apiType: "GRAPHQL"}); const response = await appSyncClient.send(command); response.graphqlApis.forEach(element => { if (element.name == NAME + 'API') { - //APPSYNC_API_ID = element.apiId; + //APPSYNC_API_ID = element.apiId; appSyncExists = true; } }); + if (appSyncExists) { + succeedSpinner('API found', {logLevel: 'info'}); + } else { + warnSpinner(notFound); + loggerInfo(notFound); + } } catch (error) { + warnSpinner(notFound); + loggerInfo(notFound); + loggerError('Error checking for API', error); appSyncExists = false; } + startSpinner('Checking for lambda execution role...'); try { const command = new GetRoleCommand({ RoleName: NAME + "LambdaExecutionRole" }); const response = await iamClient.send(command); LAMBDA_ROLE = response.Role.Arn; roleExists = true; + succeedSpinner('Lambda execution role found', {logLevel: 'info'}); } catch (error) { + const text = 'Lambda execution role not found'; + warnSpinner(text); + loggerInfo(text); + loggerDebug("checkPipeline GetRoleCommand: " + JSON.stringify(error)); roleExists = false; } if (lambdaExists && appSyncExists && roleExists) { - if (!quiet) spinner.succeed('Pipeline exists.'); + loggerInfo('Pipeline exists.', 
{toConsole: true}); pipelineExists = true; } else { - if (!quiet) spinner.warn('Pipeline does not exists.'); + loggerInfo('Pipeline does not exist.', {toConsole: true}); } if (lambdaExists && appSyncExists && roleExists) return; if (!lambdaExists && !appSyncExists && !roleExists) return; - if (!quiet) console.log("One or more pipeline resources are missing."); - if (!lambdaExists && !quiet) console.log(" Lambda " + NAME + "LambdaFunction" + " is missing." ); - if (!roleExists && !quiet) console.log(" Role " + NAME + "LambdaExecutionRole" + " is missing." ); - if (!appSyncExists && !quiet) console.log(" AppSync " + NAME + "API" + " is missing." ); - console.error("Fix the issue manually or create the pipeline resources with a new name.\n"); + + loggerError('One or more pipeline resources are missing.'); + + if (!lambdaExists) { + loggerError('Lambda ' + yellow(NAME) + 'LambdaFunction is missing.'); + } + + if (!roleExists) { + loggerError('Role ' + yellow(NAME) + 'LambdaExecutionRole is missing.'); + } + + if (!appSyncExists) { + loggerError('AppSync ' + yellow(NAME) + 'API is missing.'); + } + + loggerError('Fix the issue manually or create the pipeline resources with a new name.'); process.exit(1); } @@ -208,7 +294,8 @@ async function createLambdaRole() { const iamClient = new IAMClient({region: REGION}); // Create Lambda principal role - if (!quiet) spinner = ora('Creating Lambda principal role ...').start(); + startSpinner('Creating Lambda principal role ...', true); + let roleName = NAME +"LambdaExecutionRole"; const params = { AssumeRolePolicyDocument: JSON.stringify({ Version: "2012-10-17", @@ -220,25 +307,26 @@ async function createLambdaRole() { }, ], }), - RoleName: NAME +"LambdaExecutionRole" + RoleName: roleName }; const data = await iamClient.send(new CreateRoleCommand(params)); //await waitUntilRoleExists({ client: iamClient, maxWaitTime: 180 }, { RoleName: data.Role.RoleName }); // does not work :(, using sleep await sleep(10000); LAMBDA_ROLE = 
data.Role.Arn; - storeResource({LambdaExecutionRole: NAME +"LambdaExecutionRole"}); - if (!quiet) spinner.succeed('Role ARN: ' + yellow(LAMBDA_ROLE)); + storeResource({LambdaExecutionRole: roleName}); + succeedSpinner('Role ARN: ' + yellow(LAMBDA_ROLE), {logLevel: 'debug'}); + loggerInfo('Created Lambda principal role') - // Attach to Lambda role the AWSLambdaBasicExecutionRole - if (!quiet) spinner = ora('Attaching policies to the Lambda principal role ...').start(); + // Attach to Lambda role the AWSLambdaBasicExecutionRole + startSpinner('Attaching AWSLambdaBasicExecutionRole to Lambda Role', true); let input = { - RoleName: NAME +"LambdaExecutionRole", + RoleName: roleName, PolicyArn: "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole", }; let command = new AttachRolePolicyCommand(input); await iamClient.send(command); storeResource({LambdaExecutionPolicy1: input.PolicyArn}); - if (!quiet) spinner.succeed(`Attached ${yellow('AWSLambdaBasicExecutionRole')} to Lambda Role`); + succeedSpinner(`Attached ${yellow('AWSLambdaBasicExecutionRole')} to Lambda Role`, {logLevel: 'info'}); if (NEPTUNE_IAM_AUTH) { @@ -256,7 +344,8 @@ async function createLambdaRole() { } // Create Neptune query policy - if (!quiet) spinner = ora('Creating policy for Neptune queries ...').start(); + startSpinner('Creating policy for Neptune queries', true); + let policyName = NAME+"NeptuneQueryPolicy"; let command = new CreatePolicyCommand({ PolicyDocument: JSON.stringify({ Version: "2012-10-17", @@ -268,40 +357,39 @@ async function createLambdaRole() { }, ], }), - PolicyName: NAME+"NeptuneQueryPolicy", + PolicyName: policyName, }); let response = await iamClient.send(command); const policyARN = response.Policy.Arn; storeResource({NeptuneQueryPolicy: policyARN}); await sleep(5000); - if (!quiet) spinner.succeed('Neptune query policy ARN: ' + yellow(policyARN)); - - // Attach to Lambda role the Neptune query policy. 
- if (!quiet) spinner = ora('Attaching policy for Neptune queries to Lambda role ...').start(); + succeedSpinner('Neptune query policy ARN: ' + yellow(policyARN), {logLevel: 'debug'}); + loggerInfo('Neptune query policy created') + + // Attach to Lambda role the Neptune query policy. + startSpinner('Attaching Neptune query policy to Lambda role ...', true); input = { - RoleName: NAME +"LambdaExecutionRole", + RoleName: roleName, PolicyArn: policyARN, }; command = new AttachRolePolicyCommand(input); await iamClient.send(command); storeResource({LambdaExecutionPolicy2: input.PolicyArn}); await sleep(10000); - if (!quiet) spinner.succeed(`Attached ${yellow('Neptune Query Policy')} policies to Lambda Role`); + succeedSpinner(`Attached ${yellow('Neptune Query Policy')} policies to Lambda Role`, {logLevel: 'info'}); } else { - - - if (!quiet) spinner = ora('Attaching policy for Neptune VPC to Lambda role ...').start(); + startSpinner('Attaching policy for Neptune VPC to Lambda role ...', true); input = { - RoleName: NAME +"LambdaExecutionRole", + RoleName: roleName, PolicyArn: "arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole", }; command = new AttachRolePolicyCommand(input); await iamClient.send(command); storeResource({LambdaExecutionPolicy2: input.PolicyArn}); await sleep(10000); - if (!quiet) spinner.succeed(`Attached ${yellow('AWSLambdaVPCAccessExecutionRole')} policies to role`); + succeedSpinner(`Attached ${yellow('AWSLambdaVPCAccessExecutionRole')} policies to role`, {logLevel: 'info'}); } } @@ -320,15 +408,14 @@ async function createDeploymentPackage(folderPath) { return fileContent; } - async function createLambdaFunction() { - if (!quiet) spinner = ora('Creating Lambda function ...').start(); - + startSpinner('Creating Lambda function', true); + let lambdaName = NAME +'LambdaFunction'; let params = { Code: { ZipFile: ZIP }, - FunctionName: NAME +'LambdaFunction', + FunctionName: lambdaName, Handler: 'index.handler', Role: LAMBDA_ROLE, 
Runtime: 'nodejs18.x', @@ -358,15 +445,17 @@ async function createLambdaFunction() { const lambdaClient = new LambdaClient({region: REGION}); const data = await lambdaClient.send(new LambdaCreateFunctionCommand(params)); LAMBDA_ARN = data.FunctionArn; - storeResource({LambdaFunction: NAME +'LambdaFunction'}); - if (!quiet) spinner.succeed('Lambda Name: ' + yellow(NAME +'LambdaFunction') + ' ARN: ' + yellow(LAMBDA_ARN)); + storeResource({LambdaFunction: lambdaName}); + succeedSpinner('Lambda Name: ' + yellow(lambdaName) + ' ARN: ' + yellow(LAMBDA_ARN), {logLevel: 'debug'}); + loggerInfo('Lambda function created') } async function createAppSyncAPI() { const iamClient = new IAMClient({region: REGION}); - - if (!quiet) spinner = ora('Creating policy for Lambda invocation ...').start(); + + startSpinner('Creating policy for Lambda invocation ...', true); + let policyName = NAME+"LambdaInvokePolicy"; let command = new CreatePolicyCommand({ PolicyDocument: JSON.stringify({ Version: "2012-10-17", @@ -381,14 +470,15 @@ async function createAppSyncAPI() { }, ], }), - PolicyName: NAME+"LambdaInvokePolicy", + PolicyName: policyName, } ); let response = await iamClient.send(command); const policyARN = response.Policy.Arn; storeResource({LambdaInvokePolicy: policyARN}); - if (!quiet) spinner.succeed('Lambda invocation policy ARN: ' + yellow(policyARN)); - + succeedSpinner('Lambda invocation policy created: ' + yellow(policyName), {logLevel: 'debug'}); + loggerInfo('Created lambda invoke policy'); + let roleName = NAME +"LambdaInvocationRole"; let params = { AssumeRolePolicyDocument: JSON.stringify({ Version: "2012-10-17", @@ -402,29 +492,29 @@ async function createAppSyncAPI() { } ] }), - RoleName: NAME +"LambdaInvocationRole" + RoleName: roleName }; - if (!quiet) spinner = ora('Creating role for Lambda invocation ...').start(); + startSpinner('Creating role for Lambda invocation ...', true); response = await iamClient.send(new CreateRoleCommand(params)); const 
LAMBDA_INVOCATION_ROLE = response.Role.Arn; - storeResource({LambdaInvokeRole: NAME +"LambdaInvocationRole"}); + storeResource({LambdaInvokeRole: roleName}); sleep(5000); - if (!quiet) spinner.succeed('Lambda invocation role ARN: ' + yellow(LAMBDA_INVOCATION_ROLE)); - - if (!quiet) spinner = ora('Attaching policy ...').start(); + succeedSpinner('Lambda invocation role created: ' + yellow(roleName), {logLevel: 'debug'}); + loggerInfo('Created lambda invocation role'); + + startSpinner('Attaching policy ...', true); params = { - RoleName: NAME +"LambdaInvocationRole", + RoleName: roleName, PolicyArn: policyARN, }; command = new AttachRolePolicyCommand(params); await iamClient.send(command); - if (!quiet) spinner.succeed('Attached policy to role'); - + succeedSpinner('Attached policy to role', {logLevel: 'info'}); // APPSync API const appSyncClient = new AppSyncClient({region: REGION}); - if (!quiet) spinner = ora('Creating AppSync API ...').start(); + startSpinner('Creating AppSync API ...', true); params = { name: NAME + 'API', authenticationType: "API_KEY", @@ -435,19 +525,18 @@ response = await appSyncClient.send(command); const apiId = response.graphqlApi.apiId; storeResource({AppSyncAPI: apiId}); - if (!quiet) spinner.succeed('Created API id: ' + yellow(apiId) + ' name: ' + yellow(NAME + 'API')); - + succeedSpinner('Created API: ' + yellow(NAME + 'API'), {logLevel: 'debug'}); + loggerInfo('Created App Sync API'); // create Key - if (!quiet) spinner = ora('Creating API key ...').start(); + startSpinner('Creating API key ...', true); command = new CreateApiKeyCommand({apiId: apiId}); response = await appSyncClient.send(command); const apiKey = response.apiKey.id; - if (!quiet) spinner.succeed('Created API key: ' + yellow(apiKey)); - + succeedSpinner('Created API key', {logLevel: 'info'}); // create datasource - if (!quiet) spinner = ora('Creating DataSource ...').start(); + startSpinner('Creating DataSource ...', true); params = 
{ apiId: apiId, name: NAME + 'DataSource', @@ -459,11 +548,11 @@ async function createAppSyncAPI() { }; command = new CreateDataSourceCommand(params); response = await appSyncClient.send(command); - if (!quiet) spinner.succeed('Created DataSource: ' + yellow(NAME+'DataSource')); - + succeedSpinner('Created DataSource: ' + yellow(NAME+'DataSource'), {logLevel: 'debug'}); + loggerInfo('Created datasource'); // create function - if (!quiet) spinner = ora('Creating Function ...').start(); + startSpinner('Creating Function ...', true); params = { apiId: apiId, name: NAME+'Function', @@ -497,10 +586,11 @@ export function response(ctx) { await sleep(5000); let functionId = response.functionConfiguration.functionId; storeResource({AppSyncAPIFunction: functionId}); - if (!quiet) spinner.succeed('Created Function: ' + yellow(NAME+'Function')); + succeedSpinner('Created Function: ' + yellow(NAME+'Function'), {logLevel: 'debug'}); + loggerInfo('Created function'); // Upload schema - if (!quiet) spinner = ora('Uploading schema ...').start(); + startSpinner('Uploading schema ...', true); let encoder = new TextEncoder(); let definition = encoder.encode(APPSYNC_SCHEMA); params = { @@ -510,7 +600,7 @@ export function response(ctx) { command = new StartSchemaCreationCommand(params); response = await appSyncClient.send(command); await sleep(5000); - if (!quiet) spinner.succeed('Added schema'); + succeedSpinner('Added schema', {logLevel: 'info'}); await attachResolvers(appSyncClient, apiId, functionId); } @@ -531,8 +621,8 @@ async function getSchemaFields(typeName) { } -async function attachResolvers(client, apiId, functionId) { - +async function attachResolvers(client, apiId, functionId) { + loggerInfo('Attaching resolvers'); const queries = await getSchemaFields("Query"); let mutations = []; @@ -584,14 +674,17 @@ async function attachResolvers(client, apiId, functionId) { } } } - + loggerInfo('Finished attaching resolvers'); } async function attachResolverToSchemaField (client, 
apiId, functionId, typeName, fieldName) { // attach resolvers to schema - if (!quiet) spinner = ora('Attaching resolver to schema type ' + yellow(typeName) + ' field ' + yellow(fieldName) + ' ...').start(); + const startMsg = 'Attaching resolver to schema type ' + yellow(typeName) + ' field ' + yellow(fieldName) + ' ...'; + startSpinner(startMsg); + loggerDebug(startMsg); + const input = { apiId: apiId, typeName: typeName, @@ -622,7 +715,9 @@ export function response(ctx) { const command = new CreateResolverCommand(input); await client.send(command); await sleep(200); - if (!quiet) spinner.succeed('Attached resolver to schema type ' + yellow(typeName) + ' field ' + yellow(fieldName)); + const endMsg = 'Attached resolver to schema type ' + yellow(typeName) + ' field ' + yellow(fieldName); + succeedSpinner(endMsg); + loggerDebug(endMsg); } @@ -633,33 +728,40 @@ async function removeAWSpipelineResources(resources, quietI) { const iamClient = new IAMClient({region: resources.region}); // Appsync API - if (!quiet) spinner = ora('Deleting AppSync API ...').start(); + startSpinner('Deleting AppSync API ...', true); + try { const input = { apiId: resources.AppSyncAPI }; const command = new DeleteGraphqlApiCommand(input); await appSyncClient.send(command); - if (!quiet) spinner.succeed('Deleted API id: ' + yellow(resources.AppSyncAPI)); + succeedSpinner('Deleted API id: ' + yellow(resources.AppSyncAPI), {logLevel: 'debug'}); + loggerInfo('Deleted AppSync API') } catch (error) { - if (!quiet) spinner.fail('AppSync API delete failed: ' + error); + const message = 'AppSync API delete failed'; + failSpinner(message); + loggerError(message, error); } // Lambda - if (!quiet) spinner = ora('Deleting Lambda function ...').start(); + startSpinner('Deleting Lambda function ...', true); try { const input = { FunctionName: resources.LambdaFunction }; const command = new DeleteFunctionCommand(input); await lambdaClient.send(command); - if (!quiet) spinner.succeed('Lambda function 
deleted: ' + yellow(resources.LambdaFunction)); + succeedSpinner('Deleted Lambda function: ' + yellow(resources.LambdaFunction), {logLevel: 'debug'}); + loggerInfo('Deleted Lambda') } catch (error) { - if (!quiet) spinner.fail('Lambda function fail to delete: ' + error); + const message = 'Lambda function failed to delete'; + failSpinner(message); + loggerError(message, error); } // Lambda execution role - if (!quiet) spinner = ora('Detaching IAM policies from role ...').start(); + startSpinner('Detaching first IAM policy from role ...', true); try { let input = { PolicyArn: resources.LambdaExecutionPolicy1, @@ -667,12 +769,15 @@ async function removeAWSpipelineResources(resources, quietI) { }; let command = new DetachRolePolicyCommand(input); await iamClient.send(command); - if (!quiet) spinner.succeed('Detached policy: ' + yellow(resources.LambdaExecutionPolicy1) + " from role: " + yellow(resources.LambdaExecutionRole)); + succeedSpinner('Detached policy: ' + yellow(resources.LambdaExecutionPolicy1) + " from role: " + yellow(resources.LambdaExecutionRole), {logLevel: 'debug'}); + loggerInfo('Detached first IAM policy from role'); } catch (error) { - if (!quiet) spinner.fail('Detach policy failed: ' + error); + let message = 'Detach first policy failed'; + failSpinner(message); + loggerError(message, error); } - if (!quiet) spinner = ora('Detaching IAM policies from role ...').start(); + startSpinner('Detaching second IAM policy from role ...', true); try { let input = { PolicyArn: resources.LambdaExecutionPolicy2, @@ -680,41 +785,50 @@ async function removeAWSpipelineResources(resources, quietI) { }; let command = new DetachRolePolicyCommand(input); await iamClient.send(command); - if (!quiet) spinner.succeed('Detached policy: ' + yellow(resources.LambdaExecutionPolicy2) + " from role: " + yellow(resources.LambdaExecutionRole)); + succeedSpinner('Detached IAM policy: ' + yellow(resources.LambdaExecutionPolicy2) + " from role: " + 
yellow(resources.LambdaExecutionRole), {logLevel: 'debug'}); + loggerInfo('Detached second IAM policy from role'); } catch (error) { - if (!quiet) spinner.fail('Detach policy failed: ' + error); + const message = 'Detach second IAM policy failed'; + failSpinner(message); + loggerError(message, error); } // Delete Neptune query Policy if (resources.NeptuneQueryPolicy != undefined) { - if (!quiet) spinner = ora('Deleting policy ...').start(); + startSpinner('Deleting query policy ...', true); try { const input = { PolicyArn: resources.NeptuneQueryPolicy, }; const command = new DeletePolicyCommand(input); await iamClient.send(command); - if (!quiet) spinner.succeed('Deleted policy: ' + yellow(resources.NeptuneQueryPolicy)); + succeedSpinner('Deleted query policy: ' + yellow(resources.NeptuneQueryPolicy), {logLevel: 'debug'}); + loggerInfo('Deleted query policy'); } catch (error) { - if (!quiet) spinner.fail('Delete policy failed: ' + error); + const message = 'Delete query policy failed'; + failSpinner(message); + loggerError(message, error); } } // Delete Role - if (!quiet) spinner = ora('Deleting role ...').start(); + startSpinner('Deleting execution role ...', true); try { const input = { RoleName: resources.LambdaExecutionRole, }; const command = new DeleteRoleCommand(input); await iamClient.send(command); - if (!quiet) spinner.succeed('Deleted role: ' + yellow(resources.LambdaExecutionRole)); + succeedSpinner('Deleted execution role: ' + yellow(resources.LambdaExecutionRole), {logLevel: 'debug'}); + loggerInfo('Deleted execution role'); } catch (error) { - if (!quiet) spinner.fail('Delete role failed: ' + error); + const message = 'Delete execution role failed'; + failSpinner(message); + loggerError(message, error); } // AppSync Lambda role - if (!quiet) spinner = ora('Detaching policy from AppSync Lambda role ...').start(); + startSpinner('Detaching invoke policy from AppSync Lambda role ...', true); try { let input = { PolicyArn: resources.LambdaInvokePolicy, 
@@ -722,41 +836,50 @@ async function removeAWSpipelineResources(resources, quietI) { }; let command = new DetachRolePolicyCommand(input); await iamClient.send(command); - if (!quiet) spinner.succeed('Detached policy: ' + yellow(resources.LambdaInvokePolicy) + " from role: " + yellow(resources.LambdaInvokeRole)); + succeedSpinner('Detached invoke policy: ' + yellow(resources.LambdaInvokePolicy) + " from role: " + yellow(resources.LambdaInvokeRole), {logLevel: 'debug'}); + loggerInfo('Detached invoke policy'); } catch (error) { - if (!quiet) spinner.fail('Detach policy failed: ' + error); + const message = 'Detach invoke policy failed'; + failSpinner(message); + loggerError(message, error); } // Delete Policy - if (!quiet) spinner = ora('Deleting policy ...').start(); + startSpinner('Deleting invoke policy ...', true); try { const input = { PolicyArn: resources.LambdaInvokePolicy, }; const command = new DeletePolicyCommand(input); await iamClient.send(command); - if (!quiet) spinner.succeed('Deleted policy: ' + yellow(resources.LambdaInvokePolicy)); + succeedSpinner('Deleted invoke policy: ' + yellow(resources.LambdaInvokePolicy), {logLevel: 'debug'}); + loggerInfo('Deleted invoke policy'); } catch (error) { - if (!quiet) spinner.fail('Delete policy failed: ' + error); + const message = 'Delete invoke policy failed'; + failSpinner(message); + loggerError(message, error); } // Delete Role - if (!quiet) spinner = ora('Deleting role ...').start(); + startSpinner('Deleting invoke role ...', true); try { const input = { RoleName: resources.LambdaInvokeRole, }; const command = new DeleteRoleCommand(input); await iamClient.send(command); - if (!quiet) spinner.succeed('Deleted role: ' + yellow(resources.LambdaInvokeRole)); + succeedSpinner('Deleted invoke role: ' + yellow(resources.LambdaInvokeRole), {logLevel: 'debug'}); + loggerInfo('Deleted invoke role'); } catch (error) { - if (!quiet) spinner.fail('Delete role failed: ' + error); + const message = 'Delete invoke role 
failed'; + failSpinner(message); + loggerError(message, error); } } async function updateLambdaFunction(resources) { - if (!quiet) spinner = ora('Updating Lambda function code ...').start(); + startSpinner('Updating Lambda function code ...', true); const lambdaClient = new LambdaClient({region: resources.region}); const input = { FunctionName: resources.LambdaFunction, @@ -764,14 +887,15 @@ async function updateLambdaFunction(resources) { }; const command = new UpdateFunctionCodeCommand(input); await lambdaClient.send(command); - if (!quiet) spinner.succeed('Lambda function code updated: ' + yellow(resources.LambdaFunction)); + succeedSpinner('Lambda function code updated: ' + yellow(resources.LambdaFunction), {logLevel: 'debug'}); + loggerInfo('Lambda function code updated'); } async function updateAppSyncAPI(resources) { const appSyncClient = new AppSyncClient({region: resources.region}); - if (!quiet) spinner = ora('Updating AppSync API schema ...').start(); + startSpinner('Updating AppSync API schema ...', true); let encoder = new TextEncoder(); let definition = encoder.encode(APPSYNC_SCHEMA); @@ -782,7 +906,7 @@ async function updateAppSyncAPI(resources) { let command = new StartSchemaCreationCommand(params); await appSyncClient.send(command); await sleep(5000); - if (!quiet) spinner.succeed('Schema updated'); + succeedSpinner('AppSync API Schema updated', {logLevel: 'info'}); await attachResolvers(appSyncClient, resources.AppSyncAPI, resources.AppSyncAPIFunction); } @@ -818,32 +942,32 @@ async function createUpdateAWSpipeline ( pipelineName, thisOutputFolderPath = outputFolderPath; NEPTUNE_TYPE = neptuneType; - if (!quiet) console.log('\nCheck if the pipeline resources have been created'); + loggerInfo('Creating or updating AWS pipeline resources ...', {toConsole: true}); await checkPipeline(); if (!pipelineExists) { + loggerInfo('Creating AWS pipeline resources', {toConsole: true}); try { storeResource({region: REGION}); if (NEPTUNE_TYPE === NEPTUNE_DB) { 
try { - if (!quiet) console.log('Get Neptune Cluster Info'); - if (!quiet) spinner = ora('Getting ...').start(); + startSpinner('Getting Neptune Cluster Info ...', true); await setNeptuneDbClusterInfo(); - if (!quiet) spinner.succeed('Got Neptune Cluster Info'); + succeedSpinner('Retrieved Neptune Cluster Info', {logLevel: 'info'}); if (isNeptuneIAMAuth) { if (!NEPTUNE_CURRENT_IAM) { - console.error("The Neptune database authentication is set to VPC."); - console.error("Remove the --create-update-aws-pipeline-neptune-IAM option."); + loggerError('The Neptune database authentication is set to VPC.'); + loggerError('Remove the --create-update-aws-pipeline-neptune-IAM option.'); exit(1); } } else { if (NEPTUNE_CURRENT_IAM) { - console.error("The Neptune database authentication is set to IAM."); - console.error("Add the --create-update-aws-pipeline-neptune-IAM option."); + loggerError('The Neptune database authentication is set to IAM.'); + loggerError('Add the --create-update-aws-pipeline-neptune-IAM option.'); exit(1); } else { - if (!quiet) console.log(`Subnet Group: ` + yellow(NEPTUNE_DBSubnetGroup)); + loggerDebug(`Subnet Group: ` + yellow(NEPTUNE_DBSubnetGroup), {toConsole: true}); } } @@ -851,67 +975,66 @@ async function createUpdateAWSpipeline ( pipelineName, const v = NEPTUNE_CURRENT_VERSION; if (lambdaFilesPath.includes('SDK') == true && (v == '1.2.1.0' || v == '1.2.0.2' || v == '1.2.0.1' || v == '1.2.0.0' || v == '1.1.1.0' || v == '1.1.0.0')) { - console.error("Neptune SDK query is supported starting with Neptune versions 1.2.2.0"); - console.error("Switch to Neptune HTTPS query with option --output-resolver-query-https"); + loggerError("Neptune SDK query is supported starting with Neptune versions 1.2.2.0"); + loggerError("Switch to Neptune HTTPS query with option --output-resolver-query-https"); exit(1); } } } catch (error) { - if (!quiet) spinner.fail("Error getting Neptune Cluster Info."); + let message = 'Error getting Neptune Cluster Info.'; + 
failSpinner(message); + loggerError(message, error); if (!isNeptuneIAMAuth) { - console.error("VPC data is not available to proceed."); + loggerError("VPC data is not available to proceed."); exit(1); } else { - if (!quiet) console.log("Could not read the database ARN to restrict the Lambda permissions. \nTo increase security change the resource in the Neptune Query policy.") - if (!quiet) console.log("Proceeding without getting Neptune Cluster info."); + loggerInfo("Could not read the database ARN to restrict the Lambda permissions. To increase security change the resource in the Neptune Query policy.", {toConsole: true}); + loggerInfo("Proceeding without getting Neptune Cluster info.", {toConsole: true}); } } } - if (!quiet) console.log('Create ZIP'); - if (!quiet) spinner = ora('Creating ZIP ...').start(); - ZIP = await createDeploymentPackage(LAMBDA_FILES_PATH) - if (!quiet) spinner.succeed('Created ZIP File: ' + yellow(LAMBDA_FILES_PATH)); - - if (!quiet) console.log('Create Lambda execution role'); - await createLambdaRole(); - if (!quiet) console.log('Create Lambda function'); - await createLambdaFunction(); + startSpinner('Creating ZIP ...', true); + ZIP = await createDeploymentPackage(LAMBDA_FILES_PATH) + succeedSpinner('Created ZIP File: ' + yellow(LAMBDA_FILES_PATH), {logLevel: 'info'}); - if (!quiet) console.log('Create AppSync API'); + await createLambdaRole(); + await createLambdaFunction(); await createAppSyncAPI(); - if (!quiet) console.log('Saved resorces to file: ' + yellow(RESOURCES_FILE)); + loggerInfo('Saved resources to file: ' + yellow(RESOURCES_FILE), {toConsole: true}); } catch (error) { - if (!quiet) spinner.fail('Error creating resources: ' + error); - console.error('Rolling back resources.'); + const message= 'Error creating resources'; + failSpinner(message); + loggerError(message, error); + loggerError('Rolling back resources.'); await removeAWSpipelineResources(RESOURCES, quiet); - return; } } else { - if (!quiet) console.log('Update 
resources'); + loggerInfo('Updating AWS pipeline resources', {toConsole: true}); let resources = null; try { - if (!quiet) spinner = ora('Loading resources file ...').start(); + startSpinner('Loading resources file ...', true); resources = JSON.parse(fs.readFileSync(RESOURCES_FILE, 'utf8')); - if (!quiet) spinner.succeed('Loaded resources from file: ' + yellow(RESOURCES_FILE)); + succeedSpinner('Loaded resources from file: ' + yellow(RESOURCES_FILE), {logLevel: 'info'}); } catch (error) { - if (!quiet) spinner.warn('Error loading resources file: ' + RESOURCES_FILE + ' ' + error); + const message = 'Error loading resources file: ' + RESOURCES_FILE; + if (!quiet) spinner.warn(message); + loggerError(message, error); return; } - - if (!quiet) console.log('Create ZIP'); - if (!quiet) spinner = ora('Creating ZIP ...').start(); + + startSpinner('Creating ZIP ...', true); ZIP = await createDeploymentPackage(LAMBDA_FILES_PATH) - if (!quiet) spinner.succeed('File: ' + yellow(LAMBDA_FILES_PATH)); + succeedSpinner('Created ZIP File: ' + yellow(LAMBDA_FILES_PATH), {logLevel: 'info'}); - if (!quiet) console.log('Update Lambda function'); + loggerInfo('Updating Lambda function', {toConsole: true}); await updateLambdaFunction(resources); - if (!quiet) console.log('Update AppSync API'); + loggerInfo('Updating AppSync API', {toConsole: true}); await updateAppSyncAPI(resources); } } diff --git a/src/resolverJS.js b/src/resolverJS.js index c7f6133..0b229ba 100644 --- a/src/resolverJS.js +++ b/src/resolverJS.js @@ -11,6 +11,7 @@ permissions and limitations under the License. 
*/ import { readFileSync} from 'fs'; +import { loggerError } from "./logger.js"; function resolverJS (schemaModel, queryLanguage, queryClient, __dirname) { let code = ''; @@ -22,7 +23,7 @@ function resolverJS (schemaModel, queryLanguage, queryClient, __dirname) { code = code.toString().replace('TIMESTAMP HERE', (new Date()).toISOString()); code = code.toString().replace('INSERT SCHEMA DATA MODEL HERE', queryDataModelJSON); } catch (err) { - console.log('ERROR: No resolver template found.'); + loggerError('No resolver template found.', err); } } return code; diff --git a/src/schemaModelValidator.js b/src/schemaModelValidator.js index 5b7e00f..3c59a20 100644 --- a/src/schemaModelValidator.js +++ b/src/schemaModelValidator.js @@ -12,17 +12,13 @@ permissions and limitations under the License. import { schemaStringify } from './schemaParser.js'; import {gql} from 'graphql-tag' +import { loggerInfo, yellow } from "./logger.js"; let quiet = false; const typesToAdd = []; const queriesToAdd = []; const mutationsToAdd = []; -function yellow(text) { - return '\x1b[33m' + text + '\x1b[0m'; -} - - function lowercaseFirstCharacter(inputString) { if (inputString.length === 0) { return inputString; @@ -174,12 +170,12 @@ function addNode(def) { mutationsToAdd.push(`updateNode${name}(input: ${name}Input!): ${name}\n`); mutationsToAdd.push(`deleteNode${name}(_id: ID!): Boolean\n`); - if (!quiet) console.log(`Added input type: ${yellow(name+'Input')}`); - if (!quiet) console.log(`Added query: ${yellow('getNode' + name)}`); - if (!quiet) console.log(`Added query: ${yellow('getNode' + name + 's')}`); - if (!quiet) console.log(`Added mutation: ${yellow('createNode' + name)}`); - if (!quiet) console.log(`Added mutation: ${yellow('updateNode' + name)}`); - if (!quiet) console.log(`Added mutation: ${yellow('deleteNode' + name)}`); + loggerInfo(`Added input type: ${yellow(name+'Input')}`); + loggerInfo(`Added query: ${yellow('getNode' + name)}`); + loggerInfo(`Added query: ${yellow('getNode' 
+ name + 's')}`); + loggerInfo(`Added mutation: ${yellow('createNode' + name)}`); + loggerInfo(`Added mutation: ${yellow('updateNode' + name)}`); + loggerInfo(`Added mutation: ${yellow('deleteNode' + name)}`); } @@ -193,9 +189,9 @@ function addEdge(from, to, edgeName) { mutationsToAdd.push(`connectNode${from}ToNode${to}Edge${edgeName}(from_id: ID!, to_id: ID!): ${edgeName}\n`); mutationsToAdd.push(`deleteEdge${edgeName}From${from}To${to}(from_id: ID!, to_id: ID!): Boolean\n`); - if (!quiet) console.log(`Added type for edge: ${yellow(edgeName)}`); - if (!quiet) console.log(`Added mutation: ${yellow(`connectNode${from}ToNode${to}Edge${edgeName}`)}`); - if (!quiet) console.log(`Added mutation: ${yellow(`deleteEdge${edgeName}From${from}To${to}`)}`); + loggerInfo(`Added type for edge: ${yellow(edgeName)}`); + loggerInfo(`Added mutation: ${yellow(`connectNode${from}ToNode${to}Edge${edgeName}`)}`); + loggerInfo(`Added mutation: ${yellow(`deleteEdge${edgeName}From${from}To${to}`)}`); } } @@ -275,7 +271,7 @@ function inferGraphDatabaseDirectives(schemaModel) { try { referencedType = field.type.type.name.value; edgeName = referencedType + 'Edge'; - if (!quiet) console.log("Infer graph database directive in type: " + yellow(currentType) + " field: " + yellow(field.name.value) + " referenced type: " + yellow(referencedType) + " graph relationship: " + yellow(edgeName)); + loggerInfo("Infer graph database directive in type: " + yellow(currentType) + " field: " + yellow(field.name.value) + " referenced type: " + yellow(referencedType) + " graph relationship: " + yellow(edgeName)); addRelationshipDirective(field, edgeName, 'OUT'); addEdge(currentType, referencedType, edgeName); if (!edgesTypeToAdd.includes(edgeName)) edgesTypeToAdd.push(edgeName); @@ -289,7 +285,7 @@ function inferGraphDatabaseDirectives(schemaModel) { referencedType = field.type.name.value; edgeName = referencedType + 'Edge'; - if (!quiet) console.log("Infer graph database directive in type: " + 
yellow(currentType) + " field: " + yellow(field.name.value) + " referenced type: " + yellow(referencedType) + " graph relationship: " + yellow(edgeName)); + loggerInfo("Infer graph database directive in type: " + yellow(currentType) + " field: " + yellow(field.name.value) + " referenced type: " + yellow(referencedType) + " graph relationship: " + yellow(edgeName)); addRelationshipDirective(field, edgeName, 'OUT'); addEdge(currentType, referencedType, edgeName); if (!edgesTypeToAdd.includes(edgeName)) edgesTypeToAdd.push(edgeName); @@ -321,7 +317,7 @@ function validatedSchemaModel (schemaModel, quietInput) { quiet = quietInput; if (!isGraphDBDirectives(schemaModel)) { - if (!quiet) console.log("The schema model does not contain any graph database directives."); + loggerInfo("The schema model does not contain any graph database directives."); schemaModel = inferGraphDatabaseDirectives(schemaModel); } diff --git a/templates/CDKTemplate.js b/templates/CDKTemplate.js index c6efa34..218e3d5 100644 --- a/templates/CDKTemplate.js +++ b/templates/CDKTemplate.js @@ -58,7 +58,7 @@ class AppSyncNeptuneStack extends Stack { assumedBy: new iam.ServicePrincipal('lambda.amazonaws.com') }); - lambda_role.addManagedPolicy( iam.ManagedPolicy.fromAwsManagedPolicyName('service-role/AWSLambdaBasicExecutionRole')); + lambda_role.addManagedPolicy( iam.ManagedPolicy.fromAwsManagedPolicyName('service-role/AWSLambdaBasicExecutionRole')); let env = { NEPTUNE_HOST: NEPTUNE_HOST, diff --git a/test/TestCases/Case05/input/airports.source.schema.graphql b/test/TestCases/Case05/input/airports.source.schema.graphql deleted file mode 100644 index 35e3768..0000000 --- a/test/TestCases/Case05/input/airports.source.schema.graphql +++ /dev/null @@ -1,153 +0,0 @@ -type Continent @alias(property: "continent") { - id: ID! 
@id - code: String - type: String - desc: String - airportContainssOut(filter: AirportInput, options: Options): [Airport] @relationship(edgeType: "contains", direction: OUT) - contains: Contains -} - -input ContinentInput { - id: ID @id - code: String - type: String - desc: String -} - -type Country @alias(property: "country") { - _id: ID! @id - code: String - type: String - desc: String - airportContainssOut(filter: AirportInput, options: Options): [Airport] @relationship(edgeType: "contains", direction: OUT) - contains: Contains -} - -input CountryInput { - _id: ID @id - code: String - type: String - desc: String -} - -type Version @alias(property: "version") { - _id: ID! @id - date: String - code: String - author: String - type: String - desc: String -} - -input VersionInput { - _id: ID @id - date: String - code: String - author: String - type: String - desc: String -} - -type Airport @alias(property: "airport") { - _id: ID! @id - country: String - longest: Float - code: String - city: String - elev: Float - icao: String - lon: Float - runways: Float - region: String - type: String - lat: Float - desc2: String @alias(property: "desc") - outboundRoutesCount: Int @graphQuery(statement: "MATCH (this)-[r:route]->(a) RETURN count(r)") - continentContainsIn: Continent @relationship(edgeType: "contains", direction: IN) - countryContainsIn: Country @relationship(edgeType: "contains", direction: IN) - airportRoutesOut(filter: AirportInput, options: Options): [Airport] @relationship(edgeType: "route", direction: OUT) - airportRoutesIn(filter: AirportInput, options: Options): [Airport] @relationship(edgeType: "route", direction: IN) - contains: Contains - route: Route -} - -input AirportInput { - _id: ID @id - country: String - longest: Float - code: String - city: String - elev: Float - icao: String - lon: Float - runways: Float - region: String - type: String - lat: Float - desc: String -} - -type Contains @alias(property: "contains") { - _id: ID! 
@id -} - -type Route @alias(property: "route") { - _id: ID! @id - dist: Int -} - -input RouteInput { - dist: Int -} - -input Options { - limit: Int -} - -type Query { - getAirport(code: String): Airport - getAirportConnection(fromCode: String!, toCode: String!): Airport @cypher(statement: "MATCH (:airport{code: '$fromCode'})-[:route]->(this:airport)-[:route]->(:airport{code:'$toCode'})") - getAirportWithGremlin(code:String): Airport @graphQuery(statement: "g.V().has('airport', 'code', '$code').elementMap()") - getContinentsWithGremlin: [Continent] @graphQuery(statement: "g.V().hasLabel('continent').elementMap().fold()") - getCountriesCountGremlin: Int @graphQuery(statement: "g.V().hasLabel('country').count()") - - getNodeContinent(filter: ContinentInput): Continent - getNodeContinents(filter: ContinentInput, options: Options): [Continent] - getNodeCountry(filter: CountryInput): Country - getNodeCountrys(filter: CountryInput, options: Options): [Country] - getNodeVersion(filter: VersionInput): Version - getNodeVersions(filter: VersionInput, options: Options): [Version] - getNodeAirport(filter: AirportInput): Airport - getNodeAirports(filter: AirportInput, options: Options): [Airport] -} - -type Mutation { - createAirport(input: AirportInput!): Airport @graphQuery(statement: "CREATE (this:airport {$input}) RETURN this") - addRoute(fromAirportCode:String, toAirportCode:String, dist:Int): Route @graphQuery(statement: "MATCH (from:airport{code:'$fromAirportCode'}), (to:airport{code:'$toAirportCode'}) CREATE (from)-[this:route{dist:$dist}]->(to) RETURN this") - deleteAirport(id: ID): Int @graphQuery(statement: "MATCH (this:airport) WHERE ID(this) = '$id' DETACH DELETE this") - - createNodeContinent(input: ContinentInput!): Continent - updateNodeContinent(input: ContinentInput!): Continent - deleteNodeContinent(_id: ID!): Boolean - createNodeCountry(input: CountryInput!): Country - updateNodeCountry(input: CountryInput!): Country - deleteNodeCountry(_id: ID!): Boolean - 
createNodeVersion(input: VersionInput!): Version - updateNodeVersion(input: VersionInput!): Version - deleteNodeVersion(_id: ID!): Boolean - createNodeAirport(input: AirportInput!): Airport - updateNodeAirport(input: AirportInput!): Airport - deleteNodeAirport(_id: ID!): Boolean - connectNodeContinentToNodeAirportEdgeContains(from_id: ID!, to_id: ID!): Contains - deleteEdgeContainsFromContinentToAirport(from_id: ID!, to_id: ID!): Boolean - connectNodeCountryToNodeAirportEdgeContains(from_id: ID!, to_id: ID!): Contains - deleteEdgeContainsFromCountryToAirport(from_id: ID!, to_id: ID!): Boolean - connectNodeAirportToNodeAirportEdgeRoute(from_id: ID!, to_id: ID!, edge: RouteInput!): Route - updateEdgeRouteFromAirportToAirport(from_id: ID!, to_id: ID!, edge: RouteInput!): Route - deleteEdgeRouteFromAirportToAirport(from_id: ID!, to_id: ID!): Boolean -} - -schema { - query: Query - mutation: Mutation -} \ No newline at end of file diff --git a/test/TestCases/Case06/input/airports.source.schema.graphql b/test/TestCases/Case06/input/airports.source.schema.graphql deleted file mode 100644 index 35e3768..0000000 --- a/test/TestCases/Case06/input/airports.source.schema.graphql +++ /dev/null @@ -1,153 +0,0 @@ -type Continent @alias(property: "continent") { - id: ID! @id - code: String - type: String - desc: String - airportContainssOut(filter: AirportInput, options: Options): [Airport] @relationship(edgeType: "contains", direction: OUT) - contains: Contains -} - -input ContinentInput { - id: ID @id - code: String - type: String - desc: String -} - -type Country @alias(property: "country") { - _id: ID! @id - code: String - type: String - desc: String - airportContainssOut(filter: AirportInput, options: Options): [Airport] @relationship(edgeType: "contains", direction: OUT) - contains: Contains -} - -input CountryInput { - _id: ID @id - code: String - type: String - desc: String -} - -type Version @alias(property: "version") { - _id: ID! 
@id - date: String - code: String - author: String - type: String - desc: String -} - -input VersionInput { - _id: ID @id - date: String - code: String - author: String - type: String - desc: String -} - -type Airport @alias(property: "airport") { - _id: ID! @id - country: String - longest: Float - code: String - city: String - elev: Float - icao: String - lon: Float - runways: Float - region: String - type: String - lat: Float - desc2: String @alias(property: "desc") - outboundRoutesCount: Int @graphQuery(statement: "MATCH (this)-[r:route]->(a) RETURN count(r)") - continentContainsIn: Continent @relationship(edgeType: "contains", direction: IN) - countryContainsIn: Country @relationship(edgeType: "contains", direction: IN) - airportRoutesOut(filter: AirportInput, options: Options): [Airport] @relationship(edgeType: "route", direction: OUT) - airportRoutesIn(filter: AirportInput, options: Options): [Airport] @relationship(edgeType: "route", direction: IN) - contains: Contains - route: Route -} - -input AirportInput { - _id: ID @id - country: String - longest: Float - code: String - city: String - elev: Float - icao: String - lon: Float - runways: Float - region: String - type: String - lat: Float - desc: String -} - -type Contains @alias(property: "contains") { - _id: ID! @id -} - -type Route @alias(property: "route") { - _id: ID! 
@id - dist: Int -} - -input RouteInput { - dist: Int -} - -input Options { - limit: Int -} - -type Query { - getAirport(code: String): Airport - getAirportConnection(fromCode: String!, toCode: String!): Airport @cypher(statement: "MATCH (:airport{code: '$fromCode'})-[:route]->(this:airport)-[:route]->(:airport{code:'$toCode'})") - getAirportWithGremlin(code:String): Airport @graphQuery(statement: "g.V().has('airport', 'code', '$code').elementMap()") - getContinentsWithGremlin: [Continent] @graphQuery(statement: "g.V().hasLabel('continent').elementMap().fold()") - getCountriesCountGremlin: Int @graphQuery(statement: "g.V().hasLabel('country').count()") - - getNodeContinent(filter: ContinentInput): Continent - getNodeContinents(filter: ContinentInput, options: Options): [Continent] - getNodeCountry(filter: CountryInput): Country - getNodeCountrys(filter: CountryInput, options: Options): [Country] - getNodeVersion(filter: VersionInput): Version - getNodeVersions(filter: VersionInput, options: Options): [Version] - getNodeAirport(filter: AirportInput): Airport - getNodeAirports(filter: AirportInput, options: Options): [Airport] -} - -type Mutation { - createAirport(input: AirportInput!): Airport @graphQuery(statement: "CREATE (this:airport {$input}) RETURN this") - addRoute(fromAirportCode:String, toAirportCode:String, dist:Int): Route @graphQuery(statement: "MATCH (from:airport{code:'$fromAirportCode'}), (to:airport{code:'$toAirportCode'}) CREATE (from)-[this:route{dist:$dist}]->(to) RETURN this") - deleteAirport(id: ID): Int @graphQuery(statement: "MATCH (this:airport) WHERE ID(this) = '$id' DETACH DELETE this") - - createNodeContinent(input: ContinentInput!): Continent - updateNodeContinent(input: ContinentInput!): Continent - deleteNodeContinent(_id: ID!): Boolean - createNodeCountry(input: CountryInput!): Country - updateNodeCountry(input: CountryInput!): Country - deleteNodeCountry(_id: ID!): Boolean - createNodeVersion(input: VersionInput!): Version - 
updateNodeVersion(input: VersionInput!): Version - deleteNodeVersion(_id: ID!): Boolean - createNodeAirport(input: AirportInput!): Airport - updateNodeAirport(input: AirportInput!): Airport - deleteNodeAirport(_id: ID!): Boolean - connectNodeContinentToNodeAirportEdgeContains(from_id: ID!, to_id: ID!): Contains - deleteEdgeContainsFromContinentToAirport(from_id: ID!, to_id: ID!): Boolean - connectNodeCountryToNodeAirportEdgeContains(from_id: ID!, to_id: ID!): Contains - deleteEdgeContainsFromCountryToAirport(from_id: ID!, to_id: ID!): Boolean - connectNodeAirportToNodeAirportEdgeRoute(from_id: ID!, to_id: ID!, edge: RouteInput!): Route - updateEdgeRouteFromAirportToAirport(from_id: ID!, to_id: ID!, edge: RouteInput!): Route - deleteEdgeRouteFromAirportToAirport(from_id: ID!, to_id: ID!): Boolean -} - -schema { - query: Query - mutation: Mutation -} \ No newline at end of file