Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,8 @@ jobs:
matrix:
node-version: [20.x, 22.x, 24.x, 25.x]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v6
- uses: actions/setup-node@v6
with:
node-version: ${{ matrix.node-version }}
- run: npm ci || npm install
Expand Down
7 changes: 5 additions & 2 deletions AGENTS.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,14 @@ If the goal is to back up an entire database en masse, this simple Node.js scrip
- `index.js`: The main entry point and CLI wrapper. Uses `commander` to parse arguments and define commands for backing up, restoring, auditing (`list`), and testing restores.
- `lib/cmd.js`: Helper functions for parsing command line arguments and extracting things like Google Cloud project IDs and bucket names.
- `lib/datastore-backup.js`: Core programmatic logic that interfaces with `@google-cloud/datastore` to execute export operations and execute validation commands via `gcloud`.
- `test/`: Contains the test suite files (currently using Mocha + Should). Tests validate configuration parsing and command generation.
- `test/`: Contains the test suite files (Vitest). Tests validate configuration parsing and command generation.
Comment thread
eschultink marked this conversation as resolved.
- `examples/`: Example configuration files like `backup-schedule.json`.

## Technical Details

- **Dependencies**: Uses `@google-cloud/datastore` for interacting with the Google Cloud Datastore API and executes some `gcloud` CLI commands via `child_process.execSync` for testing and restoration outputs.
- **Testing**: Test suite verifies the core configuration logic but does not automatically hit live GCP endpoints unless explicitly configured.
- **Testing**: Vitest (`npm test`) verifies the core configuration logic but does not automatically hit live GCP endpoints unless explicitly configured.
- **CI**: There is a GitHub Actions workflow that runs standard `npm test` checks.


Generally, keep all dependencies up-to-date with latest; pretty simple thing, so update even if potential breaking changes; rely on agents + automated tests to catch issues.
15 changes: 8 additions & 7 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -28,14 +28,15 @@
* @type {commander.CommanderStatic | commander}
*/

const { Command } = require('commander');
const program = new Command();
const child_process = require('child_process');
const fs = require('fs');
const colors = require('colors');
import { Command } from 'commander';
import child_process from 'node:child_process';
import fs from 'node:fs';
import colors from 'colors';

import { getProjectId, getBackupBucket, validateFrequency } from './lib/cmd.js';
import { backup, testRestoreFromBackup, datastoreRestoreCommand, datastoreStatusCommand } from './lib/datastore-backup.js';

const { getProjectId, getBackupBucket, validateFrequency } = require('./lib/cmd');
const { backup, testRestoreFromBackup, datastoreRestoreCommand, datastoreStatusCommand } = require('./lib/datastore-backup');
const program = new Command();

// custom loadJsonFile function:
async function loadJsonFile(path) {
Expand Down
21 changes: 9 additions & 12 deletions lib/cmd.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import 'colors';

const _ = require('lodash');
const child_process = require('child_process');
import _ from 'lodash';
import child_process from 'node:child_process';

/**
* helpers to handle arguments
Expand All @@ -11,27 +12,27 @@ const child_process = require('child_process');
* @param backupSchedule
* @param frequency
*/
/**
 * validate that the requested frequency exists as a key in the backup schedule
 *
 * @param backupSchedule map of frequency name -> schedule config
 * @param frequency frequency name requested on the command line
 * @throws {Error} when the schedule has no entry for the frequency
 */
export const validateFrequency = (backupSchedule, frequency) => {
  // plain undefined check; no need for lodash's _.isUndefined here
  if (backupSchedule[frequency] === undefined) {
    const msg = `Frequency value (${frequency}) unknown!`;
    console.log(msg.red); // .red is added by the 'colors' side-effect import
    throw new Error(msg);
  }
};

/**
* extract projectId from options in program, or determine it from gcloud context
*
* @param program
* @returns {string}
*/
/**
 * extract projectId from options in program, or determine it from gcloud context
 *
 * @param program commander program (reads the optional `projectId` option)
 * @returns {string} the project id to operate on
 */
export const getProjectId = (program) => {
  let projectId = program.projectId;
  if (!projectId) {
    // fall back to the locally-configured gcloud default project
    projectId = child_process.execSync('gcloud config get-value project 2> /dev/null').toString('utf8').trim();
  }
  return projectId;
};

/**
* determine backupBucket based on program options, projectId, and frequency
Expand All @@ -41,11 +42,7 @@ const getProjectId = (program) => {
* @param frequency
* @returns {string}
*/
/**
 * determine backupBucket based on program options, projectId, and frequency
 *
 * @param program commander program (reads the optional `bucketPrefix` option)
 * @param projectId GCP project id, used to build the default prefix
 * @param frequency frequency name, appended as the bucket suffix
 * @returns {string} name of the GCS bucket to back up into
 */
export const getBackupBucket = (program, projectId, frequency) => {
  // default prefix is "<projectId>_backup" unless overridden on the command line
  const prefix = program.bucketPrefix || `${projectId}_backup`;
  return `${prefix}_${frequency}`;
};
28 changes: 11 additions & 17 deletions lib/datastore-backup.js
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
import 'colors';


const _ = require('lodash');
const child_process = require('child_process');
require('colors');
import _ from 'lodash';
import child_process from 'node:child_process';
// Imports the Google Cloud client library
// @see https://googleapis.dev/nodejs/datastore/latest/
const {Datastore} = require('@google-cloud/datastore');
import { Datastore } from '@google-cloud/datastore';

/**
* create a backup of kinds from project into bucket; depends only on Datastore API client, rather
Expand All @@ -18,7 +17,7 @@ const {Datastore} = require('@google-cloud/datastore');
* @param options
* @returns {string|*}
*/
const backup = async (kinds, project, bucket, options) => {
export const backup = async (kinds, project, bucket, options) => {

const datastore = new Datastore({projectId : project});

Expand All @@ -44,31 +43,26 @@ const backup = async (kinds, project, bucket, options) => {
* @param options
* @returns {string} output from the commands
*/
/**
 * restore a single kind from a backup and then list operation status, as a
 * smoke test that the backup is actually restorable
 *
 * NOTE: runs `gcloud` synchronously via child_process.execSync.
 *
 * @param kind datastore kind to restore
 * @param project GCP project id
 * @param bucket GCS bucket holding the backup
 * @param timestamp timestamp directory of the backup within the bucket
 * @param options may contain `account` to run gcloud as a specific account
 * @returns {string} combined stdout of the restore and status commands
 */
export const testRestoreFromBackup = (kind, project, bucket, timestamp, options) => {
  const restore = child_process.execSync(datastoreRestoreCommand([kind], project, bucket, timestamp, options)).toString('utf8');
  // decode explicitly instead of relying on Buffer -> string coercion in the concat
  const status = child_process.execSync(datastoreStatusCommand(project, options)).toString('utf8');
  return restore + "\n" + status;
};

/**
 * build the `gcloud datastore import` command that restores the given kinds
 * from a backup stored in the bucket
 *
 * @param kinds array of datastore kind names to import
 * @param project GCP project id
 * @param bucket GCS bucket holding the backup
 * @param timestamp timestamp directory of the backup within the bucket
 * @param options may contain `account` to run gcloud as a specific account
 * @returns {string} the full gcloud command line
 */
export const datastoreRestoreCommand = (kinds, project, bucket, timestamp, options) => {
  let authOptions = ' --project ' + project;
  // optional chaining mirrors datastoreStatusCommand and drops the lodash dependency
  if (options?.account !== undefined) {
    authOptions += ' --account ' + options.account;
  }
  const backupMetadataFile = 'gs://' + bucket + '/' + timestamp + '/' + timestamp + '.overall_export_metadata';
  return 'gcloud datastore import ' + authOptions + ' --kinds="' + kinds.join(',') + '" --async ' + backupMetadataFile;
};

/**
 * build the `gcloud datastore operations list` command used to check on
 * pending import/export operations
 *
 * @param project GCP project id
 * @param options may contain `account` to run gcloud as a specific account
 * @returns {string} the full gcloud command line
 */
export const datastoreStatusCommand = (project, options) => {
  let authOptions = '';
  if (options?.account) {
    authOptions += ' --account ' + options.account;
  }
  authOptions += ' --project ' + project;
  return 'gcloud datastore operations' + authOptions + ' list';
};
Loading
Loading