Initial commit
72  themes/keepit/node_modules/@algolia/cli/commands/AddRules.js  (generated, vendored, normal file)
@@ -0,0 +1,72 @@
const fs = require('fs');
const algolia = require('algoliasearch');
const Base = require('./Base.js');

class AddRulesScript extends Base {
  constructor() {
    super();
    // Bind class methods
    this.getSource = this.getSource.bind(this);
    this.parseBatchRulesOptions = this.parseBatchRulesOptions.bind(this);
    this.start = this.start.bind(this);
    // Define validation constants
    this.message =
      '\nExample: $ algolia addrules -a algoliaappid -k algoliaapikey -n algoliaindexname -s sourcefilepath -p batchRulesParams\n\n';
    this.params = [
      'algoliaappid',
      'algoliaapikey',
      'algoliaindexname',
      'sourcefilepath',
    ];
  }

  getSource(path) {
    const filepath = this.normalizePath(path);
    if (!fs.lstatSync(filepath).isFile())
      throw new Error('Source filepath must target valid rules file.');
    return filepath;
  }

  parseBatchRulesOptions(params) {
    try {
      const options = { forwardToReplicas: false, clearExistingRules: false };
      if (params === null) return options;
      else return JSON.parse(params);
    } catch (e) {
      throw e;
    }
  }

  async start(program) {
    try {
      // Validate command; if invalid display help text and exit
      this.validate(program, this.message, this.params);

      // Config params
      const appId = program.algoliaappid;
      const apiKey = program.algoliaapikey;
      const indexName = program.algoliaindexname;
      const sourcefilepath = program.sourcefilepath;
      const params = program.params || null;

      // Get rules
      const rulesPath = this.getSource(sourcefilepath);
      const rulesFile = await fs.readFileSync(rulesPath);
      const rules = JSON.parse(rulesFile);
      // Get options
      const batchRulesOptions = this.parseBatchRulesOptions(params);

      // Instantiate Algolia index
      const client = algolia(appId, apiKey);
      const index = client.initIndex(indexName);
      // Add rules
      const result = await index.batchRules(rules, batchRulesOptions);
      return console.log(result);
    } catch (e) {
      throw e;
    }
  }
}

const addRulesScript = new AddRulesScript();
module.exports = addRulesScript;
89  themes/keepit/node_modules/@algolia/cli/commands/AddSynonyms.js  (generated, vendored, normal file)
@@ -0,0 +1,89 @@
const fs = require('fs');
const algolia = require('algoliasearch');
const Base = require('./Base.js');

class AddSynonymsScript extends Base {
  constructor() {
    super();
    // Bind class methods
    this.getSource = this.getSource.bind(this);
    this.parseBatchSynonymsOptions = this.parseBatchSynonymsOptions.bind(this);
    this.convertCsvToJson = this.convertCsvToJson.bind(this);
    this.start = this.start.bind(this);
    // Define validation constants
    this.message =
      '\nExample: $ algolia addsynonyms -a algoliaappid -k algoliaapikey -n algoliaindexname -s sourcefilepath -p batchSynonymsParams\n\n';
    this.params = [
      'algoliaappid',
      'algoliaapikey',
      'algoliaindexname',
      'sourcefilepath',
    ];
  }

  getSource(path) {
    const filepath = this.normalizePath(path);
    if (!fs.lstatSync(filepath).isFile())
      throw new Error('Source filepath must target valid synonyms file.');
    return filepath;
  }

  parseBatchSynonymsOptions(params) {
    try {
      const options = {
        forwardToReplicas: false,
        clearExistingSynonyms: false,
      };
      if (params === null) return options;
      else return JSON.parse(params);
    } catch (e) {
      throw e;
    }
  }

  convertCsvToJson(synonymFile, filepath) {
    const synonyms = synonymFile.toString().split('\n');
    return synonyms.map((line, num) => ({
      type: 'synonym',
      objectID: `${filepath}-${num}`,
      synonyms: line.split(','),
    }));
  }

  async start(program) {
    try {
      // Validate command; if invalid display help text and exit
      this.validate(program, this.message, this.params);

      // Config params
      const appId = program.algoliaappid;
      const apiKey = program.algoliaapikey;
      const indexName = program.algoliaindexname;
      const sourcefilepath = program.sourcefilepath;
      const params = program.params || null;
      const isCsv = sourcefilepath.split('.').pop() === 'csv';

      // Get synonyms
      const synonymsPath = this.getSource(sourcefilepath);
      const synonymsFile = await fs.readFileSync(synonymsPath);
      const synonyms = isCsv
        ? this.convertCsvToJson(synonymsFile, sourcefilepath)
        : JSON.parse(synonymsFile);

      // Get options
      const batchSynonymsOptions = this.parseBatchSynonymsOptions(params);

      // Instantiate Algolia index
      const client = algolia(appId, apiKey);
      const index = client.initIndex(indexName);
      // Add synonyms
      const result = await index.batchSynonyms(synonyms, batchSynonymsOptions);
      return console.log(result);
    } catch (e) {
      throw e;
    }
  }
}

const addSynonymsScript = new AddSynonymsScript();
module.exports = addSynonymsScript;
97  themes/keepit/node_modules/@algolia/cli/commands/Base.js  (generated, vendored, normal file)
@@ -0,0 +1,97 @@
const os = require('os');
const fs = require('fs');
const path = require('path');
const readLine = require('readline');
const chalk = require('chalk');
const speedTest = require('speedtest-net');

class Base {
  constructor() {
    this.maxHeapMb = process.arch.includes('64') ? 1024 : 512;
  }

  validate(program, message, params) {
    let flag = false;
    let output = message;
    params.forEach(param => {
      if (!program[param]) {
        output += chalk.red(`Must specify ${param}\n`);
        flag = true;
      }
    });
    if (flag) return program.help(h => h + output);
    else return { flag, output };
  }

  writeProgress(message) {
    readLine.clearLine(process.stdout, 0);
    readLine.cursorTo(process.stdout, 0);
    process.stdout.write(message);
  }

  normalizePath(input) {
    // Convert path input param to valid system absolute path
    // Path is absolute, originating from system root
    if (path.isAbsolute(input)) return input;
    // Path is relative to user's home directory
    if (input[0] === '~') return path.join(os.homedir(), input.substr(1));
    // Path is relative to current directory
    return path.resolve(process.cwd(), input);
  }

  setSource(options) {
    // Set source directory and filenames array
    // Used to process path inputs that may either be a single file or a directory of files
    const source = this.normalizePath(options.sourceFilepath);
    if (fs.lstatSync(source).isDirectory()) {
      this.directory = source;
      this.filenames = fs.readdirSync(source);
    } else if (fs.lstatSync(source).isFile()) {
      this.directory = path.parse(source).dir;
      this.filenames = [path.parse(source).base];
    } else {
      throw new Error('Invalid sourcefilepath param');
    }
  }

  getMemoryUsage() {
    const used = process.memoryUsage().heapUsed / 1024 / 1024;
    const usedMb = Math.round(used * 100) / 100;
    const percentUsed = Math.floor((usedMb / this.maxHeapMb) * 100);
    return { usedMb, percentUsed };
  }

  getStringSizeMb(string) {
    const bytes = Buffer.byteLength(string, 'utf8');
    const mb = bytes / 1024 / 1024;
    return Math.ceil(mb);
  }

  getNetworkSpeed() {
    return new Promise((resolve, reject) => {
      this.writeProgress('Estimating network speed...');
      const test = speedTest({ maxTime: 5000 });
      let downloadSpeedMb = null;
      let uploadSpeedMb = null;
      test.on('error', e => {
        console.log(chalk.white.bgRed('Speed test error'), chalk.red(e));
        reject(e);
      });
      test.on('downloadspeed', speed => {
        downloadSpeedMb = ((speed * 125) / 1000).toFixed(2);
      });
      test.on('uploadspeed', speed => {
        uploadSpeedMb = ((speed * 125) / 1000).toFixed(2);
      });
      test.on('done', () => {
        console.log(
          chalk.blue(`\nDownload: ${downloadSpeedMb} MB/s`),
          chalk.blue(`\nUpload: ${uploadSpeedMb} MB/s`)
        );
        resolve(uploadSpeedMb);
      });
    });
  }
}

module.exports = Base;
104  themes/keepit/node_modules/@algolia/cli/commands/DeleteIndicesPattern.js  (generated, vendored, normal file)
@@ -0,0 +1,104 @@
const algolia = require('algoliasearch');
const Base = require('./Base.js');

class DeleteIndicesPatternScript extends Base {
  constructor() {
    super();
    // Define validation constants
    this.message =
      "\nUsage: $ algolia deleteindices -a algoliaappid -k algoliaapikey -r 'regexp for filtering' -x\n\n";
    this.params = ['algoliaappid', 'algoliaapikey', 'regexp', 'dryrun'];
  }

  removeReplicas({ indices, regexp, dryRun }) {
    return Promise.all(
      indices.map(async ({ name: indexName }) => {
        const index = await this.client.initIndex(indexName);
        const indexSettings = await index.getSettings();
        const replicas = indexSettings.slaves || indexSettings.replicas;
        if (replicas !== undefined && replicas.length > 0) {
          const newReplicas = replicas.filter(
            replicaIndexName => regexp.test(replicaIndexName) === false
          );

          if (replicas.length !== newReplicas.length) {
            if (dryRun === false) {
              const { taskID } = await index.setSettings({
                [indexSettings.slaves !== undefined
                  ? 'slaves'
                  : 'replicas']: newReplicas,
              });
              await index.waitTask(taskID);
            } else {
              console.log(
                `[DRY RUN] Replicas change on index ${indexName}, \n- before: ${replicas.join(
                  ','
                )}\n- after: ${newReplicas.join(',')}`
              );
            }
          }
        }

        return false;
      })
    );
  }

  deleteIndices({ indices, regexp, dryRun }) {
    let deletedIndices = 0;
    return Promise.all(
      indices
        .filter(({ name: indexName }) => regexp.test(indexName) === true)
        .map(async ({ name: indexName }) => {
          deletedIndices++;

          if (dryRun === false) {
            this.writeProgress(`Deleted indices: ${deletedIndices}`);
            const index = this.client.initIndex(indexName);
            const { taskID } = await this.client.deleteIndex(indexName);
            return index.waitTask(taskID);
          } else {
            console.log(`[DRY RUN] Delete index ${indexName}`);
            return false;
          }
        })
    ).then(() => {
      console.log('');
      if (dryRun === false) {
        console.log(`${deletedIndices} indices deleted`);
      } else {
        console.log(`[DRY RUN] ${deletedIndices} indices deleted`);
      }
    });
  }

  async deleteIndicesPattern(options) {
    this.client = algolia(options.appId, options.apiKey);
    const { items: indices } = await this.client.listIndexes();
    const regexp = new RegExp(options.regexp);
    await this.removeReplicas({ indices, regexp, dryRun: options.dryRun });
    await this.deleteIndices({ indices, regexp, dryRun: options.dryRun });
  }

  start(program) {
    try {
      // Validate command; if invalid display help text and exit
      this.validate(program, this.message, this.params);

      // Config params
      const options = {
        appId: program.algoliaappid,
        apiKey: program.algoliaapikey,
        regexp: program.regexp,
        dryRun: program.dryrun !== undefined ? program.dryrun === 'true' : true,
      };

      // Delete indices
      return this.deleteIndicesPattern(options);
    } catch (e) {
      throw e;
    }
  }
}

module.exports = new DeleteIndicesPatternScript();
117  themes/keepit/node_modules/@algolia/cli/commands/Export.js  (generated, vendored, normal file)
@@ -0,0 +1,117 @@
const fs = require('fs');
const path = require('path');
const algolia = require('algoliasearch');
const Base = require('./Base.js');

class ExportScript extends Base {
  constructor() {
    super();
    // Bind class methods
    this.getOutput = this.getOutput.bind(this);
    this.parseParams = this.parseParams.bind(this);
    this.writeFile = this.writeFile.bind(this);
    this.exportData = this.exportData.bind(this);
    this.start = this.start.bind(this);
    // Define validation constants
    this.message =
      '\nExample: $ algolia export -a algoliaappid -k algoliaapikey -n algoliaindexname -o outputpath -p params\n\n';
    this.params = ['algoliaappid', 'algoliaapikey', 'algoliaindexname'];
  }

  getOutput(outputPath) {
    // If no outputPath is provided, use directory from which command was invoked
    const outputDir =
      outputPath !== null ? this.normalizePath(outputPath) : process.cwd();
    // Ensure outputPath is a directory
    if (!fs.lstatSync(outputDir).isDirectory())
      throw new Error('Output path must be a directory.');
    return outputDir;
  }

  parseParams(params) {
    try {
      if (params === null) return { hitsPerPage: 1000 };
      return JSON.parse(params);
    } catch (e) {
      throw e;
    }
  }

  writeFile(hits, options, fileCount) {
    const filename = `algolia-index-${options.indexName}-${fileCount}.json`;
    const filePath = path.resolve(options.outputPath, filename);
    fs.writeFileSync(filePath, JSON.stringify(hits));
    return console.log(`\nDone writing ${filename}`);
  }

  exportData(options) {
    return new Promise((resolve, reject) => {
      // Instantiate Algolia index
      const client = algolia(options.appId, options.apiKey);
      const index = client.initIndex(options.indexName);

      // Export index
      const browse = index.browseAll('', options.params);
      let hits = [];
      let hitsCount = 0;
      let fileCount = 0;

      browse.on('result', result => {
        // Push 1000 new hits to array
        hits = hits.concat(result.hits);
        hitsCount += result.hits.length;
        this.writeProgress(`Records browsed: ${hitsCount}`);
        if (hits.length >= 10000) {
          // Write batch of 10,000 records to file
          fileCount++;
          this.writeFile(hits, options, fileCount);
          // Clear array
          hits = [];
        }
      });

      browse.on('end', () => {
        if (hits.length > 0) {
          // Write remaining records to file
          fileCount++;
          this.writeFile(hits, options, fileCount);
        }
        return resolve(
          `\nDone exporting index.\nSee your data here: ${options.outputPath}`
        );
      });

      browse.on('error', err => reject(err));
    });
  }

  async start(program) {
    try {
      // Validate command; if invalid display help text and exit
      this.validate(program, this.message, this.params);

      // Config params
      const options = {
        appId: program.algoliaappid,
        apiKey: program.algoliaapikey,
        indexName: program.algoliaindexname,
        outputPath: program.outputpath || null,
        params: program.params || null,
      };

      // Configure and validate output path
      options.outputPath = this.getOutput(options.outputPath);
      // Configure browseAll params
      options.params = this.parseParams(options.params);

      // Export data
      const result = await this.exportData(options);
      return console.log(result);
    } catch (e) {
      throw e;
    }
  }
}

const exportScript = new ExportScript();
module.exports = exportScript;
61  themes/keepit/node_modules/@algolia/cli/commands/ExportRules.js  (generated, vendored, normal file)
@@ -0,0 +1,61 @@
const fs = require('fs');
const path = require('path');
const algolia = require('algoliasearch');
const Base = require('./Base.js');

class ExportRulesScript extends Base {
  constructor() {
    super();
    // Bind class methods
    this.getOutputPath = this.getOutputPath.bind(this);
    this.start = this.start.bind(this);
    // Define validation constants
    this.message =
      '\nExample: $ algolia exportrules -a algoliaappid -k algoliaapikey -n algoliaindexname -o outputpath\n\n';
    this.params = ['algoliaappid', 'algoliaapikey', 'algoliaindexname'];
  }

  getOutputPath(outputpath, indexName) {
    const defaultFilename = `${indexName}-rules.json`;
    const defaultFilepath = path.resolve(process.cwd(), defaultFilename);
    // Process output filepath
    const filepath =
      outputpath !== null ? this.normalizePath(outputpath) : defaultFilepath;
    // Validate filepath targets valid directory
    const dir = path.dirname(filepath);
    if (!fs.lstatSync(dir).isDirectory()) {
      throw new Error(
        `Output path must target valid directory. Eg. ${defaultFilepath}`
      );
    }
    return filepath;
  }

  async start(program) {
    try {
      // Validate command; if invalid display help text and exit
      this.validate(program, this.message, this.params);

      // Config params
      const appId = program.algoliaappid;
      const apiKey = program.algoliaapikey;
      const indexName = program.algoliaindexname;
      const outputpath = program.outputpath || null;

      const filepath = this.getOutputPath(outputpath, indexName);

      // Instantiate Algolia index
      const client = algolia(appId, apiKey);
      const index = client.initIndex(indexName);
      // Export query rules
      const rules = await index.exportRules();
      fs.writeFileSync(filepath, JSON.stringify(rules));
      return console.log(`Done writing ${filepath}`);
    } catch (e) {
      throw e;
    }
  }
}

const exportRulesScript = new ExportRulesScript();
module.exports = exportRulesScript;
61  themes/keepit/node_modules/@algolia/cli/commands/ExportSynonyms.js  (generated, vendored, normal file)
@@ -0,0 +1,61 @@
const fs = require('fs');
const path = require('path');
const algolia = require('algoliasearch');
const Base = require('./Base.js');

class ExportSynonymsScript extends Base {
  constructor() {
    super();
    // Bind class methods
    this.getOutputPath = this.getOutputPath.bind(this);
    this.start = this.start.bind(this);
    // Define validation constants
    this.message =
      '\nExample: $ algolia exportsynonyms -a algoliaappid -k algoliaapikey -n algoliaindexname -o outputpath\n\n';
    this.params = ['algoliaappid', 'algoliaapikey', 'algoliaindexname'];
  }

  getOutputPath(outputpath, indexName) {
    const defaultFilename = `${indexName}-synonyms.json`;
    const defaultFilepath = path.resolve(process.cwd(), defaultFilename);
    // Process output filepath
    const filepath =
      outputpath !== null ? this.normalizePath(outputpath) : defaultFilepath;
    // Validate filepath targets valid directory
    const dir = path.dirname(filepath);
    if (!fs.lstatSync(dir).isDirectory()) {
      throw new Error(
        `Output path must target valid directory. Eg. ${defaultFilepath}`
      );
    }
    return filepath;
  }

  async start(program) {
    try {
      // Validate command; if invalid display help text and exit
      this.validate(program, this.message, this.params);

      // Config params
      const appId = program.algoliaappid;
      const apiKey = program.algoliaapikey;
      const indexName = program.algoliaindexname;
      const outputpath = program.outputpath || null;

      const filepath = this.getOutputPath(outputpath, indexName);

      // Instantiate Algolia index
      const client = algolia(appId, apiKey);
      const index = client.initIndex(indexName);
      // Export synonyms
      const synonyms = await index.exportSynonyms();
      fs.writeFileSync(filepath, JSON.stringify(synonyms));
      return console.log(`Done writing ${filepath}`);
    } catch (e) {
      throw e;
    }
  }
}

const exportSynonymsScript = new ExportSynonymsScript();
module.exports = exportSynonymsScript;
38  themes/keepit/node_modules/@algolia/cli/commands/GetSettings.js  (generated, vendored, normal file)
@@ -0,0 +1,38 @@
const algolia = require('algoliasearch');
const Base = require('./Base.js');

class GetSettingsScript extends Base {
  constructor() {
    super();
    // Bind class methods
    this.start = this.start.bind(this);
    // Define validation constants
    this.message =
      '\nExample: $ algolia getsettings -a algoliaappid -k algoliaapikey -n algoliaindexname\n\n';
    this.params = ['algoliaappid', 'algoliaapikey', 'algoliaindexname'];
  }

  async start(program) {
    try {
      // Validate command; if invalid display help text and exit
      this.validate(program, this.message, this.params);

      // Config params
      const appId = program.algoliaappid;
      const apiKey = program.algoliaapikey;
      const indexName = program.algoliaindexname;

      // Instantiate Algolia index
      const client = algolia(appId, apiKey);
      const index = client.initIndex(indexName);
      // Get index settings
      const settings = await index.getSettings();
      return console.log(JSON.stringify(settings));
    } catch (e) {
      throw e;
    }
  }
}

const getSettingsScript = new GetSettingsScript();
module.exports = getSettingsScript;
379  themes/keepit/node_modules/@algolia/cli/commands/Import.js  (generated, vendored, normal file)
@@ -0,0 +1,379 @@
const fs = require('fs');
const JSONStream = require('JSONStream');
const through = require('through');
const transform = require('stream-transform');
const Batch = require('batch-stream');
const async = require('async');
const csv = require('csvtojson');
const regexParser = require('regex-parser');
const chalk = require('chalk');
const algolia = require('algoliasearch');
const Base = require('./Base.js');

class ImportScript extends Base {
  constructor() {
    super();
    // Bind class methods
    this.defaultTransformations = this.defaultTransformations.bind(this);
    this.suggestions = this.suggestions.bind(this);
    this.checkMemoryUsage = this.checkMemoryUsage.bind(this);
    this.handleHighMemoryUsage = this.handleHighMemoryUsage.bind(this);
    this.handleExtremeMemoryUsage = this.handleExtremeMemoryUsage.bind(this);
    this.setIndex = this.setIndex.bind(this);
    this.setTransformations = this.setTransformations.bind(this);
    this.setCsvOptions = this.setCsvOptions.bind(this);
    this.conditionallyParseCsv = this.conditionallyParseCsv.bind(this);
    this.setBatchSize = this.setBatchSize.bind(this);
    this.estimateBatchSize = this.estimateBatchSize.bind(this);
    this.updateBatchSize = this.updateBatchSize.bind(this);
    this.importToAlgolia = this.importToAlgolia.bind(this);
    this.retryImport = this.retryImport.bind(this);
    this.indexFiles = this.indexFiles.bind(this);
    this.start = this.start.bind(this);
    // Define validation constants
    this.message =
      '\nExample: $ algolia import -s sourcefilepath -a algoliaappid -k algoliaapikey -n algoliaindexname -b batchsize -t transformationfilepath -m maxconcurrency -p csvtojsonparams\n\n';
    this.params = [
      'sourcefilepath',
      'algoliaappid',
      'algoliaapikey',
      'algoliaindexname',
    ];
  }

  defaultTransformations(data, cb) {
    cb(null, data);
  }

  suggestions() {
    let output = `\nConsider reducing <batchSize> (currently ${
      this.batchSize
    }).`;
    if (this.maxConcurrency > 1)
      output += `\nConsider reducing <maxConcurrency> (currently ${
        this.maxConcurrency
      }).`;
    return output;
  }

  checkMemoryUsage() {
    // Exit early if high memory usage warning issued too recently
    if (this.highMemoryUsage) return false;
    // Get memory usage
    const { usedMb, percentUsed } = this.getMemoryUsage();
    // Handle if heap usage exceeds n% of estimated allocation for node process
    if (percentUsed >= 70) this.handleHighMemoryUsage(percentUsed);
    if (percentUsed >= 90) this.handleExtremeMemoryUsage(usedMb, percentUsed);
    return false;
  }

  handleHighMemoryUsage(percentUsed) {
    const newBatchSize = Math.floor(this.batchSize / 2);
    this.updateBatchSize(newBatchSize);
    this.writeProgress(
      `High memory usage (${percentUsed}%). Reducing batchSize to ${newBatchSize}`
    );
  }

  handleExtremeMemoryUsage(usedMb, percentUsed) {
    // Issue warning
    const name = `Warning: High memory usage`;
    const message = `Memory usage at ${usedMb} MB (${percentUsed}% of heap allocation for this process).`;
    // Set class instance flag to debounce future warnings
    this.highMemoryUsage = true;
    // Output warning
    console.log(
      chalk.white.bgRed(`\n${name}`),
      chalk.red(`\n${message}`),
      chalk.red(`${this.suggestions()}`)
    );
    // Reset flag in 30 seconds
    setTimeout(() => {
      this.highMemoryUsage = false;
    }, 30000);
  }

  setIndex(options) {
    // Set Algolia index
    this.client = algolia(options.appId, options.apiKey);
    this.index = this.client.initIndex(options.indexName);
  }

  setTransformations(options) {
    try {
      // Set JSON record transformations
      const transformations = options.transformations
        ? require(this.normalizePath(options.transformations))
        : null;
      // Validate transformations function input param
      const valid = transformations && typeof transformations === 'function';
      // Assign our transformations function using provided custom transformations file if exists
      this.formatRecord = valid ? transformations : this.defaultTransformations;
    } catch (e) {
      throw e;
    }
  }

  setCsvOptions(options) {
    try {
      this.csvOptions = options.csvToJsonParams
        ? JSON.parse(options.csvToJsonParams)
        : null;
      if (!this.csvOptions) return;
      const csvToJsonRegexPropertyList = ['includeColumns', 'ignoreColumns'];
      csvToJsonRegexPropertyList.forEach(prop => {
        if (this.csvOptions.hasOwnProperty(prop)) {
          this.csvOptions[prop] = regexParser(this.csvOptions[prop]);
        }
      });
    } catch (e) {
      throw e;
    }
  }

  conditionallyParseCsv(isCsv) {
    // Return the appropriate writestream for piping depending on filetype
    return isCsv
      ? csv(this.csvOptions) // Convert from CSV to JSON
      : through(); // Do nothing
  }

  async setBatchSize(options) {
    try {
      // If user provided batchSize, use and exit early
      // Otherwise calculate and set optimal batch size
      if (options.objectsPerBatch !== null) {
        this.batchSize = options.objectsPerBatch;
        return;
      }
      // Test files to estimate optimal batch size
      const estimatedBatchSize = await this.estimateBatchSize();
      // Test network upload speed
      const uploadSpeedMb = await this.getNetworkSpeed();
      // Calculate optimal batch size
      this.writeProgress('Calculating optimal batch size...');
      let batchSize;
      // Reconcile batch size with network speed
      if (uploadSpeedMb >= this.desiredBatchSizeMb)
        batchSize = Math.floor(estimatedBatchSize);
      else
        batchSize = Math.floor(
          (uploadSpeedMb / this.desiredBatchSizeMb) * estimatedBatchSize
        );
      // Ensure minimum batch size is enforced
      batchSize = Math.max(this.minBatchSize, batchSize);
      console.log(chalk.blue(`\nOptimal batch size: ${batchSize}`));
      // Set batch size
      this.batchSize = batchSize;
    } catch (e) {
      throw e;
    }
  }

  estimateBatchSize() {
    // Read file, estimate average record size, estimate batch size
    // Return estimated batch size divided by maxConcurrency
    return new Promise((resolve, reject) => {
      try {
        const filename = this.filenames[0];
        const file = `${this.directory}/${filename}`;
        const isCsv = filename.split('.').pop() === 'csv';
        const fileStream = fs.createReadStream(file, {
          autoclose: true,
          flags: 'r',
        });
        this.writeProgress(`Estimating data size...`);
        const jsonStreamOption = isCsv ? null : '*';
        fileStream
          .pipe(this.conditionallyParseCsv(isCsv))
          .pipe(JSONStream.parse(jsonStreamOption))
          .pipe(transform(this.formatRecord))
          .pipe(new Batch({ size: 10000 }))
          .pipe(
            through(data => {
              const count = data.length;
              const string = JSON.stringify(data);
              const batchSizeMb = this.getStringSizeMb(string);
              const avgRecordSizeMb = batchSizeMb / count;
              const avgRecordSizeKb = Math.ceil(avgRecordSizeMb * 1000);
              const roughBatchSize = this.desiredBatchSizeMb / avgRecordSizeMb;
              const estimatedBatchSize = Math.floor(
                roughBatchSize / this.maxConcurrency
              );
              console.log(
                chalk.blue(`\nAverage record size: ${avgRecordSizeKb} Kb`)
              );
              fileStream.destroy();
              resolve(estimatedBatchSize);
            })
          );
      } catch (e) {
        reject(e);
      }
    });
  }

  updateBatchSize(newSize) {
    this.batchSize = newSize;
  }

  getBatchStream() {
    return new Batch({ size: this.batchSize });
  }

  async importToAlgolia(data) {
    // Method to index batches of records in Algolia
    try {
      await this.index.addObjects(data);
      this.importCount += data.length;
      this.writeProgress(`Records indexed: ${this.importCount}`);
    } catch (e) {
      let message = e.message;
      let addendum = e.stack;
      if (e.name === 'AlgoliaSearchRequestTimeoutError') {
        message = `You may be attempting to import batches too large for the network connection.`;
        addendum = this.suggestions();
        this.retryImport(data);
      }
      console.log(
        chalk.white.bgRed(`\nImport error: ${e.name}`),
        chalk.red(`\n${message}`),
        chalk.red(addendum)
      );
      throw e;
    }
  }

  retryImport(data) {
    // Algolia import retry strategy
    try {
      this.retryCount++;
      console.log(`\n(${this.retryCount}) Retrying batch...`);
      const importedBatchCount = Math.floor(this.importCount / this.batchSize);
      const retryLimit =
        this.retryCount > 15 && this.retryCount > importedBatchCount / 2;
      if (retryLimit) {
        console.log(
          chalk.white.bgRed(`\nError: Failure to index data`),
          chalk.red(`\nRetry limit reached.`),
          chalk.red(this.suggestions())
        );
        return;
      }
      // Split data in half
      const middle = Math.floor(data.length / 2);
      const firstHalf = data.splice(0, middle);
      // Reduce batchsize
      if (this.batchSize > middle) this.updateBatchSize(middle);
      // Push each half of data into import queue
      this.queue.push([firstHalf]);
      this.queue.push([data]);
    } catch (e) {
      console.error('Retry error:', e);
      throw e;
    }
  }

  indexFiles(filenames) {
    // Recursive method that iterates through an array of filenames, opens a read stream for each file
    // then pipes the read stream through a series of transformations (parse CSV/JSON objects, transform
    // them, batch them, index them in Algolia) while imposing a queue so that only so many
    // indexing threads will be run in parallel
    if (filenames.length <= 0) {
      console.log('\nDone reading files');
      return;
    }
    // Start new file read stream
    // Note: filenames is a reference to the mutable class instance variable this.filenames
    const filename = filenames.pop();
    const file = `${this.directory}/${filename}`;
    const isCsv = filename.split('.').pop() === 'csv';
    const fileStream = fs.createReadStream(file, {
      autoclose: true,
      flags: 'r',
    });

    fileStream.on('data', () => {
      if (this.queue.length() >= this.maxConcurrency) {
        // If async upload queue is full, pause reading from file stream
        fileStream.pause();
      }
    });

    fileStream.on('end', () => {
      // File complete, process next file
      this.indexFiles(filenames);
    });

    // Once the async upload queue is drained, resume reading from file stream
    this.queue.drain = () => {
      fileStream.resume();
    };

    // Handle parsing, transforming, batching, and indexing JSON and CSV files
    console.log(`\nImporting [${filename}]`);
    const jsonStreamOption = isCsv ? null : '*';
    fileStream
      .pipe(this.conditionallyParseCsv(isCsv, filename))
      .pipe(JSONStream.parse(jsonStreamOption))
      .pipe(transform(this.formatRecord))
      .pipe(this.getBatchStream())
      .pipe(
        through(data => {
          this.checkMemoryUsage();
          this.queue.push([data]);
        })
      );
  }

  async start(program) {
    // Script reads JSON or CSV file, or directory of such files, optionally applies
    // transformations, then batches and indexes the data in Algolia.

    // Validate command; if invalid display help text and exit
    this.validate(program, this.message, this.params);

    // Config params
    const options = {
      sourceFilepath: program.sourcefilepath,
      appId: program.algoliaappid,
      apiKey: program.algoliaapikey,
      indexName: program.algoliaindexname,
      objectsPerBatch: program.batchsize || null,
      transformations: program.transformationfilepath || null,
      maxConcurrency: program.maxconcurrency || 2,
      csvToJsonParams: program.params || null,
    };
    // Configure Algolia (this.client, this.index)
    this.setIndex(options);
    // Configure source paths (this.directory, this.filenames)
    this.setSource(options);
    // Configure transformations (this.formatRecord)
    this.setTransformations(options);
    // Configure optional csvtojson params (this.csvOptions)
    this.setCsvOptions(options);
    // Configure data upload parameters
    this.maxConcurrency = options.maxConcurrency;
    // Theoretically desirable batch size in MB
    this.desiredBatchSizeMb = 10;
    // Minimum batch size
    this.minBatchSize = 100;
    // Configure number of records to index per batch (this.batchSize, this.batch)
    await this.setBatchSize(options);
    // Assign dangerous memory usage flag
    this.highMemoryUsage = false;
    // Assign import count
    this.importCount = 0;
    // Assign retry count
    this.retryCount = 0;
    // Assign async queue
    this.queue = async.queue(this.importToAlgolia, this.maxConcurrency);

    // Execute import
    console.log(chalk.bgGreen.white('Starting import...'));
    return this.indexFiles(this.filenames);
  }
}

const importScript = new ImportScript();
module.exports = importScript;
60  themes/keepit/node_modules/@algolia/cli/commands/Interactive.js  (generated, vendored, normal file)
@@ -0,0 +1,60 @@
const inquirer = require('inquirer');

class Interactive {
  parseCommandNames(commandList, ownName) {
    const names = commandList.map(command => command._name);
    // Remove current command name and default command
    const commandNames = names.filter(name => name !== ownName && name !== '*');
    return commandNames;
  }

  getCommandQuestion(commandNames) {
    return {
      type: 'list',
      name: 'commandChoice',
      message: 'Select the command to run',
      choices: commandNames,
    };
  }

  getArgumentQuestions(validArguments) {
    return validArguments.map(argument => ({
      type: argument.description.includes('key') ? 'password' : 'input',
      name: argument.long.substring(2),
      message: `${argument.long} | ${argument.description}`,
    }));
  }

  async start(program) {
    try {
      const commands = require('../commands.js');
      const ownName = program._name;
      const commandList = program.parent.commands;
      // Get list of valid commands
      const commandNames = this.parseCommandNames(commandList, ownName);
      const commandQuestion = this.getCommandQuestion(commandNames);
      // Prompt user to select a command
      const commandResponse = await inquirer.prompt(commandQuestion);
      // Prepare subsequent questions
      const selectedCommand = commandList.find(
        command => command._name === commandResponse.commandChoice
      );
      const validArguments = selectedCommand.options;
      const argumentQuestions = this.getArgumentQuestions(validArguments);
      // Prompt user to input command arguments
      const argumentsResponse = await inquirer.prompt(argumentQuestions);
      // Pass arguments to program
      const argumentsList = Object.keys(argumentsResponse);
      argumentsList.forEach(arg => {
        if (argumentsResponse[arg] !== '')
          program[arg] = argumentsResponse[arg]; // eslint-disable-line no-param-reassign
      });
      // Execute selected command
      commands[selectedCommand._name].start(program);
    } catch (e) {
      throw e;
    }
  }
}

module.exports = new Interactive();
68  themes/keepit/node_modules/@algolia/cli/commands/Search.js  (generated, vendored, normal file)
@@ -0,0 +1,68 @@
const fs = require('fs');
const path = require('path');
const algolia = require('algoliasearch');
const Base = require('./Base.js');

class SearchScript extends Base {
  constructor() {
    super();
    // Bind class methods
    this.start = this.start.bind(this);
    this.parseSearchOptions = this.parseSearchOptions.bind(this);
    this.writeOutput = this.writeOutput.bind(this);
    // Define validation constants
    this.message =
      '\nExample: $ algolia search -a algoliaappid -k algoliaapikey -n algoliaindexname -q query -p searchparams -o outputpath\n\n';
    this.params = ['algoliaappid', 'algoliaapikey', 'algoliaindexname'];
  }

  parseSearchOptions(params) {
    return params === null ? {} : JSON.parse(params);
  }

  async writeOutput(outputFilepath, content) {
    const defaultFilepath = path.resolve(process.cwd(), 'search-results.json');
    const filepath = this.normalizePath(outputFilepath);
    const dir = path.dirname(filepath);
    if (!fs.lstatSync(dir).isDirectory()) {
      throw new Error(
        `Output path must target valid directory. Eg. ${defaultFilepath}`
      );
    } else {
      await fs.writeFileSync(filepath, content);
    }
  }

  async start(program) {
    try {
      // Validate command; if invalid display help text and exit
      this.validate(program, this.message, this.params);

      // Config params
      const appId = program.algoliaappid;
      const apiKey = program.algoliaapikey;
      const indexName = program.algoliaindexname;
      const query = program.query || '';
      const params = program.params || null;
      const outputPath = program.outputpath || null;

      // Get options
      const options = this.parseSearchOptions(params);

      // Instantiate Algolia index
      const client = algolia(appId, apiKey);
      const index = client.initIndex(indexName);
      // Perform search
      const result = await index.search(query, options);
      const output = JSON.stringify(result);
      return outputPath === null
        ? console.log(output)
        : await this.writeOutput(outputPath, output);
    } catch (e) {
      throw e;
    }
  }
}

const searchScript = new SearchScript();
module.exports = searchScript;
72  themes/keepit/node_modules/@algolia/cli/commands/SetSettings.js  (generated, vendored, normal file)
@@ -0,0 +1,72 @@
const fs = require('fs');
const algolia = require('algoliasearch');
const Base = require('./Base.js');

class SetSettingsScript extends Base {
  constructor() {
    super();
    // Bind class methods
    this.getSource = this.getSource.bind(this);
    this.parseSetSettingsOptions = this.parseSetSettingsOptions.bind(this);
    this.start = this.start.bind(this);
    // Define validation constants
    this.message =
      '\nExample: $ algolia setsettings -a algoliaappid -k algoliaapikey -n algoliaindexname -s sourcefilepath -p setsettingsparams\n\n';
    this.params = [
      'algoliaappid',
      'algoliaapikey',
      'algoliaindexname',
      'sourcefilepath',
    ];
  }

  getSource(path) {
    const filepath = this.normalizePath(path);
    if (!fs.lstatSync(filepath).isFile())
      throw new Error('Source filepath must target valid settings file.');
    return filepath;
  }

  parseSetSettingsOptions(params) {
    try {
      const options = { forwardToReplicas: false };
      if (params === null) return options;
      else return JSON.parse(params);
    } catch (e) {
      throw e;
    }
  }

  async start(program) {
    try {
      // Validate command; if invalid display help text and exit
      this.validate(program, this.message, this.params);

      // Config params
      const appId = program.algoliaappid;
      const apiKey = program.algoliaapikey;
      const indexName = program.algoliaindexname;
      const sourceFilepath = program.sourcefilepath;
      const params = program.params || null;

      // Get index settings
      const settingsPath = this.getSource(sourceFilepath);
      const settingsFile = await fs.readFileSync(settingsPath);
      const settings = JSON.parse(settingsFile);
      // Get options
      const settingsOptions = this.parseSetSettingsOptions(params);

      // Instantiate Algolia index
      const client = algolia(appId, apiKey);
      const index = client.initIndex(indexName);
      // Set index settings
      const result = await index.setSettings(settings, settingsOptions);
      return console.log(result);
    } catch (e) {
      throw e;
    }
  }
}

const setSettingsScript = new SetSettingsScript();
module.exports = setSettingsScript;
126  themes/keepit/node_modules/@algolia/cli/commands/TransferIndex.js  (generated, vendored, normal file)
@@ -0,0 +1,126 @@
const algolia = require('algoliasearch');
const Base = require('./Base.js');

class TransferIndexScript extends Base {
  constructor() {
    super();
    // Bind class methods
    this.getIndices = this.getIndices.bind(this);
    this.getTransformations = this.getTransformations.bind(this);
    this.transferIndexConfig = this.transferIndexConfig.bind(this);
    this.transferData = this.transferData.bind(this);
    this.start = this.start.bind(this);
    // Define validation constants
    this.message =
      '\nExample: $ algolia transferindex -a sourcealgoliaappid -k sourcealgoliaapikey -n sourcealgoliaindexname -d destinationalgoliaappid -y destinationalgoliaapikey -i destinationindexname -t transformationfilepath -e true\n\n';
    this.params = [
      'sourcealgoliaappid',
      'sourcealgoliaapikey',
      'sourcealgoliaindexname',
      'destinationalgoliaappid',
      'destinationalgoliaapikey',
    ];
  }

  getIndices(options) {
    // Instantiate Algolia indices
    const sourceClient = algolia(options.sourceAppId, options.sourceApiKey);
    const sourceIndex = sourceClient.initIndex(options.sourceIndexName);
    const destinationClient = algolia(
      options.destinationAppId,
      options.destinationApiKey
    );
    const destinationIndex = destinationClient.initIndex(
      options.destinationIndexName
    );

    return { sourceIndex, destinationIndex };
  }

  getTransformations(options) {
    // Set JSON record transformations
    const transformations = options.transformations
      ? require(options.transformations)
      : null;
    // Validate transformations function input param
    const valid = transformations && typeof transformations === 'function';
    // Return provided transformation function if exists
    return valid ? transformations : null;
  }

  async transferIndexConfig(indices, options) {
    // Transfer settings, synonyms, and query rules
    const settings = await indices.sourceIndex.getSettings();
    const synonyms = await indices.sourceIndex.exportSynonyms();
    const rules = await indices.sourceIndex.exportRules();
    if (options.excludeReplicas) delete settings.replicas;
    await indices.destinationIndex.setSettings(settings);
    await indices.destinationIndex.batchSynonyms(synonyms);
    await indices.destinationIndex.batchRules(rules);
  }

  transferData(indices, formatRecord) {
    return new Promise((resolve, reject) => {
      // Export index
      const browse = indices.sourceIndex.browseAll('', {
        attributesToRetrieve: ['*'],
      });
      let hitsCount = 0;
      // Set browseAll event handlers
      browse.on('result', async result => {
        // Push hits to destination index
        try {
          const hits = formatRecord
            ? result.hits.map(formatRecord)
            : result.hits;
          await indices.destinationIndex.addObjects(hits);
          hitsCount += result.hits.length;
          this.writeProgress(`Records transferred: ${hitsCount}`);
        } catch (e) {
          throw e;
        }
      });
      browse.on('end', () => resolve('\nDone transferring index.\n'));
      browse.on('error', err => reject(err));
    });
  }

  async start(program) {
    try {
      // Validate command; if invalid display help text and exit
      this.validate(program, this.message, this.params);

      // Config params
      const options = {
        sourceAppId: program.sourcealgoliaappid,
        sourceApiKey: program.sourcealgoliaapikey,
        sourceIndexName: program.sourcealgoliaindexname,
        destinationAppId: program.destinationalgoliaappid,
        destinationApiKey: program.destinationalgoliaapikey,
        destinationIndexName:
          program.destinationindexname || program.sourcealgoliaindexname,
        transformations: program.transformationfilepath || null,
        excludeReplicas:
          program.excludereplicas !== undefined
            ? program.excludereplicas === 'true'
            : false,
      };

      // Configure Algolia clients/indices
      const indices = this.getIndices(options);
      // Configure transformations
      const formatRecord = this.getTransformations(options);
      // Transfer index configuration
      await this.transferIndexConfig(indices, options);
      // Transfer data
      const result = await this.transferData(indices, formatRecord);

      return console.log(result);
    } catch (e) {
      throw e;
    }
  }
}

const transferIndexScript = new TransferIndexScript();
module.exports = transferIndexScript;
108  themes/keepit/node_modules/@algolia/cli/commands/TransferIndexConfig.js  (generated, vendored, normal file)
@@ -0,0 +1,108 @@
const algolia = require('algoliasearch');
const Base = require('./Base.js');

class TransferIndexConfigScript extends Base {
  constructor() {
    super();
    // Bind class methods
    this.start = this.start.bind(this);
    this.getIndices = this.getIndices.bind(this);
    this.getConfigOptions = this.getConfigOptions.bind(this);
    this.transferIndexConfig = this.transferIndexConfig.bind(this);
    // Define validation constants
    this.message =
      '\nExample: $ algolia transferindexconfig -a sourcealgoliaappid -k sourcealgoliaapikey -n sourcealgoliaindexname -d destinationalgoliaappid -y destinationalgoliaapikey -i destinationindexname -p configParams -e true\n\n';
    this.params = [
      'sourcealgoliaappid',
      'sourcealgoliaapikey',
      'sourcealgoliaindexname',
      'destinationalgoliaappid',
      'destinationalgoliaapikey',
    ];
  }

  getIndices(options) {
    // Instantiate Algolia indices
    const sourceClient = algolia(options.sourceAppId, options.sourceApiKey);
    const sourceIndex = sourceClient.initIndex(options.sourceIndexName);
    const destinationClient = algolia(
      options.destinationAppId,
      options.destinationApiKey
    );
    const destinationIndex = destinationClient.initIndex(
      options.destinationIndexName
    );

    return { sourceIndex, destinationIndex };
  }

  getConfigOptions(options) {
    // Default config
    const config = {
      sOptions: {},
      rOptions: {},
    };
    // No params provided, exit early
    if (!options.configParams) return config;

    const params = JSON.parse(options.configParams);

    // Set provided batchSynonyms and batchRules options
    if (params.batchSynonymsParams)
      config.sOptions = Object.assign({}, params.batchSynonymsParams);
    if (params.batchRulesParams)
      config.rOptions = Object.assign({}, params.batchRulesParams);

    return config;
  }

  async transferIndexConfig(indices, config, options) {
    // Transfer settings, synonyms, and query rules
    const settings = await indices.sourceIndex.getSettings();
    const synonyms = await indices.sourceIndex.exportSynonyms();
    const rules = await indices.sourceIndex.exportRules();
    if (options.excludeReplicas) delete settings.replicas;
    await indices.destinationIndex.setSettings(settings);
    await indices.destinationIndex.batchSynonyms(synonyms, config.sOptions);
    await indices.destinationIndex.batchRules(rules, config.rOptions);
  }

  async start(program) {
    try {
      // Validate command; if invalid display help text and exit
      this.validate(program, this.message, this.params);

      // Config params
      const options = {
        sourceAppId: program.sourcealgoliaappid,
        sourceApiKey: program.sourcealgoliaapikey,
        sourceIndexName: program.sourcealgoliaindexname,
        destinationAppId: program.destinationalgoliaappid,
        destinationApiKey: program.destinationalgoliaapikey,
        destinationIndexName:
          program.destinationindexname || program.sourcealgoliaindexname,
        configParams: program.params || null,
        excludeReplicas:
          program.excludereplicas !== undefined
            ? program.excludereplicas === 'true'
            : false,
      };

      // Configure Algolia clients/indices
      const indices = this.getIndices(options);
      // Configure batchSynonyms and batchRules options
      const config = this.getConfigOptions(options);
      // Transfer index configuration
      await this.transferIndexConfig(indices, config, options);

      return console.log(
        'Index settings, synonyms, and query rules transferred successfully.'
      );
    } catch (e) {
      throw e;
    }
  }
}

const transferIndexConfigScript = new TransferIndexConfigScript();
module.exports = transferIndexConfigScript;
136  themes/keepit/node_modules/@algolia/cli/commands/TransformLines.js  (generated, vendored, normal file)
@@ -0,0 +1,136 @@
const fs = require('fs');
const readLine = require('readline');
const Base = require('./Base.js');

class TransformLinesScript extends Base {
  constructor() {
    super();
    // Bind class methods
    this.defaultLineTransformation = this.defaultLineTransformation.bind(this);
    this.setOutput = this.setOutput.bind(this);
    this.setTransformations = this.setTransformations.bind(this);
    this.transformFile = this.transformFile.bind(this);
    this.init = this.init.bind(this);
    this.start = this.start.bind(this);
    // Define validation constants
    this.message =
      '\nExample: $ algolia transformlines -s sourcefilepath -o outputpath -t transformationfilepath \n\n';
    this.params = ['sourcefilepath'];
  }

  defaultLineTransformation(line) {
    // Default line transformation method
    /* eslint-disable no-control-regex */
    const newLine = line.match(/\u001e/, 'i')
      ? line.replace(/\u001e/, ',')
      : line;
    return newLine;
    /* eslint-enable no-control-regex */
  }

  setOutput(outputPath) {
    this.outputDir =
      outputPath !== null ? this.normalizePath(outputPath) : process.cwd();

    // Ensure outputpath is a directory
    if (!fs.lstatSync(this.outputDir).isDirectory())
      throw new Error('Output path must be a directory.');
  }

  setTransformations(transformationFilepath) {
    try {
      // Set JSON record transformations
      const transformations = transformationFilepath
        ? require(this.normalizePath(transformationFilepath))
        : null;
      // Validate transformations function input param
      const valid = transformations && typeof transformations === 'function';
      // Assign our transformations function using provided custom transformations file if exists
      this.lineTransformation = valid
        ? transformations
        : this.defaultLineTransformation;
    } catch (e) {
      throw e;
    }
  }

  // Method to transform an individual file line-by-line
  transformFile(filename) {
    return new Promise((resolve, reject) => {
      try {
        const writeStream = fs.createWriteStream(
          `${this.outputDir}/${filename}`
        );
        let count = 0;

        if (this.transformationFilepath === null) {
          writeStream.write('['); // Comment this out to prevent injecting opening bracket at start of new output file
        }

        const lineReader = readLine.createInterface({
          input: fs.createReadStream(`${this.directory}/${filename}`),
        });

        lineReader.on('line', line => {
          count++;
          const newLine = this.lineTransformation(line);
          this.writeProgress(`Line ${count}...`);
          writeStream.write(newLine);
        });

        lineReader.on('close', () => {
          console.log('Done writing!');
          if (this.transformationFilepath === null) {
            writeStream.write(']'); // Comment this out to prevent injecting closing bracket at end of new output file
          }
          writeStream.end();
          resolve(true);
        });
      } catch (e) {
        reject(e);
      }
    });
  }

  // Start script
  async init(filenames) {
    for (const filename of filenames) {
      try {
        console.log(`Reading: ${this.directory}/${filename}`);
        console.log(`Writing to: ${this.outputDir}/${filename}`);
        await this.transformFile(filename);
      } catch (e) {
        console.log(`Error while processing ${filename}`);
        throw new Error(e);
      }
    }
  }

  start(program) {
    // Script reads a file or directory of files synchronously, line-by-line.
    // Writes each file synchronously, line-by-line, to an output directory
    // while optionally applying a provided transformation function to each line.

    // Validate command; if invalid display help text and exit
    this.validate(program, this.message, this.params);

    // Config params
    this.sourceFilepath = program.sourcefilepath;
    this.outputpath = program.outputpath || null;
    this.transformationFilepath = program.transformationfilepath || null;

    // Configure source paths (this.directory, this.filenames)
    this.setSource({ sourceFilepath: this.sourceFilepath });
    // Configure output path (this.outputDir)
    this.setOutput(this.outputpath);
    // Configure transformations (this.lineTransformation)
    this.setTransformations(this.transformationFilepath);

    // Execute line transformations
    this.init(this.filenames);
    return false;
  }
}

const transformLinesScript = new TransformLinesScript();
module.exports = transformLinesScript;