-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathdataset.js
executable file
·152 lines (133 loc) · 4.52 KB
/
dataset.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
#!/usr/bin/env node
// CLI tool for importing datasets into a restorecommerce backend via the
// GraphQL bot, plus small helpers for hashing/validating bcrypt passwords.
require('dotenv').config(); // load .env into process.env before config is read below
const fs = require('fs');
const path = require('path');
const bcrypt = require('bcryptjs');
const {
GraphQLProcessor,
JobProcessor
} = require('@restorecommerce/gql-bot');
const { program } = require('commander');
// Config file location is overridable via env; defaults to '.config.json' in CWD.
const DB_IMPORT_CONFIG_NAME = process.env.DB_IMPORT_CONFIG_NAME ?? '.config.json';
// Only the 'db_import' section of the config is used; CONFIG is undefined when absent.
// NOTE(review): readFileSync throws an uncaught error if the config file is missing —
// commandDataImport later reports 'invalid or missing config' only for a missing section.
const CONFIG = JSON.parse(fs.readFileSync(DB_IMPORT_CONFIG_NAME).toString())?.db_import;
async function commandDataImport(cmd) {
  // Validate inputs up front. Every guard delegates to exitWithError(), which
  // prints usage help and terminates the process, so control never continues
  // past a failed check.
  if (CONFIG == null) exitWithError('error: invalid or missing config');
  if (cmd.dataset == null) exitWithError('error: please select data set');
  if (cmd.token == null) exitWithError('error: please provide an access token');
  if (!(cmd.job?.length > 0)) exitWithError('error: please provide a job');

  const dataset = cmd.dataset;
  const accessToken = cmd.token;
  const jobs = cmd.job;

  // Make sure every requested job file exists before any import work begins.
  for (const job of jobs) {
    const filePath = getFullJobPath(dataset, job);
    try {
      fs.statSync(filePath);
    } catch (e) {
      exitWithError(`error: job '${job}' does not exist at path: ${filePath}`);
    }
  }

  // Inject the bearer token into the request headers and let the CLI flag
  // override the configured GraphQL endpoint.
  CONFIG.headers = Object.assign(CONFIG.headers ?? {}, { 'Authorization': `Bearer ${accessToken}` });
  CONFIG.entry = cmd.url ?? CONFIG.entry;
  const gqlProcessor = new GraphQLProcessor(CONFIG);

  /* eslint no-restricted-syntax: ["error", "FunctionExpression",
  "WithStatement", "BinaryExpression[operator='in']"] */
  // Run the jobs one after another; each run reports progress via events.
  for (const jobName of jobs) {
    const job = JSON.parse(fs.readFileSync(getFullJobPath(dataset, jobName), 'utf8'));
    job.options.processor = gqlProcessor;
    const jobProcessor = new JobProcessor(job);
    const jobResult = await jobProcessor.start(null, null, !!cmd.verbose, !!cmd.ignore);
    jobResult.on('progress', (task) => {
      console.log('Progress:', task.basename);
    });
    jobResult.on('done', () => {
      console.log('Import successfully');
    });
    jobResult.on('error', (err) => {
      console.error('Error:', err, JSON.stringify(err ?? '', null, 2));
    });
  }
}
function commandListJobs(cmd) {
  // A dataset must be selected; exitWithError() terminates the process.
  const dataset = cmd.dataset ?? exitWithError('Error: please select data set');
  const jobsDir = path.join(CONFIG['data_directory'], dataset, CONFIG['job_directory']);
  const prefix = CONFIG['job_prefix'];
  // Print each job name: files matching <prefix>*.json, with the prefix and
  // the '.json' suffix stripped.
  fs.readdirSync(jobsDir)
    .filter((file) => file.startsWith(prefix) && file.endsWith('.json'))
    .forEach((file) => console.log(file.substring(prefix.length, file.length - 5)));
}
function commandListDatasets(cmd) {
  // Every entry in the configured data directory is treated as a dataset name.
  for (const dir of fs.readdirSync(CONFIG['data_directory'])) {
    console.log(path.basename(dir));
  }
}
function commandHashPassword(pw) {
  // bcrypt with a cost factor of 10; the salt is embedded in the output hash.
  const hash = bcrypt.hashSync(pw, bcrypt.genSaltSync(10));
  // NOTE(review): this echoes the plaintext password to stdout — fine for a
  // local CLI, but avoid piping the output into shared logs.
  console.log("Hash Password:", pw, "->", hash);
}
function commandValidateHash(pw, hash) {
  // Echo the inputs, then report whether the password matches the bcrypt hash.
  console.log("Input:", pw, hash);
  const matches = bcrypt.compareSync(pw, hash);
  console.log("Validate Password:", matches);
}
async function importData() {
  // Option defaults are resolved environment-first, then from the config file.
  // They are evaluated eagerly here, exactly as commander would when the
  // .option() calls run.
  const jobsDefault = process.env.DB_IMPORT_JOBS?.split(' ') ?? CONFIG?.jobs ?? [];
  const entryDefault = process.env.DB_IMPORT_ENTRY ?? CONFIG?.entry;
  const tokenDefault = process.env.ACCESS_TOKEN ?? CONFIG?.access_token;

  // Registration order is preserved — it determines the order commands appear
  // in the generated help output.
  program
    .command('import')
    .description('import data')
    .option('-d, --dataset <dataset>', 'select dataset domain')
    .option('-j, --job <job...>', 'list of jobs to process', jobsDefault)
    .option('-u, --url <entry>', 'url to endpoint point', entryDefault)
    .option('-t, --token <access_token>', 'access token to use for communications', tokenDefault)
    .option('-i, --ignore', 'ignore errors and don\'t stop', false)
    .option('-v, --verbose', 'verbose output', false)
    .action(commandDataImport);

  program
    .command('jobs')
    .description('list all available jobs')
    .option('-d, --dataset <dataset>', 'select dataset domain')
    .action(commandListJobs);

  program
    .command('list')
    .description('list all available datasets')
    .action(commandListDatasets);

  program
    .command('hash')
    .description('hash a password')
    .argument('<pw>', 'the password to be hashed')
    .action(commandHashPassword);

  program
    .command('validate')
    .description('validate a password')
    .argument('<pw>', 'password')
    .argument('<hash>', 'hash')
    .action(commandValidateHash);

  await program.parseAsync(process.argv);
}
// Print the failure reason followed by the program's usage help, then
// terminate with a non-zero status. This function never returns.
function exitWithError(message) {
console.error(message, '\n');
console.log(program.helpInformation());
process.exit(1);
}
function getFullJobPath(dataset, job) {
  // Absolute path of a job file:
  //   <data_directory>/<dataset>/<job_directory>/<job_prefix><job>.json
  const fileName = CONFIG?.job_prefix + job + '.json';
  const relativePath = path.join(CONFIG?.data_directory, dataset, CONFIG?.job_directory, fileName);
  return path.resolve(relativePath);
}
// Entry point. importData() is async; without a .catch the call is a floating
// promise, so any rejection (bad arguments, I/O failure during parsing) would
// surface only as an unhandled-rejection warning. Log it and exit non-zero
// instead.
importData().catch((err) => {
  console.error(err);
  process.exit(1);
});