Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Release/v3.31.2 #3764

Merged
merged 20 commits into from
Jan 24, 2025
Merged
Show file tree
Hide file tree
Changes from 13 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions config.defaults.sh
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,9 @@ T_MYSQL_PASSWORD="password"
T_MYSQL_PHPMYADMIN="FALSE"
# Enter "true" if using a mysql container instead of an external database service such as AWS RDS. This will launch a mysql container.
T_USE_MYSQL_CONTAINER=""
# The username and password for the mysql-api.
# NOTE(review): "admin"/"password" are insecure placeholder defaults — override
# both values in your local config before deploying to any shared environment.
T_MYSQL_API_AUTH_USER="admin"
T_MYSQL_API_AUTH_PASSWORD="password"

#
# Optional
Expand Down
6 changes: 4 additions & 2 deletions develop.sh
Original file line number Diff line number Diff line change
Expand Up @@ -79,8 +79,8 @@ fi

# When using a local mysql container (rather than an external service such as
# AWS RDS), start it, give it time to come up, then run the schema setup.
# Fix: the flattened diff left both the old (60s) and new (20s) wait lines in
# place; only the post-change 20-second wait belongs in the script.
if echo "$T_USE_MYSQL_CONTAINER" | grep "true"; then
  ./mysql-start-container.sh
  echo "Waiting 20 seconds for mysql container to start..."
  sleep 20
  ./mysql-setup.sh
fi

Expand Down Expand Up @@ -250,6 +250,8 @@ OPTIONS="
--env \"T_MYSQL_USER=$T_MYSQL_USER\" \
--env \"T_MYSQL_PASSWORD=$T_MYSQL_PASSWORD\" \
--env \"T_MYSQL_MULTI_PARTICIPANT_SCHEMA=$T_MYSQL_MULTI_PARTICIPANT_SCHEMA\" \
--env \"T_MYSQL_API_AUTH_USER=$T_MYSQL_API_AUTH_USER\" \
--env \"T_MYSQL_API_AUTH_PASSWORD=$T_MYSQL_API_AUTH_PASSWORD\" \
--volume $(pwd)/data/mysql/state:/mysql-module-state:delegated \
--volume $(pwd)/server/src/modules/mysql-js/conf.d:/etc/mysql/conf.d:delegated \
$OPTIONS
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ export class UpdateUserRoleComponent implements OnInit {
try {
await this.groupsService.addUserToGroup(this.groupId, this.username, this.role);
this.errorHandler.handleError(_TRANSLATE('User Added to Group Successfully'));
this.router.navigate([`groups/${this.groupId}`]);
this.router.navigate([`groups/${this.groupId}/configure/security`]);
} catch (error) {
console.log(error);
}
Expand Down
2 changes: 2 additions & 0 deletions server/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -76,8 +76,10 @@
"cors": "^2.8.4",
"couchdb-wedge": "1.0.2",
"crypto": "^1.0.1",
"csv-writer": "^1.6.0",
"exceljs": "^1.2.1",
"express": "^4.16.2",
"express-basic-auth": "^1.2.1",
"express-http-proxy": "^1.1.0",
"express-session": "1.15.6",
"fs-extra": "^4.0.3",
Expand Down
154 changes: 2 additions & 152 deletions server/src/express-app.js
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ const {archiveToDiskConfig, passwordPolicyConfig} = require('./config-utils.js')
const { generateCSV, generateCSVDataSet, generateCSVDataSetsRoute, listCSVDataSets, getDatasetDetail } = require('./routes/group-csv.js');
const allowIfUser1 = require('./middleware/allow-if-user1.js');
const hasUploadToken = require("./middleware/has-upload-token");
const tangerineMySQLApi = require('./mysql-api/index.js');

if (process.env.T_AUTO_COMMIT === 'true') {
setInterval(commitFilesToVersionControl,parseInt(process.env.T_AUTO_COMMIT_FREQUENCY))
Expand Down Expand Up @@ -437,158 +438,7 @@ app.get('/rolesByGroupId/:groupId/role/:role', isAuthenticated, findRoleByName);
app.get('/rolesByGroupId/:groupId/roles', isAuthenticated, getAllRoles);
app.post('/permissions/updateRoleInGroup/:groupId', isAuthenticated, permitOnGroupIfAll(['can_manage_group_roles']), updateRoleInGroup);

// app.use('/api/generateDbDump/:groupId/:deviceId/:syncUsername/:syncPassword', async function(req, res, next){
// const groupId = req.params.groupId;
// const deviceId = req.params.deviceId;
// const syncUsername = req.params.syncUsername;
// const syncPassword = req.params.syncPassword;
// const url = `http://${syncUsername}:${syncPassword}@couchdb:5984/${groupId}`
// const devicesUrl = `http://${syncUsername}:${syncPassword}@couchdb:5984/${groupId}-devices`
// console.log("about to generateDbDump to " + groupId + " deviceId: " + deviceId + " syncUsername: " + syncUsername + " syncPassword: " + syncPassword + " using devicesUrl: " + devicesUrl)
// const groupDevicesDb = await new PouchDB(devicesUrl)
// const device = await groupDevicesDb.get(deviceId)
// const formInfos = await fs.readJson(`/tangerine/client/content/groups/${groupId}/forms.json`)
// let locations;
// if (device.syncLocations.length > 0) {
// locations = device.syncLocations.map(locationConfig => {
// // Get last value, that's the focused sync point.
// let location = locationConfig.value.slice(-1).pop()
// return location
// })
// }
// const pullSelector = {
// "$or": [
// ...formInfos.reduce(($or, formInfo) => {
// if (formInfo.couchdbSyncSettings && formInfo.couchdbSyncSettings.enabled && formInfo.couchdbSyncSettings.pull) {
// $or = [
// ...$or,
// ...device.syncLocations.length > 0 && formInfo.couchdbSyncSettings.filterByLocation
// ? device.syncLocations.map(locationConfig => {
// // Get last value, that's the focused sync point.
// let location = locationConfig.value.slice(-1).pop()
// return {
// "form.id": formInfo.id,
// [`location.${location.level}`]: location.value
// }
// })
// : [
// {
// "form.id": formInfo.id
// }
// ]
// ]
// }
// return $or
// }, []),
// ...device.syncLocations.length > 0
// ? device.syncLocations.map(locationConfig => {
// // Get last value, that's the focused sync point.
// let location = locationConfig.value.slice(-1).pop()
// return {
// "type": "issue",
// [`location.${location.level}`]: location.value,
// "resolveOnAppContext": AppContext.Client
// }
// })
// : [
// {
// "resolveOnAppContext": AppContext.Client,
// "type": "issue"
// }
// ]
// ]
// }
//
// const replicationOpts = {
// "selector": pullSelector
// }
// // stream db to express response
// const db = new PouchDB(url);
//
// let dbDumpFileDir = `/tangerine/groups/${groupId}/client/dbDumpFiles`
//
// for (const location of locations) {
// // locations: [{"location.region":"B7BzlR6h"}]
// const locationIdentifier = `${location.level}_${location.value}`
// let dbDumpFilePath = `${dbDumpFileDir}/${sanitize(locationIdentifier)}-dbDumpFile`
// let metadataFilePath = `${dbDumpFileDir}/${sanitize(locationIdentifier)}-metadata`
// try {
// await fs.ensureDir(dbDumpFileDir)
// } catch (err) {
// console.error(err)
// }
//
// const exists = await fs.pathExists(dbDumpFilePath)
// if (! exists) {
// console.log("dbDumpFilePath not created; generating.")
// const stream = new MemoryStream()
// let dbDumpFileWriteStream = fsc.createWriteStream(dbDumpFilePath)
// let metadataWriteStream = fsc.createWriteStream(metadataFilePath)
// console.log("Now dumping to the writeStream")
// let i = 0
// stream.on('data', function (chunk) {
// // chunks.push(chunk)
// console.log("on dbDumpFileReadStream")
// dbDumpFileWriteStream.write(chunk.toString());
// if (i === 0) {
// try {
// const firstChunk = chunk.toString();
// const ndjObject = JSON.parse(firstChunk)
// console.log("firstChunk: " + firstChunk)
// let payloadDocCount, pullLastSeq
// if (ndjObject) {
// payloadDocCount = ndjObject.db_info?.doc_count;
// pullLastSeq = ndjObject.db_info?.update_seq;
// const responseObject = {
// "payloadDocCount": payloadDocCount,
// "pullLastSeq": pullLastSeq,
// "locationIdentifier": sanitize(locationIdentifier)
// }
// metadataWriteStream.write(JSON.stringify(responseObject));
// }
//
// } catch (e) {
// console.log("firstChunk ERROR: " + e)
// }
// }
// i++
// // writeStream.write(chunk);
// });
// // await db.dump(dbDumpFileWriteStream, replicationOpts).then(async () => {
// await db.dump(stream, replicationOpts).then(async () => {
// console.log('Dump from db complete!')
// console.log('Sleep for 2 seconds')
// await sleep(2000);
// // const dbDumpFileReadStream = fs.createReadStream(dbDumpFilePath)
// metadataWriteStream.end()
// dbDumpFileWriteStream.end()
// }).catch(function(err){
// // res.status(500).send(err);
// console.trace()
// res.send({ statusCode: 500, data: "Error dumping database to file: " + err })
// reject("Error dumping database to file: " + err)
// });
// console.log('dumpedString from db complete!')
// }
// console.log('sending metadata')
// fs.createReadStream(metadataFilePath).pipe(res);
// }
// });

// app.use('/api/getDbDump/:groupId/:locationIdentifier', async function(req, res, next){
// const groupId = req.params.groupId;
// const locationIdentifier = req.params.locationIdentifier;
// let dbDumpFileDir = `/tangerine/groups/${groupId}/client/dbDumpFiles`
// let dbDumpFilePath = `${dbDumpFileDir}/${locationIdentifier}-dbDumpFile`
// const exists = await fs.pathExists(dbDumpFilePath)
// if (exists) {
// console.log("Transferring the dbDumpFile to locationIdentifier: " + locationIdentifier)
// fs.createReadStream(dbDumpFilePath).pipe(res);
// } else {
// res.send({ statusCode: 404, data: "DB dump file not found. "})
// }
// });

app.use('/mysql-api', isAuthenticated, permitOnGroupIfAll(['can_access_mysql_api']), tangerineMySQLApi);

/**
* @function`getDirectories` returns an array of strings of the top level directories found in the path supplied
Expand Down
156 changes: 156 additions & 0 deletions server/src/generate-db-dump.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,156 @@
var express = require('express');
// The handlers below were moved out of express-app.js without their
// dependencies; bring the names they use into scope.
var fs = require('fs-extra');        // fs.readJson / fs.ensureDir / fs.pathExists / fs.createReadStream
var fsc = require('fs');             // plain node fs for createWriteStream
var MemoryStream = require('memorystream');
var PouchDB = require('pouchdb');
var sanitize = require('sanitize-filename');
var router = express.Router();

// Builds (if absent) a per-sync-location CouchDB dump file for a device's
// group database, then streams each dump's metadata file back to the caller.
// Route params: groupId, deviceId, and the CouchDB sync credentials.
// NOTE(review): AppContext and sleep are referenced below but not defined in
// this file — confirm they are required/exported where this router is mounted.
router.use('/api/generateDbDump/:groupId/:deviceId/:syncUsername/:syncPassword', async function(req, res, next){
  const groupId = req.params.groupId;
  const deviceId = req.params.deviceId;
  const syncUsername = req.params.syncUsername;
  const syncPassword = req.params.syncPassword;
  const url = `http://${syncUsername}:${syncPassword}@couchdb:5984/${groupId}`
  const devicesUrl = `http://${syncUsername}:${syncPassword}@couchdb:5984/${groupId}-devices`
  // FIX: do not log syncPassword (or credential-bearing URLs) — cleartext
  // credentials in server logs are a security leak.
  console.log("about to generateDbDump to " + groupId + " deviceId: " + deviceId + " syncUsername: " + syncUsername)
  const groupDevicesDb = await new PouchDB(devicesUrl)
  const device = await groupDevicesDb.get(deviceId)
  const formInfos = await fs.readJson(`/tangerine/client/content/groups/${groupId}/forms.json`)
  // FIX: the original left `locations` undefined when the device had no sync
  // locations, which crashed the for..of loop below. Default to an empty list.
  let locations = [];
  if (device.syncLocations.length > 0) {
    locations = device.syncLocations.map(locationConfig => {
      // Get last value, that's the focused sync point.
      let location = locationConfig.value.slice(-1).pop()
      return location
    })
  }
  // Pull docs for forms with couchdb sync enabled (optionally filtered by the
  // device's sync locations) plus client-resolvable issues.
  const pullSelector = {
    "$or": [
      ...formInfos.reduce(($or, formInfo) => {
        if (formInfo.couchdbSyncSettings && formInfo.couchdbSyncSettings.enabled && formInfo.couchdbSyncSettings.pull) {
          $or = [
            ...$or,
            ...device.syncLocations.length > 0 && formInfo.couchdbSyncSettings.filterByLocation
              ? device.syncLocations.map(locationConfig => {
                // Get last value, that's the focused sync point.
                let location = locationConfig.value.slice(-1).pop()
                return {
                  "form.id": formInfo.id,
                  [`location.${location.level}`]: location.value
                }
              })
              : [
                {
                  "form.id": formInfo.id
                }
              ]
          ]
        }
        return $or
      }, []),
      ...device.syncLocations.length > 0
        ? device.syncLocations.map(locationConfig => {
          // Get last value, that's the focused sync point.
          let location = locationConfig.value.slice(-1).pop()
          return {
            "type": "issue",
            [`location.${location.level}`]: location.value,
            "resolveOnAppContext": AppContext.Client
          }
        })
        : [
          {
            "resolveOnAppContext": AppContext.Client,
            "type": "issue"
          }
        ]
    ]
  }

  const replicationOpts = {
    "selector": pullSelector
  }
  const db = new PouchDB(url);

  let dbDumpFileDir = `/tangerine/groups/${groupId}/client/dbDumpFiles`

  // NOTE(review): when a device has more than one location this pipes to `res`
  // once per location — presumably devices have a single focused sync point;
  // confirm against callers.
  for (const location of locations) {
    const locationIdentifier = `${location.level}_${location.value}`
    let dbDumpFilePath = `${dbDumpFileDir}/${sanitize(locationIdentifier)}-dbDumpFile`
    let metadataFilePath = `${dbDumpFileDir}/${sanitize(locationIdentifier)}-metadata`
    try {
      await fs.ensureDir(dbDumpFileDir)
    } catch (err) {
      console.error(err)
    }

    const exists = await fs.pathExists(dbDumpFilePath)
    if (! exists) {
      console.log("dbDumpFilePath not created; generating.")
      const stream = new MemoryStream()
      let dbDumpFileWriteStream = fsc.createWriteStream(dbDumpFilePath)
      let metadataWriteStream = fsc.createWriteStream(metadataFilePath)
      console.log("Now dumping to the writeStream")
      let i = 0
      stream.on('data', function (chunk) {
        dbDumpFileWriteStream.write(chunk.toString());
        // The first NDJSON chunk carries db_info; derive the metadata file
        // (doc count, last sequence) from it.
        if (i === 0) {
          try {
            const firstChunk = chunk.toString();
            const ndjObject = JSON.parse(firstChunk)
            console.log("firstChunk: " + firstChunk)
            let payloadDocCount, pullLastSeq
            if (ndjObject) {
              payloadDocCount = ndjObject.db_info?.doc_count;
              pullLastSeq = ndjObject.db_info?.update_seq;
              const responseObject = {
                "payloadDocCount": payloadDocCount,
                "pullLastSeq": pullLastSeq,
                "locationIdentifier": sanitize(locationIdentifier)
              }
              metadataWriteStream.write(JSON.stringify(responseObject));
            }
          } catch (e) {
            console.log("firstChunk ERROR: " + e)
          }
        }
        i++
      });
      // FIX: the original called an undefined `reject(...)` in the catch
      // (ReferenceError) and then fell through to pipe metadata into the
      // already-sent error response. Track failure and bail out instead.
      let dumpFailed = false
      await db.dump(stream, replicationOpts).then(async () => {
        console.log('Dump from db complete!')
        console.log('Sleep for 2 seconds')
        await sleep(2000);
        metadataWriteStream.end()
        dbDumpFileWriteStream.end()
      }).catch(function(err){
        console.trace()
        dumpFailed = true
        res.send({ statusCode: 500, data: "Error dumping database to file: " + err })
      });
      if (dumpFailed) {
        return
      }
      console.log('dumpedString from db complete!')
    }
    console.log('sending metadata')
    fs.createReadStream(metadataFilePath).pipe(res);
  }
});

// Streams a previously generated dump file for a sync location back to the
// client, or a statusCode:404 body if no dump exists for that location.
router.use('/api/getDbDump/:groupId/:locationIdentifier', async function(req, res, next){
  const groupId = req.params.groupId;
  // FIX: the writer stores dumps under `sanitize(locationIdentifier)` (see
  // generateDbDump), so the reader must look them up the same way; this also
  // blocks path traversal via a crafted locationIdentifier.
  // NOTE(review): groupId is also interpolated into the path — confirm group
  // ids are server-generated and cannot contain path separators.
  const locationIdentifier = sanitize(req.params.locationIdentifier);
  let dbDumpFileDir = `/tangerine/groups/${groupId}/client/dbDumpFiles`
  let dbDumpFilePath = `${dbDumpFileDir}/${locationIdentifier}-dbDumpFile`
  const exists = await fs.pathExists(dbDumpFilePath)
  if (exists) {
    console.log("Transferring the dbDumpFile to locationIdentifier: " + locationIdentifier)
    fs.createReadStream(dbDumpFilePath).pipe(res);
  } else {
    // Body carries the status code; HTTP status stays 200 for backward
    // compatibility with existing clients that read `statusCode` from the body.
    res.send({ statusCode: 404, data: "DB dump file not found. "})
  }
});

module.exports = router;
2 changes: 1 addition & 1 deletion server/src/middleware/permitted.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ const permitOnGroupIfAll = (permissions) => {
const isAllowed = groupPermissions => permissions.every(e => groupPermissions.includes(e));
return (req, res, next) => {
try {
const group = req.params.groupId || req.params.groupName || req.params.group;
const group = req.params.groupId || req.params.groupName || req.params.group || req.query.groupId;
const allGroupsPermissions = req.user.groupPermissions;
const myGroupsPermissions = (allGroupsPermissions.find(g => g.groupName === group)).permissions;
if (isAllowed(myGroupsPermissions)) {
Expand Down
Loading
Loading