feat: getByIds to make a second attempt after recreating the Client
Debugging/fixing "GCP timeout issue"
kirillgroshkov committed Jan 19, 2022
1 parent 4a1e4b7 commit df9e60e
Showing 2 changed files with 200 additions and 267 deletions; only the diff of src/datastore.db.ts is shown below.
src/datastore.db.ts (48 changes: 30 additions & 18 deletions)
@@ -17,14 +17,14 @@ import {
   JsonSchemaObject,
   JsonSchemaString,
   pMap,
-  pRetry,
   _assert,
   _chunk,
   _omit,
   JsonSchemaRootObject,
   CommonLogger,
   commonLoggerMinLevel,
   pTimeout,
+  pRetryFn,
 } from '@naturalcycles/js-lib'
 import { ReadableTyped } from '@naturalcycles/nodejs-lib'
 import { boldWhite } from '@naturalcycles/nodejs-lib/dist/colors'
@@ -118,25 +118,31 @@ export class DatastoreDB extends BaseCommonDB implements CommonDB {
     const keys = ids.map(id => this.key(table, id))
     let rows: any[]
 
-    try {
-      if (this.cfg.timeout) {
+    if (this.cfg.timeout) {
+      // First try
+      try {
         const r = await pTimeout(this.ds().get(keys), {
           timeout: this.cfg.timeout,
           name: `datastore.getByIds(${table})`,
         })
         rows = r[0]
-      } else {
-        rows = (await this.ds().get(keys))[0]
-      }
-    } catch (err) {
-      this.cfg.logger.log('datastore recreated on error')
+      } catch {
+        this.cfg.logger.log('datastore recreated on error')
 
-      // This is to debug "GCP Datastore Timeout issue"
-      const datastoreLib = require('@google-cloud/datastore')
-      const DS = datastoreLib.Datastore as typeof Datastore
-      this.cachedDatastore = new DS(this.cfg)
+        // This is to debug "GCP Datastore Timeout issue"
+        const datastoreLib = require('@google-cloud/datastore')
+        const DS = datastoreLib.Datastore as typeof Datastore
+        this.cachedDatastore = new DS(this.cfg)
 
-      throw err
+        // Second try (will throw)
+        const r = await pTimeout(this.ds().get(keys), {
+          timeout: this.cfg.timeout,
+          name: `datastore.getByIds(${table}) second try`,
+        })
+        rows = r[0]
+      }
+    } else {
+      rows = (await this.ds().get(keys))[0]
     }
 
     return (
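The new control flow above boils down to: try once, recreate the Datastore client on any failure, then try exactly once more and let the second error propagate. A minimal standalone sketch of that pattern, with illustrative names that are not part of this codebase:

// Hypothetical helper, shown only to illustrate the pattern used in getByIds above.
async function withOneRetryAfterRecreate<T>(
  attempt: () => Promise<T>,
  recreate: () => void,
  log: (msg: string) => void = console.log,
): Promise<T> {
  try {
    // First try
    return await attempt()
  } catch {
    log('client recreated on error')
    recreate()
    // Second try: if it fails again, the error propagates to the caller
    return await attempt()
  }
}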
@@ -237,14 +243,14 @@ export class DatastoreDB extends BaseCommonDB implements CommonDB {
       this.toDatastoreEntity(table, obj, opt.excludeFromIndexes as string[]),
     )
 
-    const save = pRetry(
+    const save = pRetryFn(
       async (batch: DatastorePayload<ROW>[]) => {
         await (opt.tx || this.ds()).save(batch)
       },
       {
         // Here we retry the GOAWAY errors that are somewhat common for Datastore
         // Currently only retrying them here in .saveBatch(), cause probably they're only thrown when saving
-        predicate: err => RETRY_ON.some(s => (err as Error)?.message.includes(s)),
+        predicate: err => RETRY_ON.some(s => err?.message?.includes(s)),
         name: `DatastoreLib.saveBatch(${table})`,
         maxAttempts: 5,
         delay: 5000,
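The predicate change drops the `as Error` cast in favour of optional chaining, so a rejection value without a `message` (or a non-Error value) makes the predicate return false instead of throwing inside the retry wrapper. A standalone sketch of that check; the contents of RETRY_ON here are an assumption based on the GOAWAY comment above:

// Illustrative only: substrings that mark an error as retriable.
const RETRY_ON = ['GOAWAY']

// Safe for any rejection value: every property access is optional-chained,
// so undefined, strings and plain objects all simply yield false.
function isRetriableDatastoreError(err: any): boolean {
  return RETRY_ON.some(s => err?.message?.includes(s))
}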
@@ -257,15 +263,21 @@
     )
 
     try {
-      await pMap(_chunk(entities, MAX_ITEMS), async batch => await save(batch))
+      const chunks = _chunk(entities, MAX_ITEMS)
+      if (chunks.length === 1) {
+        // Not using pMap in hope to preserve stack trace
+        await save(chunks[0]!)
+      } else {
+        await pMap(chunks, async batch => await save(batch))
+      }
     } catch (err) {
       // console.log(`datastore.save ${kind}`, { obj, entity })
       this.cfg.logger.error(
         `error in DatastoreLib.saveBatch for ${table} (${rows.length} rows)`,
         err,
       )
-      // don't throw, because datastore SDK makes it in separate thread, so exception will be unhandled otherwise
-      return await Promise.reject(err)
+
+      throw err
     }
   }
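The saveBatch change special-cases a single chunk so that a failure's stack trace points directly at the save call instead of going through pMap; multi-chunk saves still fan out via pMap. A generic sketch of the same decision, where saveChunked and its parameters are hypothetical names rather than library API:

// Hypothetical helper: call the saver directly for a single chunk (shorter
// stack traces), iterate over chunks only when there is more than one.
async function saveChunked<T>(
  items: T[],
  chunkSize: number,
  saveBatch: (batch: T[]) => Promise<void>,
): Promise<void> {
  const chunks: T[][] = []
  for (let i = 0; i < items.length; i += chunkSize) {
    chunks.push(items.slice(i, i + chunkSize))
  }

  if (chunks.length === 1) {
    // Direct call: a failure's stack trace points straight at saveBatch
    await saveBatch(chunks[0]!)
  } else {
    // Sequential here for simplicity; the real code uses pMap for the multi-chunk case
    for (const batch of chunks) {
      await saveBatch(batch)
    }
  }
}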


0 comments on commit df9e60e
