Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
29 commits
Select commit. Hold Shift and click to select a range.
8a85408
update to readme
nikiwycherley Apr 23, 2025
00abfb3
Merge pull request #100 from DEFRA/feature/FSR-1429
nikiwycherley Apr 24, 2025
6f86ed3
Merge branch 'release/2.3.0' into development
neilbmclaughlin Apr 28, 2025
09bff02
Merge pull request #102 from DEFRA/master
neilbmclaughlin May 29, 2025
ec46c32
feature/FSR-1136-update-to-aws-sdk-v3
pwadmore-ea Jul 9, 2025
cad3156
Migrate to AWS SDK v3
pwadmore-ea Jul 10, 2025
3602ba0
Update dependencies
pwadmore-ea Jul 11, 2025
2119091
Use transformToString calls
pwadmore-ea Jul 14, 2025
3c0b625
Use asynchronous Lambda invocation
pwadmore-ea Jul 21, 2025
296fc20
Remove async function declarations
pwadmore-ea Jul 21, 2025
f6405b1
Remove async function declarations
pwadmore-ea Jul 21, 2025
c07243f
Correct case
pwadmore-ea Jul 21, 2025
eeb681f
Support S3 configuration
pwadmore-ea Jul 21, 2025
2a37c4f
Consume fetched station data using transformToString
pwadmore-ea Jul 22, 2025
25dc328
Resolve SonarQube cloud issue
pwadmore-ea Jul 22, 2025
328beb9
Rename environment variables for consistency
pwadmore-ea Jul 22, 2025
4d60947
Correct type of response body
pwadmore-ea Jul 22, 2025
c47cd32
Refactor DEFAULT_MAX_SOCKETS
pwadmore-ea Jul 22, 2025
54e1156
Remove environment variable defaults
pwadmore-ea Jul 22, 2025
52b1848
feature/FSR-1136-update-to-aws-sdk-v3 (#104)
pwadmore-ea Jul 31, 2025
589c10c
Revert "feature/FSR-1136-update-to-aws-sdk-v3 (#104)" (#105)
ShaunRixDefra Aug 6, 2025
46e694d
adding batch sizing to station process saveToObjects function
nikiwycherley Aug 18, 2025
1390243
npm audit
nikiwycherley Aug 18, 2025
2f10790
Sequential S3 uploads to avoid EMFILE errors
nikiwycherley Aug 18, 2025
2d70f1f
fix to some wording
nikiwycherley Aug 22, 2025
f05fc77
file uploaded in error
nikiwycherley Aug 22, 2025
21edd54
Correct typo
pwadmore-ea Aug 29, 2025
f9d8cd9
Merge pull request #106 from DEFRA/feature/FSR-1136-update-to-aws-sdk-v3
LeeGordon83 Sep 1, 2025
78460ab
Add release notes for version 2.4.0 including aws-sdk update
Keyurx11 Sep 2, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions .labrc.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
// Ignore items in global scope added by the AWS SDK for JavaScript v3.
const globalsAsArray = [
'__extends', '__assign', '__rest', '__decorate', '__param', '__esDecorate',
'__runInitializers', '__propKey', '__setFunctionName', '__metadata',
'__awaiter', '__generator', '__exportStar', '__createBinding', '__values',
'__read', '__spread', '__spreadArrays', '__spreadArray', '__await',
'__asyncGenerator', '__asyncDelegator', '__asyncValues', '__makeTemplateObject',
'__importStar', '__importDefault', '__classPrivateFieldGet', '__classPrivateFieldSet',
'__classPrivateFieldIn', '__addDisposableResource', '__disposeResources',
'__rewriteRelativeImportExtension'
]

const globals = globalsAsArray.toString()

module.exports = {
globals
}
4 changes: 2 additions & 2 deletions lib/functions/ffoi-process.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ module.exports.handler = async (event) => {
const bucket = event.Records[0].s3.bucket.name
const key = event.Records[0].s3.object.key
const data = await s3.getObject({ Bucket: bucket, Key: key })

const file = await util.parseXml(data.Body)
const bodyContents = await data.Body.transformToString()
const file = await util.parseXml(bodyContents)

// use pool and not client due to multiple database queries
const pool = new Pool({ connectionString: process.env.LFW_DATA_DB_CONNECTION })
Expand Down
4 changes: 2 additions & 2 deletions lib/functions/rloi-process.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ module.exports.handler = async (event) => {
const bucket = event.Records[0].s3.bucket.name
const key = event.Records[0].s3.object.key
const data = await s3.getObject({ Bucket: bucket, Key: key })

const file = await util.parseXml(data.Body)
const bodyContents = await data.Body.transformToString()
const file = await util.parseXml(bodyContents)

// use pool and not client due to multiple database queries
const pool = new Pool({ connectionString: process.env.LFW_DATA_DB_CONNECTION, max: 3 })
Expand Down
4 changes: 2 additions & 2 deletions lib/functions/station-process.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@ module.exports.handler = async (event) => {
const key = event.Records[0].s3.object.key

const data = await s3.getObject({ Bucket: bucket, Key: key })

const stations = await util.parseCsv(data.Body.toString())
const bodyContents = await data.Body.transformToString()
const stations = await util.parseCsv(bodyContents)

const pool = new Pool({ connectionString: process.env.LFW_DATA_DB_CONNECTION })
await station.saveToDb(stations, pool)
Expand Down
13 changes: 8 additions & 5 deletions lib/helpers/invoke-lambda.js
Original file line number Diff line number Diff line change
@@ -1,10 +1,13 @@
const { Lambda } = require('aws-sdk')
const { LambdaClient, InvokeCommand } = require('@aws-sdk/client-lambda')

const lambda = new Lambda()
const lambda = new LambdaClient({})

module.exports = function invokeLambda (functionName, payload) {
return lambda.invokeAsync({
const command = new InvokeCommand({
FunctionName: functionName,
InvokeArgs: JSON.stringify(payload)
}).promise()
Payload: Buffer.from(JSON.stringify(payload)),
InvocationType: 'Event'
})

return lambda.send(command)
}
26 changes: 20 additions & 6 deletions lib/helpers/s3.js
Original file line number Diff line number Diff line change
@@ -1,19 +1,33 @@
const AWS = require('aws-sdk')
const s3 = new AWS.S3()
const { S3Client, GetObjectCommand, PutObjectCommand, DeleteObjectCommand, ListObjectsV2Command } = require('@aws-sdk/client-s3')
const { NodeHttpHandler } = require('@smithy/node-http-handler')

const s3 = new S3Client({
requestHandler: new NodeHttpHandler({
httpsAgent: { maxSockets: process.env.LFW_DATA_S3_MAX_SOCKETS }
}),
region: process.env.LFW_DATA_S3_REGION
})

module.exports = {
getObject (params) {
return s3.getObject(params).promise()
const command = new GetObjectCommand(params)
return s3.send(command)
},

putObject (params) {
// Put object then set ACL to allow bucket-owner-full-control
params.ACL = 'bucket-owner-full-control'
return s3.putObject(params).promise()
const command = new PutObjectCommand(params)
return s3.send(command)
},

deleteObject (params) {
return s3.deleteObject(params).promise()
const command = new DeleteObjectCommand(params)
return s3.send(command)
},

listObjects (params) {
return s3.listObjectsV2(params).promise()
const command = new ListObjectsV2Command(params)
return s3.send(command)
}
}
3 changes: 2 additions & 1 deletion lib/models/rloi.js
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,8 @@ module.exports = {

try {
const result = await fetchStation(s3, bucket, `rloi/${item.$.region}/${item.$.stationReference}/station.json`)
station = JSON.parse(result.Body)
const bodyContents = await result.Body.transformToString()
station = JSON.parse(bodyContents)
} catch (err) {
// the console log is commented out so as not to spam the cloudwatch lambda
// logging, as the s3.getObject throws an error when it can't find the object, and there
Expand Down
12 changes: 7 additions & 5 deletions lib/models/station.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,8 @@ module.exports = {

console.log(`${count} stations to load`)

await Promise.all(stations.map(async station => {
// Sequential S3 uploads to avoid EMFILE errors
for (const station of stations) {
uploaded++
try {
await s3.putObject({
Expand All @@ -23,10 +24,10 @@ module.exports = {
Key: `rloi/${station.Region}/${station.Telemetry_ID}/station.json`
})
} catch (err) {
console.log(`Failed to upload(${uploaded}/${count}): ${params.Key}`)
console.log(`Failed to upload(${uploaded}/${count}): rloi/${station.Region}/${station.Telemetry_ID}/station.json`)
console.error(err)
}
}))
}
console.log('Stations processed')
},

Expand Down Expand Up @@ -86,9 +87,10 @@ module.exports = {
await pool.query('deleteStations')

// batch up the database inserts as struggles with > 1500 records
const stationsFactor = Math.floor(dbStations.length / 500)
const batchSize = 500
const stationsFactor = Math.floor(dbStations.length / batchSize)
for (let i = 0; i <= stationsFactor; i++) {
const batch = dbStations.slice(i * 500, (i * 500) + 500)
const batch = dbStations.slice(i * batchSize, (i * batchSize) + batchSize)
await pool.query('insertStations', batch)
}

Expand Down
Loading