Skip to content

Commit 0b1ff73

Browse files
heiskr and JamesMGreene authored
Update some readFileSync to await readFile with top level await (github#20525)
* Update some readFileSync to await readFile with top level await * More updates * Update all-products.js * Use 'lib/readfile-async.js' in runtime files for better performance * Remove unnecessary use of 'for await...of' loops * Revert to importing 'fs/promises' Co-authored-by: James M. Greene <jamesmgreene@github.com>
1 parent 47f358b commit 0b1ff73

23 files changed

+111
-98
lines changed

.github/actions-scripts/create-enterprise-issue.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
#!/usr/bin/env node
22

3-
import fs from 'fs'
3+
import fs from 'fs/promises'
44
import path from 'path'
55
import { getOctokit } from '@actions/github'
66
import enterpriseDates from '../../lib/enterprise-dates.js'
@@ -74,7 +74,7 @@ async function run() {
7474
process.exit(0)
7575
}
7676

77-
const milestoneSteps = fs.readFileSync(
77+
const milestoneSteps = await fs.readFile(
7878
path.join(
7979
process.cwd(),
8080
`.github/actions-scripts/enterprise-server-issue-templates/${milestone}-issue.md`

.github/actions-scripts/enterprise-algolia-label.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
#!/usr/bin/env node
22

3-
import fs from 'fs'
3+
import fs from 'fs/promises'
44
import { setOutput } from '@actions/core'
55

6-
const eventPayload = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, 'utf8'))
6+
const eventPayload = JSON.parse(await fs.readFile(process.env.GITHUB_EVENT_PATH, 'utf8'))
77

88
// This workflow-run script does the following:
99
// 1. Gets an array of labels on a PR.

lib/all-products.js

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,26 +1,31 @@
1-
import fs from 'fs'
1+
import fs from 'fs/promises'
22
import path from 'path'
3+
import readFileAsync from './readfile-async.js'
34
import frontmatter from './read-frontmatter.js'
45
import getApplicableVersions from './get-applicable-versions.js'
56
import removeFPTFromPath from './remove-fpt-from-path.js'
67

78
// Both internal and external products are specified in content/index.md
89
const homepage = path.posix.join(process.cwd(), 'content/index.md')
9-
const { data } = frontmatter(fs.readFileSync(homepage, 'utf8'))
10+
const { data } = frontmatter(await readFileAsync(homepage, 'utf8'))
1011
export const productIds = data.children
1112
const externalProducts = data.externalProducts
1213

1314
const internalProducts = {}
1415

15-
productIds.forEach((productId) => {
16+
for (const productId of productIds) {
1617
const relPath = productId
1718
const dir = path.posix.join('content', relPath)
1819

1920
// Early Access may not exist in the current checkout
20-
if (!fs.existsSync(dir)) return
21+
try {
22+
await fs.readdir(dir)
23+
} catch (e) {
24+
continue
25+
}
2126

2227
const toc = path.posix.join(dir, 'index.md')
23-
const { data } = frontmatter(fs.readFileSync(toc, 'utf8'))
28+
const { data } = frontmatter(await readFileAsync(toc, 'utf8'))
2429
const applicableVersions = getApplicableVersions(data.versions, toc)
2530
const href = removeFPTFromPath(path.posix.join('/', applicableVersions[0], productId))
2631

@@ -35,7 +40,7 @@ productIds.forEach((productId) => {
3540
}
3641

3742
internalProducts[productId].versions = applicableVersions
38-
})
43+
}
3944

4045
export const productMap = Object.assign({}, internalProducts, externalProducts)
4146

lib/check-node-version.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
import semver from 'semver'
2-
import fs from 'fs'
32
import path from 'path'
3+
import readFileAsync from './readfile-async.js'
44

5-
const packageFile = JSON.parse(fs.readFileSync(path.join(process.cwd(), './package.json')))
5+
const packageFile = JSON.parse(await readFileAsync(path.join(process.cwd(), './package.json')))
66
const { engines } = packageFile
77

88
/* istanbul ignore next */

lib/enterprise-server-releases.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
import versionSatisfiesRange from './version-satisfies-range.js'
2-
import fs from 'fs'
32
import path from 'path'
3+
import readFileAsync from './readfile-async.js'
44

55
export const dates = JSON.parse(
6-
fs.readFileSync(path.join(process.cwd(), './lib/enterprise-dates.json'))
6+
await readFileAsync(path.join(process.cwd(), './lib/enterprise-dates.json'))
77
)
88

99
// GHES Release Lifecycle Dates:

lib/process-learning-tracks.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ export default async function processLearningTracks(rawLearningTracks, context)
1111

1212
let featuredTrack
1313

14-
for await (const rawTrackName of rawLearningTracks) {
14+
for (const rawTrackName of rawLearningTracks) {
1515
let isFeaturedTrack = false
1616

1717
// Track names in frontmatter may include Liquid conditionals.

script/content-migrations/deduplicate-enterprise-assets.js

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
#!/usr/bin/env node
2-
import fs from 'fs'
2+
import fs from 'fs/promises'
33
import path from 'path'
44
import walk from 'walk-sync'
55
import xJimp from 'jimp'
@@ -38,7 +38,7 @@ async function main() {
3838
// the image in the local /assets/images directory, then we can
3939
// delete the enterprise image and the reference in the Markdown
4040
// will just work
41-
if (fs.existsSync(existingFileToCompare)) {
41+
if (await fs.readFile(existingFileToCompare)) {
4242
// Buffer.compare and Jimp both return 0 if files match
4343
let compareResult = 1
4444
try {
@@ -52,8 +52,8 @@ async function main() {
5252
const diff = await jimp.diff(existingImageToCompare, enterpriseImage)
5353
compareResult = diff.percent
5454
} else {
55-
const existingImageToCompare = await fs.readFileSync(existingFileToCompare)
56-
const enterpriseImage = await fs.readFileSync(file)
55+
const existingImageToCompare = await fs.readFile(existingFileToCompare)
56+
const enterpriseImage = await fs.readFile(file)
5757
compareResult = Buffer.compare(
5858
Buffer.from(existingImageToCompare),
5959
Buffer.from(enterpriseImage)
@@ -63,7 +63,7 @@ async function main() {
6363
console.log(file)
6464
console.log(err)
6565
}
66-
if (compareResult === 0) fs.unlinkSync(file)
66+
if (compareResult === 0) await fs.unlink(file)
6767
}
6868
}
6969
}

script/content-migrations/update-short-titles-from-csv.js

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
#!/usr/bin/env node
2-
import fs from 'fs'
2+
import fs from 'fs/promises'
33
import path from 'path'
44
import readFrontmatter from '../../lib/read-frontmatter.js'
55
import csv from 'csv-parse'
@@ -36,11 +36,11 @@ async function main() {
3636

3737
async function updateFrontmatter(csvData) {
3838
const filePath = path.join(process.cwd(), csvData[4])
39-
const fileContent = fs.readFileSync(filePath, 'utf8')
39+
const fileContent = await fs.readFile(filePath, 'utf8')
4040
const { content, data } = readFrontmatter(fileContent)
4141
data.shortTitle = csvData[3]
4242
const newContents = readFrontmatter.stringify(content, data, { lineWidth: 10000 })
43-
fs.writeFileSync(filePath, newContents)
43+
await fs.writeFile(filePath, newContents)
4444
}
4545

4646
// Ensure the columns being read out are in the location expected

script/create-glossary-from-spreadsheet.js

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
#!/usr/bin/env node
22
import { fileURLToPath } from 'url'
33
import path from 'path'
4-
import fs from 'fs'
4+
import fs from 'fs/promises'
55
import yaml from 'js-yaml'
66
const __dirname = path.dirname(fileURLToPath(import.meta.url))
77

@@ -13,7 +13,7 @@ const __dirname = path.dirname(fileURLToPath(import.meta.url))
1313

1414
const inputFile = path.join(__dirname, '../data/glossary.yml')
1515

16-
const glossary = yaml.load(fs.readFileSync(inputFile, 'utf8'))
16+
const glossary = yaml.load(await fs.readFile(inputFile, 'utf8'))
1717

1818
console.log(glossary)
1919
const external = []
@@ -27,6 +27,6 @@ glossary.forEach((term) => {
2727
}
2828
})
2929

30-
fs.writeFileSync(path.join(__dirname, '../data/glossaries/internal.yml'), yaml.dump(internal))
30+
await fs.writeFile(path.join(__dirname, '../data/glossaries/internal.yml'), yaml.dump(internal))
3131

32-
fs.writeFileSync(path.join(__dirname, '../data/glossaries/external.yml'), yaml.dump(external))
32+
await fs.writeFile(path.join(__dirname, '../data/glossaries/external.yml'), yaml.dump(external))

script/enterprise-server-releases/create-graphql-files.js

Lines changed: 24 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
#!/usr/bin/env node
2-
import fs from 'fs'
2+
import fs from 'fs/promises'
33
import path from 'path'
44
import program from 'commander'
55
import xMkdirp from 'mkdirp'
@@ -50,10 +50,12 @@ const oldVersionId = allVersions[oldVersion].miscVersionName
5050
// copy the schema file wholesale (there are separate schema files per version)
5151
const newSchemaFile = path.join(graphqlStaticDir, `schema-${newVersionId}.json`)
5252
const oldSchemaFile = path.join(graphqlStaticDir, `schema-${oldVersionId}.json`)
53-
fs.copyFileSync(oldSchemaFile, newSchemaFile)
53+
await fs.copyFile(oldSchemaFile, newSchemaFile)
5454

5555
// check that it worked
56-
if (!fs.existsSync(newSchemaFile)) {
56+
try {
57+
await fs.readFile(newSchemaFile)
58+
} catch (e) {
5759
console.log(`Error! Can't find ${newSchemaFile}.`)
5860
process.exit(1)
5961
}
@@ -64,10 +66,10 @@ const changesFile = path.join(graphqlStaticDir, 'upcoming-changes.json')
6466
const objectsFile = path.join(graphqlStaticDir, 'prerendered-objects.json')
6567
const inputObjectsFile = path.join(graphqlStaticDir, 'prerendered-input-objects.json')
6668

67-
const previews = JSON.parse(fs.readFileSync(previewsFile))
68-
const changes = JSON.parse(fs.readFileSync(changesFile))
69-
const objects = JSON.parse(fs.readFileSync(objectsFile))
70-
const inputObjects = JSON.parse(fs.readFileSync(inputObjectsFile))
69+
const previews = JSON.parse(await fs.readFile(previewsFile))
70+
const changes = JSON.parse(await fs.readFile(changesFile))
71+
const objects = JSON.parse(await fs.readFile(objectsFile))
72+
const inputObjects = JSON.parse(await fs.readFile(inputObjectsFile))
7173
// The prerendered objects file for the "old version" contains hardcoded links with the old version number.
7274
// We need to update those links to include the new version to prevent a test from failing.
7375
const regexOldVersion = new RegExp(oldVersion, 'gi')
@@ -104,33 +106,35 @@ if (!Object.keys(inputObjects).includes(newVersionId)) {
104106
}
105107

106108
// write the new files
107-
fs.writeFileSync(previewsFile, JSON.stringify(previews, null, 2))
108-
fs.writeFileSync(changesFile, JSON.stringify(changes, null, 2))
109-
fs.writeFileSync(objectsFile, JSON.stringify(objects, null, 2))
110-
fs.writeFileSync(inputObjectsFile, JSON.stringify(inputObjects, null, 2))
109+
await fs.writeFile(previewsFile, JSON.stringify(previews, null, 2))
110+
await fs.writeFile(changesFile, JSON.stringify(changes, null, 2))
111+
await fs.writeFile(objectsFile, JSON.stringify(objects, null, 2))
112+
await fs.writeFile(inputObjectsFile, JSON.stringify(inputObjects, null, 2))
111113

112114
// now create the new version directory in data/graphql
113115
const srcDir = path.join(graphqlDataDir, oldVersionId)
114116
const destDir = path.join(graphqlDataDir, newVersionId)
115117
mkdirp(destDir)
116118

117119
// copy the files
118-
fs.readdirSync(srcDir).forEach((file) => {
120+
const files = await fs.readdir(srcDir)
121+
for (const file of files) {
119122
const srcFile = path.join(srcDir, file)
120123
const destFile = path.join(destDir, file)
121-
fs.copyFileSync(srcFile, destFile)
122-
})
124+
await fs.copyFile(srcFile, destFile)
125+
}
123126

124127
// check that it worked
125-
if (!fs.existsSync(destDir)) {
128+
try {
129+
const destDirResult = await fs.readdir(destDir)
130+
if (!destDirResult.length) {
131+
console.log(`Error! The directory created at ${destDir} is empty.`)
132+
process.exit(1)
133+
}
134+
} catch (e) {
126135
console.log(`Error! A new directory was not successfully created at ${destDir}.`)
127136
process.exit(1)
128137
}
129138

130-
if (!fs.readdirSync(destDir).length) {
131-
console.log(`Error! The directory created at ${destDir} is empty.`)
132-
process.exit(1)
133-
}
134-
135139
// print success message
136140
console.log(`Done! Copied ${oldVersion} GraphQL files to ${newVersion} files.`)

0 commit comments

Comments
 (0)