require("dotenv").config();
const fs = require("fs");
const readDir = require("recursive-readdir");
const path = require("path");
const AWS = require("aws-sdk");
const mime = require("mime-types");
// const version = require('./package.json').version;

// Run this script from your CI pipeline after the build has completed.
// It reads the contents of the build directory and uploads them to S3 (the live assets bucket).
// Every deployment is immutable: each build produces uniquely named assets, so the cache is effectively invalidated on every deploy.
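//
// A minimal invocation sketch (values are placeholders and the script file
// name is an assumption; these variables can also live in a .env file, which
// dotenv loads above):
//   AWS_S3_REGION=eu-west-1 \
//   AWS_S3_BUCKET_NAME=my-live-assets-bucket \
//   AWS_ACCESS_KEY_ID=... \
//   AWS_SECRET_ACCESS_KEY=... \
//   node deploy-assets-to-s3.js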
AWS.config.update({
  region: process.env.AWS_S3_REGION,
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  maxRetries: 3,
});

// Retrieve all the file paths in the build directory
const getDirectoryFilesRecursive = (dir, ignores = []) => {
  return new Promise((resolve, reject) => {
    readDir(dir, ignores, (err, files) => (err ? reject(err) : resolve(files)));
  });
};

// The Key will look like this: _next/static/<buildid>/pages/index.js
// The <buildid> is exposed by Next.js and is unique per deployment.
// See: https://nextjs.org/blog/next-7/#static-cdn-support
const generateFileKey = (fileName, toReplace, replaced) => {
  // We only want the part of the path after `toReplace`,
  // e.g. '/some/path/.next/build-manifest.json' -> 'build-manifest.json'
  const S3objectPath = fileName.split(toReplace)[1];
  console.log("S3 object key:", replaced + S3objectPath);
  return replaced + S3objectPath;
  //   return version + replaced + S3objectPath;
};
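// Example (illustrative values): a build file
//   '/home/ci/app/.next/static/chunks/main.js'
// with toReplace = '.next/' and replaced = '_next/' produces the key
//   '_next/static/chunks/main.js'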

const s3 = new AWS.S3();
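// Delete every object under the given prefix before re-uploading.
// listObjectsV2 returns at most 1000 keys per call, so while the listing is
// truncated the function calls itself again until the prefix is empty.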
const deleteS3Folder = async (folderPath) => {
  try {
    const listParams = {
      Bucket: process.env.AWS_S3_BUCKET_NAME,
      Prefix: folderPath,
    };

    const listedObjects = await s3.listObjectsV2(listParams).promise();

    if (listedObjects.Contents.length === 0) {
      return;
    }

    const deleteParams = {
      Bucket: process.env.AWS_S3_BUCKET_NAME,
      Delete: { Objects: [] },
    };

    listedObjects.Contents.forEach(({ Key }) => {
      deleteParams.Delete.Objects.push({ Key });
    });

    await s3.deleteObjects(deleteParams).promise();

    if (listedObjects.IsTruncated) {
      await deleteS3Folder(folderPath);
    }
  } catch (error) {
    console.error(error);
  }
};
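// Upload the build output to S3. Only files whose path contains "/static/"
// are uploaded, and they get a one-year, immutable Cache-Control header
// because every deployment produces new asset paths (see the note at the top).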
const uploadToS3 = async (fileArray, toReplace, replaced) => {
  try {
    const uploads = fileArray
      .filter((file) => file.includes("/static/"))
      .map((file) => {
        console.log("Uploading:", file);
        // Configuring parameters for the S3 object
        const S3params = {
          Bucket: process.env.AWS_S3_BUCKET_NAME,
          Body: fs.createReadStream(file),
          Key: generateFileKey(file, toReplace, replaced),
          ACL: null,
          // Fall back to a generic type when the extension is unknown.
          ContentType: mime.lookup(file) || "application/octet-stream",
          // Content-Encoding is for codings such as gzip; a charset like
          // "utf-8" belongs in Content-Type, so it is not set here.
          CacheControl: "immutable,max-age=31536000,public",
        };

        return s3
          .upload(S3params)
          .promise()
          .then((data) => console.log(`Assets uploaded to S3:`, data.Key))
          .catch((err) => {
            // Set the exit code while letting
            // the process exit gracefully.
            console.error(err);
            process.exitCode = 1;
          });
      });

    // Wait for every upload so callers know when the deployment has finished.
    await Promise.all(uploads);
  } catch (error) {
    console.error(error);
  }
};

// Start function
// getDirectoryFilesRecursive(path, ignore);
const start = async function (dict) {
  for (let i = 0; i < dict.length; i++) {
    const files = await getDirectoryFilesRecursive(
      path.resolve(__dirname, dict[i].filePath),
      [".DS_Store", "BUILD_ID"]
    );
    await uploadToS3(files, dict[i].toReplace, dict[i].replaced);
  }
};

// Call start
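// Wipe the previous _next/ prefix, then re-upload: the .next/ build output is
// keyed under _next/, and files from public/assets/ keep their path with the
// leading "public/" stripped (only paths containing "/static/" are uploaded).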

deleteS3Folder("_next/").then((res) =>
  start([
    {
      filePath: ".next/",
      toReplace: ".next/",
      replaced: "_next/",
    },
    {
      filePath: "public/assets/",
      toReplace: "public/",
      replaced: "",
    },
  ])
);

// start([
//   {
//     filePath: '.next',
//     toReplace: '.next/',
//     replaced: '_next/'
//   }
// ]);
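//
// In CI this typically runs right after the build step. A package.json wiring
// sketch (the script and file names are assumptions, not taken from this repo):
//   "scripts": {
//     "build": "next build",
//     "deploy:assets": "node deploy-assets-to-s3.js"
//   }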